69 Commits

Author SHA1 Message Date
LukeGus
c7872770a1 feat: begin dashboard overhaul by splitting into cards and adding customization 2026-01-16 03:08:10 -06:00
LukeGus
004ddcb2bb feat: added -r and -l support for tunnels 2026-01-15 02:23:13 -06:00
Aditya Tawade
8eeef84b8a Enter Key for Quick Login (#513) 2026-01-15 02:03:04 -06:00
LukeGus
dc88ae5e8b feat: added quick connection system (ad-hoc) 2026-01-15 02:02:48 -06:00
LukeGus
cb478477e9 feat: add copy password button and fixed new line carriage issues and backend crash for auth key 2026-01-15 01:40:02 -06:00
LukeGus
b7bd1e50b3 feat: fix sudo password dialog ui, add totp/pass reset limiting, and refreshed users screen when auth is outdated 2026-01-14 18:14:51 -06:00
ZacharyZcR
230ab2f737 fix: add sudo support for listFiles and improve permission error handling (#512)
* feat: add sudo support for file manager operations

* fix: add sudo support for listFiles and improve permission error handling

---------

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>
2026-01-14 17:54:27 -06:00
ZacharyZcR
042bf255ef feat: add sudo support for file manager operations (#509) 2026-01-14 14:54:20 -06:00
ZacharyZcR
f7e99b5af5 feat: add toggle for password reset feature in admin settings (#508) 2026-01-14 14:49:38 -06:00
LukeGus
8fa093ae60 feat: re-added missing users.ts route from merge 2026-01-14 14:48:58 -06:00
LukeGus
dd62b77c79 feat: added sidebar management and improved some host manager UI/UX 2026-01-14 01:23:58 -06:00
LukeGus
264682c5ad feat: added close button on tab dropdown 2026-01-14 00:26:55 -06:00
LukeGus
7210381f17 feat: added toggle for command palette 2026-01-13 23:57:21 -06:00
LukeGus
c0f4f1d74b feat: update credential editor to use submitting system and add health monitor 2026-01-13 23:48:58 -06:00
LukeGus
f957959a86 feat: update readme 2026-01-13 01:17:48 -06:00
LukeGus
a54dbe5b46 feat: update readme 2026-01-13 01:09:59 -06:00
Luke Gustafson
ac1cf82bba New Crowdin updates (#506)
* New translations en.json (Romanian)

* New translations en.json (French)

* New translations en.json (Spanish)

* New translations en.json (Afrikaans)

* New translations en.json (Arabic)

* New translations en.json (Catalan)

* New translations en.json (Czech)

* New translations en.json (Danish)

* New translations en.json (German)

* New translations en.json (Greek)

* New translations en.json (Finnish)

* New translations en.json (Hebrew)

* New translations en.json (Hungarian)

* New translations en.json (Italian)

* New translations en.json (Japanese)

* New translations en.json (Korean)

* New translations en.json (Dutch)

* New translations en.json (Norwegian)

* New translations en.json (Polish)

* New translations en.json (Portuguese)

* New translations en.json (Russian)

* New translations en.json (Serbian (Cyrillic))

* New translations en.json (Swedish)

* New translations en.json (Turkish)

* New translations en.json (Ukrainian)

* New translations en.json (Chinese Simplified)

* New translations en.json (English)

* New translations en.json (Vietnamese)

* New translations en.json (German)

* New translations en.json (Norwegian)

* New translations en.json (Romanian)

* New translations en.json (French)

* New translations en.json (Spanish)

* New translations en.json (Arabic)

* New translations en.json (Czech)

* New translations en.json (Danish)

* New translations en.json (Greek)

* New translations en.json (Finnish)

* New translations en.json (Italian)

* New translations en.json (Japanese)

* New translations en.json (Dutch)

* New translations en.json (Norwegian)

* New translations en.json (Polish)

* New translations en.json (Portuguese)

* New translations en.json (Russian)

* New translations en.json (Swedish)

* New translations en.json (Ukrainian)

* New translations en.json (Chinese Simplified)

* New translations en.json (Chinese Traditional)

* New translations en.json (Finnish)

* New translations en.json (Chinese Simplified)

* New translations en.json (Chinese Traditional)

* New translations en.json (Chinese Simplified)

* New translations en.json (Chinese Traditional)

* New translations en.json (Bulgarian)

* New translations en.json (Indonesian)

* New translations en.json (Hindi)

* New translations en.json (Romanian)

* New translations en.json (French)

* New translations en.json (Spanish)

* New translations en.json (Afrikaans)

* New translations en.json (Arabic)

* New translations en.json (Catalan)

* New translations en.json (Czech)

* New translations en.json (German)

* New translations en.json (Greek)

* New translations en.json (Finnish)

* New translations en.json (Hebrew)

* New translations en.json (Hungarian)

* New translations en.json (Italian)

* New translations en.json (Japanese)

* New translations en.json (Korean)

* New translations en.json (Dutch)

* New translations en.json (Polish)

* New translations en.json (Portuguese)

* New translations en.json (Russian)

* New translations en.json (Serbian (Cyrillic))

* New translations en.json (Turkish)

* New translations en.json (Ukrainian)

* New translations en.json (Chinese Simplified)

* New translations en.json (Chinese Traditional)

* New translations en.json (Vietnamese)

* New translations en.json (Portuguese, Brazilian)

* New translations en.json (Bulgarian)

* New translations en.json (Indonesian)

* New translations en.json (Hindi)

* New translations en.json (Bengali)

* New translations en.json (Thai)
2026-01-13 00:55:19 -06:00
LukeGus
de556e3911 feat: remove locales 2026-01-13 00:55:10 -06:00
LukeGus
9be6b945c8 feat: add crowdin i18n 2026-01-13 00:39:35 -06:00
Luke Gustafson
1aebbee21e New Crowdin updates (#505)
* New translations en.json (Romanian)

* New translations en.json (French)

* New translations en.json (Spanish)

* New translations en.json (Afrikaans)

* New translations en.json (Arabic)

* New translations en.json (Catalan)

* New translations en.json (Czech)

* New translations en.json (Danish)

* New translations en.json (German)

* New translations en.json (Greek)

* New translations en.json (Finnish)

* New translations en.json (Hebrew)

* New translations en.json (Hungarian)

* New translations en.json (Italian)

* New translations en.json (Japanese)

* New translations en.json (Korean)

* New translations en.json (Dutch)

* New translations en.json (Norwegian)

* New translations en.json (Polish)

* New translations en.json (Portuguese)

* New translations en.json (Russian)

* New translations en.json (Serbian (Cyrillic))

* New translations en.json (Swedish)

* New translations en.json (Turkish)

* New translations en.json (Ukrainian)

* New translations en.json (Chinese Simplified)

* New translations en.json (English)

* New translations en.json (Vietnamese)

* New translations en.json (German)

* New translations en.json (Norwegian)

* New translations en.json (Romanian)

* New translations en.json (French)

* New translations en.json (Spanish)

* New translations en.json (Arabic)

* New translations en.json (Czech)

* New translations en.json (Danish)

* New translations en.json (Greek)

* New translations en.json (Finnish)

* New translations en.json (Italian)

* New translations en.json (Japanese)

* New translations en.json (Dutch)

* New translations en.json (Norwegian)

* New translations en.json (Polish)

* New translations en.json (Portuguese)

* New translations en.json (Russian)

* New translations en.json (Swedish)

* New translations en.json (Ukrainian)

* New translations en.json (Chinese Simplified)

* New translations en.json (Chinese Traditional)

* New translations en.json (Finnish)

* New translations en.json (Chinese Simplified)

* New translations en.json (Chinese Traditional)

* New translations en.json (Chinese Simplified)

* New translations en.json (Chinese Traditional)

* New translations en.json (Bulgarian)

* New translations en.json (Indonesian)

* New translations en.json (Hindi)
2026-01-13 00:09:26 -06:00
ZacharyZcR
80c09aef7d feat: add option to disable update checker (#502)
* feat: add option to disable update checker

Add a new setting in User Profile > Settings to disable automatic
update checking on startup and dashboard.

- Adds 'Disable Update Check' toggle in profile settings
- Skips GitHub API calls when disabled (reduces network requests)
- Works for both web app and Electron client

Fixes Termix-SSH/Support#410

* feat: remove locales

---------

Co-authored-by: LukeGus <bugattiguy527@gmail.com>
2026-01-13 00:02:23 -06:00
Luke Gustafson
115a1fd7f0 New Crowdin updates (#504)
* New translations en.json (Romanian)

* New translations en.json (French)

* New translations en.json (Spanish)

* New translations en.json (Afrikaans)

* New translations en.json (Arabic)

* New translations en.json (Catalan)

* New translations en.json (Czech)

* New translations en.json (Danish)

* New translations en.json (German)

* New translations en.json (Greek)

* New translations en.json (Finnish)

* New translations en.json (Hebrew)

* New translations en.json (Hungarian)

* New translations en.json (Italian)

* New translations en.json (Japanese)

* New translations en.json (Korean)

* New translations en.json (Dutch)

* New translations en.json (Norwegian)

* New translations en.json (Polish)

* New translations en.json (Portuguese)

* New translations en.json (Russian)

* New translations en.json (Serbian (Cyrillic))

* New translations en.json (Swedish)

* New translations en.json (Turkish)

* New translations en.json (Ukrainian)

* New translations en.json (Chinese Simplified)

* New translations en.json (English)

* New translations en.json (Vietnamese)

* New translations en.json (German)

* New translations en.json (Norwegian)

* New translations en.json (Romanian)

* New translations en.json (French)

* New translations en.json (Spanish)

* New translations en.json (Arabic)

* New translations en.json (Czech)

* New translations en.json (Danish)

* New translations en.json (Greek)

* New translations en.json (Finnish)

* New translations en.json (Italian)

* New translations en.json (Japanese)

* New translations en.json (Dutch)

* New translations en.json (Norwegian)

* New translations en.json (Polish)

* New translations en.json (Portuguese)

* New translations en.json (Russian)

* New translations en.json (Swedish)

* New translations en.json (Ukrainian)

* New translations en.json (Chinese Simplified)

* New translations en.json (Chinese Traditional)

* New translations en.json (Finnish)

* New translations en.json (Chinese Simplified)

* New translations en.json (Chinese Traditional)
2026-01-12 23:59:17 -06:00
LukeGus
a1c260ad22 Merge remote-tracking branch 'origin/dev-1.10.1' into dev-1.10.1 2026-01-12 23:58:52 -06:00
LukeGus
18aa4f4877 feat: remove locales 2026-01-12 23:58:45 -06:00
ZacharyZcR
8fc038e59b feat: add Ctrl+Alt key remapping for browser-blocked shortcuts (#501)
Browsers intercept Ctrl+W/T/N/Q, making them unusable in terminal.
This adds Ctrl+Alt+<key> as an alternative that sends Ctrl+<key>.

- Ctrl+Alt+W → Ctrl+W (nano search, delete word)
- Ctrl+Alt+T → Ctrl+T (transpose chars)
- Ctrl+Alt+N → Ctrl+N (next line)
- Ctrl+Alt+Q → Ctrl+Q (XON flow control)

Fixes Termix-SSH/Support#407
2026-01-12 23:58:13 -06:00
ZacharyZcR
aea87be4d3 feat: support URL routes to open terminal directly (#156) (#503)
* fix: resolve merge conflict artifacts in dev-1.10.1

- Fix missing closing tags in AppView.tsx NetworkGraphView
- Fix incomplete catch blocks in server-stats.ts and db/index.ts
- Fix missing closing brace in en.json ports section
- Fix HostManagerApp.tsx import path
- Fix stats-widgets.ts type definition
- Fix schema.ts networkTopology table definition
- Add type annotations in user-data-import.ts

* feat: support URL routes to open terminal directly (#156)

- Add /terminal/{hostNameOrId} route for new format
- Keep /hosts/{id}/terminal for backward compatibility
- Smart detection: numeric IDs for ID lookup, otherwise name lookup
- Clean URL after opening to prevent duplicate on refresh
- Show toast error when host not found

---------

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>
2026-01-12 23:57:09 -06:00
LukeGus
7caa32b364 feat: remove locales 2026-01-12 23:41:28 -06:00
LukeGus
868ac39b71 feat: add workflow/config to auto generate openapi json 2026-01-12 19:28:25 -06:00
LukeGus
8ae8520c44 feat: fix network stats merge and add openapi jsdocs comments 2026-01-12 19:12:08 -06:00
Luke Gustafson
8ce4c6f364 Merge pull request #478 from SteveJos/feature/add-network-graph
Feature request: Network graph
2026-01-12 03:06:02 -05:00
Luke Gustafson
a9a1a4b3d5 Merge branch 'dev-1.10.1' into feature/add-network-graph 2026-01-12 03:05:50 -05:00
ZacharyZcR
2e3f7e10c7 feat: add listening ports widget for server stats (#483)
Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>
2026-01-12 01:46:05 -06:00
Luke Gustafson
d821373b15 New Crowdin updates (#472)
* New translations en.json (Romanian)

* New translations en.json (French)

* New translations en.json (Spanish)

* New translations en.json (Afrikaans)

* New translations en.json (Arabic)

* New translations en.json (Catalan)

* New translations en.json (Czech)

* New translations en.json (Danish)

* New translations en.json (German)

* New translations en.json (Greek)

* New translations en.json (Finnish)

* New translations en.json (Hebrew)

* New translations en.json (Hungarian)

* New translations en.json (Italian)

* New translations en.json (Japanese)

* New translations en.json (Korean)

* New translations en.json (Dutch)

* New translations en.json (Norwegian)

* New translations en.json (Polish)

* New translations en.json (Portuguese)

* New translations en.json (Russian)

* New translations en.json (Serbian (Cyrillic))

* New translations en.json (Swedish)

* New translations en.json (Turkish)

* New translations en.json (Ukrainian)

* New translations en.json (Chinese Simplified)

* New translations en.json (English)

* New translations en.json (Vietnamese)

* New translations en.json (German)
2026-01-12 01:41:46 -06:00
Nunzio Marfè
816172d67b Feature: PWA (#479)
* feat: add PWA support with offline capabilities

- Add web app manifest with icons and theme configuration
- Add service worker with cache-first strategy for static assets
- Add useServiceWorker hook for SW registration
- Add PWA meta tags and Apple-specific tags to index.html
- Update vite.config.ts for optimal asset caching

* Update package-lock.json
2026-01-12 01:36:03 -06:00
ZacharyZcR
ceff07c685 feat: add firewall status widget for server stats (#484) 2026-01-12 01:31:21 -06:00
ZacharyZcR
1eb28dec8b fix: skip existing hosts and credentials during JSON import (#485)
Added duplicate detection for SSH hosts (by ip+port+username) and
credentials (by name) during import. Existing items are now skipped
by default, or updated if replaceExisting option is enabled.

This matches the existing behavior of importDismissedAlerts.

Fixes #389
2026-01-12 01:31:04 -06:00
ZacharyZcR
2b6361cbb6 fix: nginx permission denied on restricted kernels (#486) 2026-01-12 01:30:51 -06:00
ZacharyZcR
e6870f962a fix: delete all related data when removing user (#487) 2026-01-12 01:30:40 -06:00
ZacharyZcR
99b0181c45 fix: set default lineHeight to 1.0 for TUI apps compatibility (#488) 2026-01-12 01:30:27 -06:00
ZacharyZcR
58945288e0 fix: add shell creation timeout and improve error handling (#489) 2026-01-12 01:30:13 -06:00
ZacharyZcR
afb66a1098 fix: prevent session reset when updating host properties (#490) 2026-01-12 01:30:01 -06:00
ZacharyZcR
5f080be4ee fix: use correct MIME types for image preview (#491) 2026-01-12 01:29:35 -06:00
ZacharyZcR
4648549e74 fix: owner should not be marked as shared when host is shared to their role (#492) 2026-01-12 01:29:24 -06:00
ZacharyZcR
f5d948aa45 feat: add Docker container healthcheck (#493) 2026-01-12 01:29:02 -06:00
ZacharyZcR
81d506afba fix: restore SSH connection timeout to 120s for 2FA authentication (#494)
The timeout was reduced from 120s to 30s in v1.10, causing 2FA login
failures. Users with keyboard-interactive authentication (TOTP/2FA)
need sufficient time to enter their verification codes before the
SSH connection times out.

Fixes #404
2026-01-12 01:28:38 -06:00
ZacharyZcR
4150faa558 fix: use SFTP readdir for file listing to support non-Linux systems (#495)
The file manager now uses SFTP readdir as the primary method for
listing files, with ls -la as a fallback. This enables compatibility
with MikroTik RouterOS and other non-Linux systems that don't have
standard shell commands.

Fixes #317
2026-01-12 01:28:17 -06:00
ZacharyZcR
7ecfb4d685 fix: prevent long container names from overflowing card (#496)
Added min-w-0 to CardTitle to allow text truncation in flexbox.
Without this, flex items have min-width: auto which prevents
the truncate class from working properly.

Fixes #411
2026-01-12 01:27:58 -06:00
LukeGus
614f2f84ec fix: update readme 2026-01-12 00:29:14 -06:00
LukeGus
af63fe1b7b Merge remote-tracking branch 'origin/dev-1.10.1' into dev-1.10.1 2026-01-12 00:22:09 -06:00
LukeGus
4896b71b01 fix: remove top tech 2026-01-12 00:21:59 -06:00
Nunzio Marfè
69f3f88ae5 Handle enter button (#481)
* Update Crowdin configuration file

* Update Crowdin configuration file

* Update Linux Portable section with AUR link (#474)

* fix: file manager incorrectly decoding/encoding when editing files (#476)

* fix: electron build errors and skip macos job

* fix: testflight submit failure

* fix: made submit job match build type

* fix: resolve Vite build warnings for mixed static/dynamic imports (#473)

* Update Crowdin configuration file

* Update Crowdin configuration file

* fix: resolve Vite build warnings for mixed static/dynamic imports

- Convert all dynamic imports of main-axios.ts to static imports (10 files)
- Convert all dynamic imports of sonner to static imports (4 files)
- Add manual chunking configuration to vite.config.ts for better bundle splitting
  - react-vendor: React and React DOM
  - ui-vendor: Radix UI, lucide-react, clsx, tailwind-merge
  - monaco: Monaco Editor
  - codemirror: CodeMirror and related packages
- Increase chunkSizeWarningLimit to 1000kB

This resolves Vite warnings about mixed import strategies preventing
proper code-splitting.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>

---------

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>
Co-authored-by: Termix CI <ci@termix.dev>
Co-authored-by: Claude <noreply@anthropic.com>

* fix: file manager incorrectly decoding/encoding when editing files (made base64/utf8 dependent)

---------

Co-authored-by: Jefferson Nunn <89030989+jeffersonwarrior@users.noreply.github.com>
Co-authored-by: Termix CI <ci@termix.dev>
Co-authored-by: Claude <noreply@anthropic.com>

* fix: build error on docker (#477)

* fix: electron build errors and skip macos job

* fix: testflight submit failure

* fix: made submit job match build type

* fix: resolve Vite build warnings for mixed static/dynamic imports (#473)

* Update Crowdin configuration file

* Update Crowdin configuration file

* fix: resolve Vite build warnings for mixed static/dynamic imports

- Convert all dynamic imports of main-axios.ts to static imports (10 files)
- Convert all dynamic imports of sonner to static imports (4 files)
- Add manual chunking configuration to vite.config.ts for better bundle splitting
  - react-vendor: React and React DOM
  - ui-vendor: Radix UI, lucide-react, clsx, tailwind-merge
  - monaco: Monaco Editor
  - codemirror: CodeMirror and related packages
- Increase chunkSizeWarningLimit to 1000kB

This resolves Vite warnings about mixed import strategies preventing
proper code-splitting.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>

---------

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>
Co-authored-by: Termix CI <ci@termix.dev>
Co-authored-by: Claude <noreply@anthropic.com>

* fix: file manager incorrectly decoding/encoding when editing files (made base64/utf8 dependent)

* fix: build error on docker

---------

Co-authored-by: Jefferson Nunn <89030989+jeffersonwarrior@users.noreply.github.com>
Co-authored-by: Termix CI <ci@termix.dev>
Co-authored-by: Claude <noreply@anthropic.com>

* Increase max old space size for npm builds

* Increase Node.js memory limit in Dockerfile

* Remove NODE_OPTIONS from build commands in Dockerfile

* Change runner to blacksmith-4vcpu-ubuntu-2404

* fix: build error on docker

* Add handle on enter button;

---------

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>
Co-authored-by: Gaylord Julien <g.j@mailbox.org>
Co-authored-by: Jefferson Nunn <89030989+jeffersonwarrior@users.noreply.github.com>
Co-authored-by: Termix CI <ci@termix.dev>
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: LukeGus <bugattiguy527@gmail.com>
2026-01-04 10:29:11 -05:00
LukeGus
d632f2b91f fix: build error on docker 2026-01-02 01:52:25 -06:00
LukeGus
5366cb24ef Merge remote-tracking branch 'origin/dev-1.10.1' into dev-1.10.1 2026-01-02 00:32:33 -06:00
LukeGus
177e783f92 fix: file manager incorrectly decoding/encoding when editing files (made base64/utf8 dependent) 2026-01-02 00:32:22 -06:00
Jefferson Nunn
1a2179c345 fix: resolve Vite build warnings for mixed static/dynamic imports (#473)
* Update Crowdin configuration file

* Update Crowdin configuration file

* fix: resolve Vite build warnings for mixed static/dynamic imports

- Convert all dynamic imports of main-axios.ts to static imports (10 files)
- Convert all dynamic imports of sonner to static imports (4 files)
- Add manual chunking configuration to vite.config.ts for better bundle splitting
  - react-vendor: React and React DOM
  - ui-vendor: Radix UI, lucide-react, clsx, tailwind-merge
  - monaco: Monaco Editor
  - codemirror: CodeMirror and related packages
- Increase chunkSizeWarningLimit to 1000kB

This resolves Vite warnings about mixed import strategies preventing
proper code-splitting.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>

---------

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>
Co-authored-by: Termix CI <ci@termix.dev>
Co-authored-by: Claude <noreply@anthropic.com>
2026-01-01 18:43:38 -06:00
LukeGus
bdf9ea282e fix: made submit job match build type 2025-12-31 23:33:17 -06:00
LukeGus
6feb8405ce fix: testflight submit failure 2025-12-31 23:29:46 -06:00
LukeGus
2ee1318ded fix: electron build errors and skip macos job 2025-12-31 22:54:32 -06:00
LukeGus
51e6826c95 chore: update termix rb for 1.10.0 2025-12-31 22:45:43 -06:00
Luke Gustafson
ad86c2040b v1.10.0 (#471)
* fix select edit host but not update view (#438)

* fix: Checksum issue with chocolatey

* fix: Remove homebrew old stuff

* Add Korean translation (#439)

Co-authored-by: 송준우 <2484@coreit.co.kr>

* feat: Automate flatpak

* fix: Add imagemagick to electron builder to resolve build error

* fix: Build error with runtime repo flag

* fix: Flatpak runtime error and install freedesktop ver warning

* fix: Flatpak runtime error and install freedesktop ver warning

* feat: Re-add homebrew cask and move scripts to backend

* fix: No sandbox flag issue

* fix: Change name for electron macos cask output

* fix: Sandbox error with Linux

* fix: Remove coming soon for app stores in readme

* Adding Comment at the end of the public_key on the host on deploy (#440)

* Add termix.rb Cask file

* Update Termix to version 1.9.0 with new checksum

* Update README to remove 'coming soon' notes

* -Add New Interface for Credential DB
-Add Credential Name as a comment into the server authorized_key file

---------

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>

* Sudo auto fill password (#441)

* Add termix.rb Cask file

* Update Termix to version 1.9.0 with new checksum

* Update README to remove 'coming soon' notes

* Feature Sudo password auto-fill;

* Fix locale json schema;

---------

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>

* Added Italian Language; (#445)

* Add termix.rb Cask file

* Update Termix to version 1.9.0 with new checksum

* Update README to remove 'coming soon' notes

* Added Italian Language;

---------

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>

* Auto collapse snippet folders (#448)

* Add termix.rb Cask file

* Update Termix to version 1.9.0 with new checksum

* Update README to remove 'coming soon' notes

* feat: Add collapsable snippets (customizable in user profile)

* Translations (#447)

* Add termix.rb Cask file

* Update Termix to version 1.9.0 with new checksum

* Update README to remove 'coming soon' notes

* Added Italian Language;

* Fix translations;

Removed duplicate keys, synchronised other languages using English as the source, translated added keys, fixed inaccurate translations.

---------

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>

* Remove PTY-level keepalive (#449)

* Add termix.rb Cask file

* Update Termix to version 1.9.0 with new checksum

* Update README to remove 'coming soon' notes

* Remove PTY-level keepalive to prevent unwanted terminal output; use SSH-level keepalive instead

---------

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>

* feat: Separate server stats and tunnel management (improved both UI's) then started initial docker implementation

* fix: finalize adding docker to db

* feat: Add docker management support (local squash)

* Fix RBAC role system bugs and improve UX (#446)

* Fix RBAC role system bugs and improve UX

- Fix user list dropdown selection in host sharing
- Fix role sharing permissions to include role-based access
- Fix translation template interpolation for success messages
- Standardize system roles to admin and user only
- Auto-assign user role to new registrations
- Remove blocking confirmation dialogs in modal contexts
- Add missing i18n keys for common actions
- Fix button type to prevent unintended form submissions

* Enhance RBAC system with UI improvements and security fixes

- Move role assignment to Users tab with per-user role management
- Protect system roles (admin/user) from editing and manual assignment
- Simplify permission system: remove Use level, keep View and Manage
- Hide Update button and Sharing tab for view-only/shared hosts
- Prevent users from sharing hosts with themselves
- Unify table and modal styling across admin panels
- Auto-assign system roles on user registration
- Add permission metadata to host interface

* Add empty state message for role assignment

- Display helpful message when no custom roles available
- Clarify that system roles are auto-assigned
- Add noCustomRolesToAssign translation in English and Chinese

* fix: Prevent credential sharing errors for shared hosts

- Skip credential resolution for shared hosts with credential authentication
  to prevent decryption errors (credentials are encrypted per-user)
- Add warning alert in sharing tab when host uses credential authentication
- Inform users that shared users cannot connect to credential-based hosts
- Add translations for credential sharing warning (EN/ZH)

This prevents authentication failures when sharing hosts configured
with credential authentication while maintaining security by keeping
credentials isolated per user.

* feat: Improve rbac UI and fixes some bugs

---------

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>
Co-authored-by: LukeGus <bugattiguy527@gmail.com>

* SOCKS5 support (#452)

* Add termix.rb Cask file

* Update Termix to version 1.9.0 with new checksum

* Update README to remove 'coming soon' notes

* SOCKS5 support

Adding single and chain socks5 proxy support

* fix: cleanup files

---------

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>
Co-authored-by: LukeGus <bugattiguy527@gmail.com>

* Notes and Expiry fields add (#453)

* Add termix.rb Cask file

* Update Termix to version 1.9.0 with new checksum

* Update README to remove 'coming soon' notes

* Notes and Expiry add

* fix: cleanup files

---------

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>
Co-authored-by: LukeGus <bugattiguy527@gmail.com>

* fix: ssh host types

* fix: sudo incorrect styling and remove expiration date

* feat: add sudo password and add diagonal bg's

* fix: snippet running on enter key

* fix: base64 decoding

* fix: improve server stats / rbac

* fix: wrap ssh host json export in hosts array

* feat: auto trim host inputs, fix file manager jump hosts, dashboard prevent duplicates, file manager terminal not size updating, improve left sidebar sorting, hide/show tags, add appearance user profile tab, add new host manager tabs.

* feat: improve terminal connection speed

* fix: sqlite constraint errors and support non-root user (nginx perm issue)

* feat: add beta syntax highlighting to terminal

* feat: update imports and improve admin settings user management

* chore: update translations

* chore: update translations

* feat: Complete light mode implementation with semantic theme system (#450)

- Add comprehensive light/dark mode CSS variables with semantic naming
- Implement theme-aware scrollbars using CSS variables
- Add light mode backgrounds: --bg-base, --bg-elevated, --bg-surface, etc.
- Add theme-aware borders: --border-base, --border-panel, --border-subtle
- Add semantic text colors: --foreground-secondary, --foreground-subtle
- Convert oklch colors to hex for better compatibility
- Add theme awareness to CodeMirror editors
- Update dark mode colors for consistency (background, sidebar, card, muted, input)
- Add Tailwind color mappings for semantic classes

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>

* fix: syntax errors

* chore: updating/match themes and split admin settings

* feat: add translation workflow and remove old translation.json

* fix: translation workflow error

* fix: translation workflow error

* feat: improve translation system and update workflow

* fix: wrong path for translations

* fix: change translation to flat files

* fix: gh rule error

* chore: auto-translate to multiple languages (#458)

* chore: improve organization and made a few styling changes in host manager

* feat: improve terminal stability and split out the host manager

* fix: add unversioned files

* chore: migrate all to use the new theme system

* fix: wrong animation line colors

* fix: rbac implementation general issues (local squash)

* fix: remove unneeded files

* feat: add 10 new langs

* chore: update gitignore

* chore: auto-translate to multiple languages (#459)

* fix: improve tunnel system

* fix: properly split tabs, still need to fix up the host manager

* chore: cleanup files (possible RC)

* feat: add norwegian

* chore: auto-translate to multiple languages (#461)

* fix: small qol fixes and began readme update

* fix: run cleanup script

* feat: add docker docs button

* feat: general bug fixes and readme updates

* fix: translations

* chore: auto-translate to multiple languages (#462)

* fix: cleanup files

* fix: test new translation issue and add better server-stats support

* fix: fix translate error

* chore: auto-translate to multiple languages (#463)

* fix: fix translate mismatching text

* chore: auto-translate to multiple languages (#465)

* fix: fix translate mismatching text

* fix: fix translate mismatching text

* chore: auto-translate to multiple languages (#466)

* fix: fix translate mismatching text

* fix: fix translate mismatching text

* fix: fix translate mismatching text

* chore: auto-translate to multiple languages (#467)

* fix: fix translate mismatching text

* chore: auto-translate to multiple languages (#468)

* feat: add to readme, a few qol changes, and improve server stats in general

* chore: auto-translate to multiple languages (#469)

* feat: turned disk usage into graph and fixed issue with terminal console

* fix: electron build error and hide icons when shared

* chore: run clean

* fix: general server stats issues, file manager decoding, ui qol

* fix: add dashboard line breaks

* fix: docker console error

* fix: docker console not loading and mismatched stripped background for electron

* fix: docker console not loading

* chore: docker console not loading in docker

* chore: translate readme to chinese

* chore: match package lock to package json

* chore: nginx config issue for docker console

* chore: auto-translate to multiple languages (#470)

---------

Co-authored-by: Tran Trung Kien <kientt13.7@gmail.com>
Co-authored-by: junu <bigdwarf_@naver.com>
Co-authored-by: 송준우 <2484@coreit.co.kr>
Co-authored-by: SlimGary <trash.slim@gmail.com>
Co-authored-by: Nunzio Marfè <nunzio.marfe@protonmail.com>
Co-authored-by: Wesley Reid <starhound@lostsouls.org>
Co-authored-by: ZacharyZcR <zacharyzcr1984@gmail.com>
Co-authored-by: Denis <38875137+Medvedinca@users.noreply.github.com>
Co-authored-by: Peet McKinney <68706879+PeetMcK@users.noreply.github.com>
2025-12-31 22:20:12 -06:00
Steven Josefs
0216a2d2fe Fixing PR442:
- Fixed:
    - UI design elements
    - UI and button colors
    - JSON export
    - recent activity is default again
- Removed:
    - Online/Offline UI labels
    - left-click menu on hosts
- Added:
    - small pulsing dot inside the hosts to indicate online status like in the left bar
2025-12-30 18:20:57 +01:00
Luke Gustafson
7139290d14 Add GitHub Actions workflow for auto translation
This workflow automates the translation of JSON files using the i18n-ai-translate action, committing changes back to the repository.
2025-12-24 14:04:28 -06:00
Steven Josefs
8106999d1e Feature request network graph 2025-12-07 20:50:03 +01:00
Luke Gustafson
f0647dc7c1 Update README to remove 'coming soon' notes 2025-11-26 23:38:58 -06:00
Luke Gustafson
403800f42b Update Termix to version 1.9.0 with new checksum 2025-11-26 19:36:24 -06:00
Luke Gustafson
84ca8080f0 Add termix.rb Cask file 2025-11-26 19:33:15 -06:00
Luke Gustafson
8366c99b0f v1.9.0 (#437)
* fix: Resolve database encryption atomicity issues and enhance debugging (#430)

* fix: Resolve database encryption atomicity issues and enhance debugging

This commit addresses critical data corruption issues caused by non-atomic
file writes during database encryption, and adds comprehensive diagnostic
logging to help debug encryption-related failures.

**Problem:**
Users reported "Unsupported state or unable to authenticate data" errors
when starting the application after system crashes or Docker container
restarts. The root cause was non-atomic writes of encrypted database files:

1. Encrypted data file written (step 1)
2. Metadata file written (step 2)
→ If process crashes between steps 1 and 2, files become inconsistent
→ New IV/tag in data file, old IV/tag in metadata
→ GCM authentication fails on next startup
→ User data permanently inaccessible

**Solution - Atomic Writes:**

1. Write-to-temp + atomic-rename pattern:
   - Write to temporary files (*.tmp-timestamp-pid)
   - Perform atomic rename operations
   - Clean up temp files on failure

2. Data integrity validation:
   - Add dataSize field to metadata
   - Verify file size before decryption
   - Early detection of corrupted writes

3. Enhanced error diagnostics:
   - Key fingerprints (SHA256 prefix) for verification
   - File modification timestamps
   - Detailed GCM auth failure messages
   - Automatic diagnostic info generation

**Changes:**

database-file-encryption.ts:
- Implement atomic write pattern in encryptDatabaseFromBuffer
- Implement atomic write pattern in encryptDatabaseFile
- Add dataSize field to EncryptedFileMetadata interface
- Validate file size before decryption in decryptDatabaseToBuffer
- Enhanced error messages for GCM auth failures
- Add getDiagnosticInfo() function for comprehensive debugging
- Add debug logging for all encryption/decryption operations

system-crypto.ts:
- Add detailed logging for DATABASE_KEY initialization
- Log key source (env var vs .env file)
- Add key fingerprints to all log messages
- Better error messages when key loading fails

db/index.ts:
- Automatically generate diagnostic info on decryption failure
- Log detailed debugging information to help users troubleshoot

**Debugging Info Added:**

- Key initialization: source, fingerprint, length, path
- Encryption: original size, encrypted size, IV/tag prefixes, temp paths
- Decryption: file timestamps, metadata content, key fingerprint matching
- Auth failures: .env file status, key availability, file consistency
- File diagnostics: existence, readability, size validation, mtime comparison

**Backward Compatibility:**
- dataSize field is optional (metadata.dataSize?: number)
- Old encrypted files without dataSize continue to work
- No migration required

**Testing:**
- Compiled successfully
- No breaking changes to existing APIs
- Graceful handling of legacy v1 encrypted files

Fixes data loss issues reported by users experiencing container restarts
and system crashes during database saves.

* fix: Cleanup PR

* Update src/backend/utils/database-file-encryption.ts

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Update src/backend/utils/database-file-encryption.ts

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Update src/backend/utils/database-file-encryption.ts

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Update src/backend/utils/database-file-encryption.ts

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Update src/backend/utils/database-file-encryption.ts

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

---------

Co-authored-by: LukeGus <bugattiguy527@gmail.com>
Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* fix: Merge metadata and DB into 1 file

* fix: Add initial command palette

* Feature/german language support (#431)

* Update translation.json

Fixed some translation issues for German, made it more user friendly and common.

* Update translation.json

added updated block for serverStats

* Update translation.json

Added translations

* Update translation.json

Removed duplicate of "free":"Free"

* feat: Finalize command palette

* fix: Several bug fixes for terminals, server stats, and general feature improvements

* feat: Enhanced security, UI improvements, and animations (#432)

* fix: Remove empty catch blocks and add error logging

* refactor: Modularize server stats widget collectors

* feat: Add i18n support for terminal customization and login stats

- Add comprehensive terminal customization translations (60+ keys) for appearance, behavior, and advanced settings across all 4 languages
- Add SSH login statistics translations
- Update HostManagerEditor to use i18n for all terminal customization UI elements
- Update LoginStatsWidget to use i18n for all UI text
- Add missing logger imports in backend files for improved debugging

* feat: Add keyboard shortcut enhancements with Kbd component

- Add shadcn kbd component for displaying keyboard shortcuts
- Enhance file manager context menu to display shortcuts with Kbd component
- Add 5 new keyboard shortcuts to file manager:
  - Ctrl+D: Download selected files
  - Ctrl+N: Create new file
  - Ctrl+Shift+N: Create new folder
  - Ctrl+U: Upload files
  - Enter: Open/run selected file
- Add keyboard shortcut hints to command palette footer
- Create helper function to parse and render keyboard shortcuts

* feat: Add i18n support for command palette

- Add commandPalette translation section with 22 keys to all 4 languages
- Update CommandPalette component to use i18n for all UI text
- Translate search placeholder, group headings, menu items, and shortcut hints
- Support multilingual command palette interface

* feat: Add smooth transitions and animations to UI

- Add fade-in/fade-out transition to command palette (200ms)
- Add scale animation to command palette on open/close
- Add smooth popup animation to context menu (150ms)
- Add visual feedback for file selection with ring effect
- Add hover scale effect to file grid items
- Add transition-all to list view items for consistent behavior
- Zero JavaScript overhead, pure CSS transitions
- All animations under 200ms for instant feel

* feat: Add button active state and dashboard card animations

- Add active:scale-95 to all buttons for tactile click feedback
- Add hover border effect to dashboard cards (150ms transition)
- Add pulse animation to dashboard loading states
- Pure CSS transitions with zero JavaScript overhead
- Improves enterprise-level feel of UI

* feat: Add smooth macOS-style page transitions

- Add fullscreen crossfade transition for login/logout (300ms fade-out + 400ms fade-in)
- Add slide-in-from-right animation for all page switches (Dashboard, Terminal, SSH Manager, Admin, Profile)
- Fix TypeScript compilation by adding esModuleInterop to tsconfig.node.json
- Pass handleLogout from DesktopApp to LeftSidebar for consistent transition behavior

All page transitions now use Tailwind animate-in utilities with 300ms duration for smooth, native-feeling UX

* fix: Add key prop to force animation re-trigger on tab switch

Each page container now has key={currentTab} to ensure React unmounts and remounts the element on every tab switch, properly triggering the slide-in animation

* revert: Remove page transition animations

Page switching animations were not noticeable enough and felt unnecessary.
Keep only the login/logout fullscreen crossfade transitions which provide clear visual feedback for authentication state changes

* feat: Add ripple effect to login/logout transitions

Add three-layer expanding ripple animation during fadeOut phase:
- Ripples expand from screen center using primary theme color
- Each layer has staggered delay (0ms, 150ms, 300ms) for wave effect
- Ripples fade out as they expand to create elegant visual feedback
- Uses pure CSS keyframe animation, no external libraries

Total animation: 800ms ripple + 300ms screen fade

* feat: Add smooth TERMIX logo animation to transitions

Changes:
- Extend transition duration from 300ms/400ms to 800ms/600ms for more elegant feel
- Reduce ripple intensity from /20,/15,/10 to /8,/5 for subtlety
- Slow down ripple animation from 0.8s to 2s with cubic-bezier easing
- Add centered TERMIX logo with monospace font and subtitle
- Logo fades in from 80% scale, holds, then fades out at 110% scale
- Total effect: 1.2s logo animation synced with 2s ripple waves

Creates a premium, branded transition experience

* feat: Enhance transition animation with premium details

Timing adjustments:
- Extend fadeOut from 800ms to 1200ms
- Extend fadeIn from 600ms to 800ms
- Slow background fade to 700ms for elegance

Visual enhancements:
- Add 4-layer ripple waves (10%, 7%, 5%, 3% opacity) with staggered delays
- Ripple animation extended to 2.5s with refined opacity curve
- Logo blur effect: starts at 8px, sharpens to 0px, exits at 4px
- Logo glow effect: triple-layer text-shadow using primary theme color
- Increase logo size from text-6xl to text-7xl
- Subtitle delayed fade-in from bottom with smooth slide animation

Creates a cinematic, polished brand experience

* feat: Redesign login page with split-screen cinematic layout

Major redesign of authentication page:

Left Side (40% width):
- Full-height gradient background using primary theme color
- Large TERMIX logo with glow effect
- Subtitle and tagline
- Infinite animated ripple waves (3 layers)
- Hidden on mobile, shows brand identity

Right Side (60% width):
- Centered glassmorphism card with backdrop blur
- Refined tab switcher with pill-style active state
- Enlarged title with gradient text effect
- Added welcome subtitles for better UX
- Card slides in from bottom on load
- All existing functionality preserved

Visual enhancements:
- Tab navigation: segmented control style in muted container
- Active tab: white background with subtle shadow
- Smooth 200ms transitions on all interactions
- Card: rounded-2xl, shadow-xl, semi-transparent border

Creates premium, modern login experience matching transition animations

* feat: Update login page theme colors and add i18n support

- Changed login page gradient from blue to match dark theme colors
- Updated ripple effects to use theme primary color
- Added i18n translation keys for login page (auth.tagline, auth.description, auth.welcomeBack, auth.createAccount, auth.continueExternal)
- Updated all language files (en, zh, de, ru, pt-BR) with new translations
- Fixed TypeScript compilation issues by clearing build cache

* refactor: Use shadcn Tabs component and fix modal styling

- Replace custom tab navigation with shadcn Tabs component
- Restore border-2 border-dark-border for modal consistency
- Remove circular icon from login success message
- Simplify authentication success display

* refactor: Remove ripple effects and gradient from login page

- Remove animated ripple background effects
- Remove gradient background, use solid color (bg-dark-bg-darker)
- Remove text-shadow glow effect from logo
- Simplify brand showcase to clean, minimal design

* feat: Add decorative slash and remove subtitle from login page

- Add decorative slash divider with gradient lines below TERMIX logo
- Remove subtitle text (welcomeBack and createAccount)
- Simplify page title to show only the main heading

* feat: Add diagonal line pattern background to login page

- Replace decorative slash with subtle diagonal line pattern background
- Use repeating-linear-gradient at 45deg angle
- Set very low opacity (0.03) for subtle effect
- Pattern uses theme primary color

* fix: Display diagonal line pattern on login background

- Combine background color and pattern in single style attribute
- Use white semi-transparent lines (rgba 0.03 opacity)
- 45deg angle, 35px spacing, 2px width
- Remove separate overlay div to ensure pattern visibility

* security: Fix user enumeration vulnerability in login

- Unify error messages for invalid username and incorrect password
- Both return 401 status with 'Invalid username or password'
- Prevent attackers from enumerating valid usernames
- Maintain detailed logging for debugging purposes
- Changed from 404 'User not found' to generic auth failure message

* security: Add login rate limiting to prevent brute force attacks

- Implement LoginRateLimiter with IP and username-based tracking
- Block after 5 failed attempts within 15 minutes
- Lock account/IP for 15 minutes after threshold
- Automatic cleanup of expired entries every 5 minutes
- Track remaining attempts in logs for monitoring
- Return 429 status with remaining time on rate limit
- Reset counters on successful login
- Dual protection: both IP-based and username-based limits

* French translation (#434)

* Adding French Language

* Enhancements

* feat: Replace the old ssh tools system with a new dedicated sidebar

* fix: Merge zac/luke

* fix: Finalize new sidebar, improve and loading animations

* Added ability to close non-primary tabs involved in a split view (#435)

* fix: General bug fixes/small feature improvements

* feat: General UI improvements and translation updates

* fix: Command history and file manager styling issues

* feat: General bug fixes, added server stat commands, improved split screen, link accounts, etc

* fix: add Accept header for OIDC callback request (#436)

* Delete DOWNLOADS.md

* fix: add Accept header for OIDC callback request

---------

Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>

* fix: More bug fixes and QOL fixes

* fix: Server stats not respecting interval and fixed SSH tool type issues

* fix: Remove github links

* fix: Delete account spacing

* fix: Increment version

* fix: Unable to delete hosts and add nginx for terminal

* fix: Unable to delete hosts

* fix: Unable to delete hosts

* fix: Unable to delete hosts

* fix: OIDC/local account linking breaking both logins

* chore: File cleanup

* feat: Max terminal tab size and save current file manager sorting type

* fix: Terminal display issue, migrate host editor to use combobox

* feat: Add snippet folder/customization system

* fix: Fix OIDC linking and prep release

* fix: Increment version

---------

Co-authored-by: ZacharyZcR <zacharyzcr1984@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Max <herzmaximilian@gmail.com>
Co-authored-by: SlimGary <trash.slim@gmail.com>
Co-authored-by: jarrah31 <jarrah31@gmail.com>
Co-authored-by: Kf637 <mail@kf637.tech>
2025-11-17 09:46:05 -06:00
Luke Gustafson
38a59f3579 Delete DOWNLOADS.md 2025-11-11 11:18:06 -06:00
LukeGus
9ca7df6542 fix: Auth ref error 2025-11-05 11:43:14 -06:00
283 changed files with 130731 additions and 19677 deletions

View File

@@ -17,7 +17,7 @@ on:
jobs:
build:
runs-on: ubuntu-latest
runs-on: blacksmith-4vcpu-ubuntu-2404
steps:
- name: Checkout repository
uses: actions/checkout@v5
@@ -84,7 +84,8 @@ jobs:
labels: |
org.opencontainers.image.source=https://github.com/${{ github.repository }}
org.opencontainers.image.revision=${{ github.sha }}
outputs: type=registry,compression=zstd,compression-level=19
org.opencontainers.image.created=${{ github.run_id }}
outputs: type=registry,compression=gzip,compression-level=9
- name: Cleanup Docker
if: always()

View File

@@ -27,7 +27,7 @@ on:
jobs:
build-windows:
runs-on: windows-latest
if: github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'windows' || github.event.inputs.build_type == ''
if: (github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'windows' || github.event.inputs.build_type == '') && github.event.inputs.artifact_destination != 'submit'
permissions:
contents: write
@@ -72,10 +72,6 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: npm run build && npx electron-builder --win --x64 --ia32
- name: List release files
run: |
dir release
- name: Upload Windows x64 NSIS Installer
uses: actions/upload-artifact@v4
if: hashFiles('release/termix_windows_x64_nsis.exe') != '' && github.event.inputs.artifact_destination != 'none'
@@ -136,7 +132,7 @@ jobs:
build-linux:
runs-on: blacksmith-4vcpu-ubuntu-2404
if: github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'linux' || github.event.inputs.build_type == ''
if: (github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'linux' || github.event.inputs.build_type == '') && github.event.inputs.artifact_destination != 'submit'
permissions:
contents: write
@@ -199,17 +195,6 @@ jobs:
cd ..
- name: List release files
run: |
ls -la release/
- name: Debug electron-builder output
if: always()
run: |
if [ -f "release/builder-debug.yml" ]; then
cat release/builder-debug.yml
fi
- name: Upload Linux x64 AppImage
uses: actions/upload-artifact@v4
if: hashFiles('release/termix_linux_x64_appimage.AppImage') != '' && github.event.inputs.artifact_destination != 'none'
@@ -282,9 +267,96 @@ jobs:
path: release/termix_linux_armv7l_portable.tar.gz
retention-days: 30
- name: Install Flatpak builder and dependencies
run: |
sudo apt-get update
sudo apt-get install -y flatpak flatpak-builder imagemagick
- name: Add Flathub repository
run: |
sudo flatpak remote-add --if-not-exists flathub https://flathub.org/repo/flathub.flatpakrepo
- name: Install Flatpak runtime and SDK
run: |
sudo flatpak install -y flathub org.freedesktop.Platform//24.08
sudo flatpak install -y flathub org.freedesktop.Sdk//24.08
sudo flatpak install -y flathub org.electronjs.Electron2.BaseApp//24.08
- name: Get version for Flatpak
id: flatpak-version
run: |
VERSION=$(node -p "require('./package.json').version")
RELEASE_DATE=$(date +%Y-%m-%d)
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "release_date=$RELEASE_DATE" >> $GITHUB_OUTPUT
- name: Prepare Flatpak files
run: |
VERSION="${{ steps.flatpak-version.outputs.version }}"
RELEASE_DATE="${{ steps.flatpak-version.outputs.release_date }}"
CHECKSUM_X64=$(sha256sum "release/termix_linux_x64_appimage.AppImage" | awk '{print $1}')
CHECKSUM_ARM64=$(sha256sum "release/termix_linux_arm64_appimage.AppImage" | awk '{print $1}')
mkdir -p flatpak-build
cp flatpak/com.karmaa.termix.yml flatpak-build/
cp flatpak/com.karmaa.termix.desktop flatpak-build/
cp flatpak/com.karmaa.termix.metainfo.xml flatpak-build/
cp public/icon.svg flatpak-build/com.karmaa.termix.svg
convert public/icon.png -resize 256x256 flatpak-build/icon-256.png
convert public/icon.png -resize 128x128 flatpak-build/icon-128.png
cd flatpak-build
sed -i "s|https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_x64_appimage.AppImage|file://$(realpath ../release/termix_linux_x64_appimage.AppImage)|g" com.karmaa.termix.yml
sed -i "s|https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_arm64_appimage.AppImage|file://$(realpath ../release/termix_linux_arm64_appimage.AppImage)|g" com.karmaa.termix.yml
sed -i "s/CHECKSUM_X64_PLACEHOLDER/$CHECKSUM_X64/g" com.karmaa.termix.yml
sed -i "s/CHECKSUM_ARM64_PLACEHOLDER/$CHECKSUM_ARM64/g" com.karmaa.termix.yml
sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" com.karmaa.termix.metainfo.xml
sed -i "s/DATE_PLACEHOLDER/$RELEASE_DATE/g" com.karmaa.termix.metainfo.xml
- name: Build Flatpak bundle
run: |
cd flatpak-build
flatpak-builder --repo=repo --force-clean --disable-rofiles-fuse build-dir com.karmaa.termix.yml
# Determine the architecture
ARCH=$(uname -m)
if [ "$ARCH" = "x86_64" ]; then
FLATPAK_ARCH="x86_64"
elif [ "$ARCH" = "aarch64" ]; then
FLATPAK_ARCH="aarch64"
else
FLATPAK_ARCH="$ARCH"
fi
# Build bundle for the current architecture
flatpak build-bundle repo ../release/termix_linux_flatpak.flatpak com.karmaa.termix --runtime-repo=https://flathub.org/repo/flathub.flatpakrepo
- name: Create flatpakref file
run: |
VERSION="${{ steps.flatpak-version.outputs.version }}"
cp flatpak/com.karmaa.termix.flatpakref release/
sed -i "s|VERSION_PLACEHOLDER|release-${VERSION}-tag|g" release/com.karmaa.termix.flatpakref
- name: Upload Flatpak bundle
uses: actions/upload-artifact@v4
if: hashFiles('release/termix_linux_flatpak.flatpak') != '' && github.event.inputs.artifact_destination != 'none'
with:
name: termix_linux_flatpak
path: release/termix_linux_flatpak.flatpak
retention-days: 30
- name: Upload Flatpakref
uses: actions/upload-artifact@v4
if: hashFiles('release/com.karmaa.termix.flatpakref') != '' && github.event.inputs.artifact_destination != 'none'
with:
name: termix_linux_flatpakref
path: release/com.karmaa.termix.flatpakref
retention-days: 30
build-macos:
runs-on: macos-latest
if: github.event.inputs.build_type == 'macos' || github.event.inputs.build_type == 'all'
if: (github.event.inputs.build_type == 'macos' || github.event.inputs.build_type == 'all') && github.event.inputs.artifact_destination != 'submit'
needs: []
permissions:
contents: write
@@ -425,11 +497,6 @@ jobs:
export GH_TOKEN="${{ secrets.GITHUB_TOKEN }}"
npx electron-builder --mac dmg --universal --x64 --arm64 --publish never
- name: List release directory
if: steps.check_certs.outputs.has_certs == 'true'
run: |
ls -R release/ || echo "Release directory not found"
- name: Upload macOS MAS PKG
if: steps.check_certs.outputs.has_certs == 'true' && hashFiles('release/termix_macos_universal_mas.pkg') != '' && (github.event.inputs.artifact_destination == 'file' || github.event.inputs.artifact_destination == 'release' || github.event.inputs.artifact_destination == 'submit')
uses: actions/upload-artifact@v4
@@ -463,42 +530,51 @@ jobs:
path: release/termix_macos_arm64_dmg.dmg
retention-days: 30
- name: Check for App Store Connect API credentials
if: steps.check_certs.outputs.has_certs == 'true'
id: check_asc_creds
- name: Get version for Homebrew
id: homebrew-version
run: |
if [ -n "${{ secrets.APPLE_KEY_ID }}" ] && [ -n "${{ secrets.APPLE_ISSUER_ID }}" ] && [ -n "${{ secrets.APPLE_KEY_CONTENT }}" ]; then
echo "has_credentials=true" >> $GITHUB_OUTPUT
fi
VERSION=$(node -p "require('./package.json').version")
echo "version=$VERSION" >> $GITHUB_OUTPUT
- name: Setup Ruby for Fastlane
if: steps.check_asc_creds.outputs.has_credentials == 'true' && github.event.inputs.artifact_destination == 'submit'
uses: ruby/setup-ruby@v1
- name: Generate Homebrew Cask
if: hashFiles('release/termix_macos_universal_dmg.dmg') != '' && (github.event.inputs.artifact_destination == 'file' || github.event.inputs.artifact_destination == 'release')
run: |
VERSION="${{ steps.homebrew-version.outputs.version }}"
DMG_PATH="release/termix_macos_universal_dmg.dmg"
CHECKSUM=$(shasum -a 256 "$DMG_PATH" | awk '{print $1}')
mkdir -p homebrew-generated
cp Casks/termix.rb homebrew-generated/termix.rb
sed -i '' "s/VERSION_PLACEHOLDER/$VERSION/g" homebrew-generated/termix.rb
sed -i '' "s/CHECKSUM_PLACEHOLDER/$CHECKSUM/g" homebrew-generated/termix.rb
sed -i '' "s|version \".*\"|version \"$VERSION\"|g" homebrew-generated/termix.rb
sed -i '' "s|sha256 \".*\"|sha256 \"$CHECKSUM\"|g" homebrew-generated/termix.rb
sed -i '' "s|release-[0-9.]*-tag|release-$VERSION-tag|g" homebrew-generated/termix.rb
- name: Upload Homebrew Cask as artifact
uses: actions/upload-artifact@v4
if: hashFiles('homebrew-generated/termix.rb') != '' && github.event.inputs.artifact_destination == 'file'
with:
ruby-version: "3.2"
bundler-cache: false
name: termix_macos_homebrew_cask
path: homebrew-generated/termix.rb
retention-days: 30
- name: Install Fastlane
if: steps.check_asc_creds.outputs.has_credentials == 'true' && github.event.inputs.artifact_destination == 'submit'
- name: Upload Homebrew Cask to release
if: hashFiles('homebrew-generated/termix.rb') != '' && github.event.inputs.artifact_destination == 'release'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
gem install fastlane -N
VERSION="${{ steps.homebrew-version.outputs.version }}"
RELEASE_TAG="release-$VERSION-tag"
- name: Deploy to App Store Connect (TestFlight)
if: steps.check_asc_creds.outputs.has_credentials == 'true' && github.event.inputs.artifact_destination == 'submit'
run: |
PKG_FILE=$(find release -name "*.pkg" -type f | head -n 1)
if [ -z "$PKG_FILE" ]; then
gh release list --repo ${{ github.repository }} --limit 100 | grep -q "$RELEASE_TAG" || {
echo "Release $RELEASE_TAG not found"
exit 1
fi
}
mkdir -p ~/private_keys
echo "${{ secrets.APPLE_KEY_CONTENT }}" | base64 --decode > ~/private_keys/AuthKey_${{ secrets.APPLE_KEY_ID }}.p8
xcrun altool --upload-app -f "$PKG_FILE" \
--type macos \
--apiKey "${{ secrets.APPLE_KEY_ID }}" \
--apiIssuer "${{ secrets.APPLE_ISSUER_ID }}"
continue-on-error: true
gh release upload "$RELEASE_TAG" homebrew-generated/termix.rb --repo ${{ github.repository }} --clobber
- name: Clean up keychains
if: always()
@@ -508,8 +584,7 @@ jobs:
submit-to-chocolatey:
runs-on: windows-latest
if: github.event.inputs.artifact_destination == 'submit'
needs: [build-windows]
if: github.event.inputs.artifact_destination == 'submit' && (github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'windows' || github.event.inputs.build_type == '')
permissions:
contents: read
@@ -525,20 +600,25 @@ jobs:
$VERSION = (Get-Content package.json | ConvertFrom-Json).version
echo "version=$VERSION" >> $env:GITHUB_OUTPUT
- name: Download Windows x64 MSI artifact
uses: actions/download-artifact@v4
with:
name: termix_windows_x64_msi
path: artifact
- name: Get MSI file info
- name: Download and prepare MSI info from public release
id: msi-info
run: |
$VERSION = "${{ steps.package-version.outputs.version }}"
$MSI_FILE = Get-ChildItem -Path artifact -Filter "*.msi" | Select-Object -First 1
$MSI_NAME = $MSI_FILE.Name
$CHECKSUM = (Get-FileHash -Path $MSI_FILE.FullName -Algorithm SHA256).Hash
$MSI_NAME = "termix_windows_x64_msi.msi"
$DOWNLOAD_URL = "https://github.com/Termix-SSH/Termix/releases/download/release-$($VERSION)-tag/$($MSI_NAME)"
Write-Host "Downloading from $DOWNLOAD_URL"
New-Item -ItemType Directory -Force -Path "release_asset"
$DOWNLOAD_PATH = "release_asset\$MSI_NAME"
try {
Invoke-WebRequest -Uri $DOWNLOAD_URL -OutFile $DOWNLOAD_PATH -UseBasicParsing
} catch {
Write-Error "Failed to download MSI from $DOWNLOAD_URL. Please ensure the release and asset exist."
exit 1
}
$CHECKSUM = (Get-FileHash -Path $DOWNLOAD_PATH -Algorithm SHA256).Hash
echo "msi_name=$MSI_NAME" >> $env:GITHUB_OUTPUT
echo "checksum=$CHECKSUM" >> $env:GITHUB_OUTPUT
@@ -609,8 +689,8 @@ jobs:
submit-to-flatpak:
runs-on: ubuntu-latest
if: github.event.inputs.artifact_destination == 'submit'
needs: [build-linux]
if: github.event.inputs.artifact_destination == 'submit' && (github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'linux' || github.event.inputs.build_type == '')
needs: []
permissions:
contents: read
@@ -628,30 +708,27 @@ jobs:
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "release_date=$RELEASE_DATE" >> $GITHUB_OUTPUT
- name: Download Linux x64 AppImage artifact
uses: actions/download-artifact@v4
with:
name: termix_linux_x64_appimage
path: artifact-x64
- name: Download Linux arm64 AppImage artifact
uses: actions/download-artifact@v4
with:
name: termix_linux_arm64_appimage
path: artifact-arm64
- name: Get AppImage file info
- name: Download and prepare AppImage info from public release
id: appimage-info
run: |
VERSION="${{ steps.package-version.outputs.version }}"
mkdir -p release_assets
APPIMAGE_X64_FILE=$(find artifact-x64 -name "*.AppImage" -type f | head -n 1)
APPIMAGE_X64_NAME=$(basename "$APPIMAGE_X64_FILE")
CHECKSUM_X64=$(sha256sum "$APPIMAGE_X64_FILE" | awk '{print $1}')
APPIMAGE_X64_NAME="termix_linux_x64_appimage.AppImage"
URL_X64="https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$APPIMAGE_X64_NAME"
PATH_X64="release_assets/$APPIMAGE_X64_NAME"
echo "Downloading x64 AppImage from $URL_X64"
curl -L -o "$PATH_X64" "$URL_X64"
chmod +x "$PATH_X64"
CHECKSUM_X64=$(sha256sum "$PATH_X64" | awk '{print $1}')
APPIMAGE_ARM64_FILE=$(find artifact-arm64 -name "*.AppImage" -type f | head -n 1)
APPIMAGE_ARM64_NAME=$(basename "$APPIMAGE_ARM64_FILE")
CHECKSUM_ARM64=$(sha256sum "$APPIMAGE_ARM64_FILE" | awk '{print $1}')
APPIMAGE_ARM64_NAME="termix_linux_arm64_appimage.AppImage"
URL_ARM64="https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$APPIMAGE_ARM64_NAME"
PATH_ARM64="release_assets/$APPIMAGE_ARM64_NAME"
echo "Downloading arm64 AppImage from $URL_ARM64"
curl -L -o "$PATH_ARM64" "$URL_ARM64"
chmod +x "$PATH_ARM64"
CHECKSUM_ARM64=$(sha256sum "$PATH_ARM64" | awk '{print $1}')
echo "appimage_x64_name=$APPIMAGE_X64_NAME" >> $GITHUB_OUTPUT
echo "checksum_x64=$CHECKSUM_X64" >> $GITHUB_OUTPUT
@@ -690,10 +767,6 @@ jobs:
sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" flatpak-submission/com.karmaa.termix.metainfo.xml
sed -i "s/DATE_PLACEHOLDER/$RELEASE_DATE/g" flatpak-submission/com.karmaa.termix.metainfo.xml
- name: List submission files
run: |
ls -la flatpak-submission/
- name: Upload Flatpak submission as artifact
uses: actions/upload-artifact@v4
with:
@@ -703,8 +776,8 @@ jobs:
submit-to-homebrew:
runs-on: macos-latest
if: github.event.inputs.artifact_destination == 'submit'
needs: [build-macos]
if: github.event.inputs.artifact_destination == 'submit' && (github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'macos')
needs: []
permissions:
contents: read
@@ -720,19 +793,28 @@ jobs:
VERSION=$(node -p "require('./package.json').version")
echo "version=$VERSION" >> $GITHUB_OUTPUT
- name: Download macOS Universal DMG artifact
uses: actions/download-artifact@v4
with:
name: termix_macos_universal_dmg
path: artifact
- name: Get DMG file info
- name: Download and prepare DMG info from public release
id: dmg-info
run: |
VERSION="${{ steps.package-version.outputs.version }}"
DMG_FILE=$(find artifact -name "*.dmg" -type f | head -n 1)
DMG_NAME=$(basename "$DMG_FILE")
CHECKSUM=$(shasum -a 256 "$DMG_FILE" | awk '{print $1}')
DMG_NAME="termix_macos_universal_dmg.dmg"
URL="https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$DMG_NAME"
mkdir -p release_asset
DOWNLOAD_PATH="release_asset/$DMG_NAME"
echo "Downloading DMG from $URL"
if command -v curl &> /dev/null; then
curl -L -o "$DOWNLOAD_PATH" "$URL"
elif command -v wget &> /dev/null; then
wget -O "$DOWNLOAD_PATH" "$URL"
else
echo "Neither curl nor wget is available, installing curl"
brew install curl
curl -L -o "$DOWNLOAD_PATH" "$URL"
fi
CHECKSUM=$(shasum -a 256 "$DOWNLOAD_PATH" | awk '{print $1}')
echo "dmg_name=$DMG_NAME" >> $GITHUB_OUTPUT
echo "checksum=$CHECKSUM" >> $GITHUB_OUTPUT
@@ -745,24 +827,15 @@ jobs:
mkdir -p homebrew-submission/Casks/t
cp homebrew/termix.rb homebrew-submission/Casks/t/termix.rb
cp homebrew/README.md homebrew-submission/
cp Casks/termix.rb homebrew-submission/Casks/t/termix.rb
sed -i '' "s/VERSION_PLACEHOLDER/$VERSION/g" homebrew-submission/Casks/t/termix.rb
sed -i '' "s/CHECKSUM_PLACEHOLDER/$CHECKSUM/g" homebrew-submission/Casks/t/termix.rb
- name: Verify Cask syntax
run: |
if ! command -v brew &> /dev/null; then
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
fi
ruby -c homebrew-submission/Casks/t/termix.rb
- name: List submission files
run: |
find homebrew-submission -type f
- name: Upload Homebrew submission as artifact
uses: actions/upload-artifact@v4
with:
@@ -790,10 +863,6 @@ jobs:
env:
GH_TOKEN: ${{ github.token }}
- name: Display artifact structure
run: |
ls -R artifacts/
- name: Upload artifacts to latest release
run: |
cd artifacts
@@ -809,3 +878,130 @@ jobs:
done
env:
GH_TOKEN: ${{ github.token }}
submit-to-testflight:
runs-on: macos-latest
if: github.event.inputs.artifact_destination == 'submit' && (github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'macos')
needs: []
permissions:
contents: write
steps:
- name: Checkout repository
uses: actions/checkout@v5
with:
fetch-depth: 1
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: "20"
cache: "npm"
- name: Install dependencies
run: |
for i in 1 2 3;
do
if npm ci; then
break
else
if [ $i -eq 3 ]; then
exit 1
fi
sleep 10
fi
done
npm install --force @rollup/rollup-darwin-arm64
npm install dmg-license
- name: Check for Code Signing Certificates
id: check_certs
run: |
if [ -n "${{ secrets.MAC_BUILD_CERTIFICATE_BASE64 }}" ] && [ -n "${{ secrets.MAC_P12_PASSWORD }}" ]; then
echo "has_certs=true" >> $GITHUB_OUTPUT
fi
- name: Import Code Signing Certificates
if: steps.check_certs.outputs.has_certs == 'true'
env:
MAC_BUILD_CERTIFICATE_BASE64: ${{ secrets.MAC_BUILD_CERTIFICATE_BASE64 }}
MAC_INSTALLER_CERTIFICATE_BASE64: ${{ secrets.MAC_INSTALLER_CERTIFICATE_BASE64 }}
MAC_P12_PASSWORD: ${{ secrets.MAC_P12_PASSWORD }}
MAC_KEYCHAIN_PASSWORD: ${{ secrets.MAC_KEYCHAIN_PASSWORD }}
run: |
APP_CERT_PATH=$RUNNER_TEMP/app_certificate.p12
INSTALLER_CERT_PATH=$RUNNER_TEMP/installer_certificate.p12
KEYCHAIN_PATH=$RUNNER_TEMP/app-signing.keychain-db
echo -n "$MAC_BUILD_CERTIFICATE_BASE64" | base64 --decode -o $APP_CERT_PATH
if [ -n "$MAC_INSTALLER_CERTIFICATE_BASE64" ]; then
echo -n "$MAC_INSTALLER_CERTIFICATE_BASE64" | base64 --decode -o $INSTALLER_CERT_PATH
fi
security create-keychain -p "$MAC_KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
security set-keychain-settings -lut 21600 $KEYCHAIN_PATH
security unlock-keychain -p "$MAC_KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
security import $APP_CERT_PATH -P "$MAC_P12_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
if [ -f "$INSTALLER_CERT_PATH" ]; then
security import $INSTALLER_CERT_PATH -P "$MAC_P12_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
fi
security list-keychain -d user -s $KEYCHAIN_PATH
security find-identity -v -p codesigning $KEYCHAIN_PATH
- name: Build macOS App Store Package
if: steps.check_certs.outputs.has_certs == 'true'
env:
ELECTRON_BUILDER_ALLOW_UNRESOLVED_DEPENDENCIES: true
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
CURRENT_VERSION=$(node -p "require('./package.json').version")
BUILD_VERSION="${{ github.run_number }}"
npm run build && npx electron-builder --mac mas --universal --config.buildVersion="$BUILD_VERSION"
- name: Check for App Store Connect API credentials
id: check_asc_creds
run: |
if [ -n "${{ secrets.APPLE_KEY_ID }}" ] && [ -n "${{ secrets.APPLE_ISSUER_ID }}" ] && [ -n "${{ secrets.APPLE_KEY_CONTENT }}" ]; then
echo "has_credentials=true" >> $GITHUB_OUTPUT
fi
- name: Setup Ruby for Fastlane
if: steps.check_asc_creds.outputs.has_credentials == 'true'
uses: ruby/setup-ruby@v1
with:
ruby-version: "3.2"
bundler-cache: false
- name: Install Fastlane
if: steps.check_asc_creds.outputs.has_credentials == 'true'
run: |
gem install fastlane -N
- name: Deploy to App Store Connect (TestFlight)
if: steps.check_asc_creds.outputs.has_credentials == 'true'
run: |
PKG_FILE=$(find release -name "termix_macos_universal_mas.pkg" -type f | head -n 1)
if [ -z "$PKG_FILE" ]; then
echo "PKG file not found, exiting."
exit 1
fi
mkdir -p ~/private_keys
echo "${{ secrets.APPLE_KEY_CONTENT }}" | base64 --decode > ~/private_keys/AuthKey_${{ secrets.APPLE_KEY_ID }}.p8
xcrun altool --upload-app -f "$PKG_FILE" \
--type macos \
--apiKey "${{ secrets.APPLE_KEY_ID }}" \
--apiIssuer "${{ secrets.APPLE_ISSUER_ID }}"
continue-on-error: true
- name: Clean up keychains
if: always()
run: |
security delete-keychain $RUNNER_TEMP/app-signing.keychain-db || true

32
.github/workflows/openapi.yml vendored Normal file
View File

@@ -0,0 +1,32 @@
name: Generate OpenAPI Specification
on:
workflow_dispatch:
jobs:
generate-openapi:
name: Generate OpenAPI JSON
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
cache: 'npm'
- name: Install dependencies
run: npm ci
- name: Generate OpenAPI specification
run: npm run generate:openapi
- name: Upload OpenAPI artifact
uses: actions/upload-artifact@v4
with:
name: openapi-spec
path: openapi.json
retention-days: 90

1
.gitignore vendored
View File

@@ -28,3 +28,4 @@ dist-ssr
/.mcp.json
/nul
/.vscode/
/CLAUDE.md

View File

@@ -1,8 +1,8 @@
cask "termix" do
version "VERSION_PLACEHOLDER"
sha256 "CHECKSUM_PLACEHOLDER"
version "1.10.0"
sha256 "327c5026006c949f992447835aa6754113f731065b410bedbfa5da5af7cb2386"
url "https://github.com/Termix-SSH/Termix/releases/download/release-#{version}-tag/termix_macos_universal_#{version}_dmg.dmg"
url "https://github.com/Termix-SSH/Termix/releases/download/release-#{version}-tag/termix_macos_universal_dmg.dmg"
name "Termix"
desc "Web-based server management platform with SSH terminal, tunneling, and file editing"
homepage "https://github.com/Termix-SSH/Termix"

View File

@@ -1,61 +0,0 @@
# Termix Download Links
## Windows
| Architecture | Type | Download Link |
| ------------ | -------- | ---------------------------------------------------------------------------------------------------------- |
| x64 | NSIS | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_windows_x64_nsis.exe) |
| x64 | MSI | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_windows_x64_msi.msi) |
| x64 | Portable | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_windows_x64_portable.zip) |
| ia32 | NSIS | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_windows_ia32_nsis.exe) |
| ia32 | MSI | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_windows_ia32_msi.msi) |
| ia32 | Portable | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_windows_ia32_portable.zip) |
## Linux
| Architecture | Type | Download Link |
| ------------ | -------- | --------------------------------------------------------------------------------------------------------------- |
| x64 | AppImage | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_x64_appimage.AppImage) |
| x64 | DEB | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_x64_deb.deb) |
| x64 | Portable | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_x64_portable.tar.gz) |
| arm64 | AppImage | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_arm64_appimage.AppImage) |
| arm64 | DEB | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_arm64_deb.deb) |
| arm64 | Portable | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_arm64_portable.tar.gz) |
| armv7l | AppImage | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_armv7l_appimage.AppImage) |
| armv7l | DEB | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_armv7l_deb.deb) |
| armv7l | Portable | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_armv7l_portable.tar.gz) |
## macOS
| Architecture | Type | Download Link |
| ------------ | ------------- | -------------------------------------------------------------------------------------------------------- |
| Universal | DMG | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_macos_universal_dmg.dmg) |
| Universal | Mac App Store | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_macos_universal_mas.pkg) |
| x64 | DMG | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_macos_x64_dmg.dmg) |
| arm64 | DMG | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_macos_arm64_dmg.dmg) |
---
## All Platforms - Complete Download List
| Platform | Architecture | Type | Download Link |
| -------- | ------------ | ------------- | --------------------------------------------------------------------------------------------------------------- |
| Windows | x64 | NSIS | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_windows_x64_nsis.exe) |
| Windows | x64 | MSI | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_windows_x64_msi.msi) |
| Windows | x64 | Portable | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_windows_x64_portable.zip) |
| Windows | ia32 | NSIS | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_windows_ia32_nsis.exe) |
| Windows | ia32 | MSI | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_windows_ia32_msi.msi) |
| Windows | ia32 | Portable | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_windows_ia32_portable.zip) |
| Linux | x64 | AppImage | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_x64_appimage.AppImage) |
| Linux | x64 | DEB | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_x64_deb.deb) |
| Linux | x64 | Portable | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_x64_portable.tar.gz) |
| Linux | arm64 | AppImage | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_arm64_appimage.AppImage) |
| Linux | arm64 | DEB | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_arm64_deb.deb) |
| Linux | arm64 | Portable | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_arm64_portable.tar.gz) |
| Linux | armv7l | AppImage | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_armv7l_appimage.AppImage) |
| Linux | armv7l | DEB | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_armv7l_deb.deb) |
| Linux | armv7l | Portable | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_linux_armv7l_portable.tar.gz) |
| macOS | Universal | DMG | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_macos_universal_dmg.dmg) |
| macOS | Universal | Mac App Store | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_macos_universal_mas.pkg) |
| macOS | x64 | DMG | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_macos_x64_dmg.dmg) |
| macOS | arm64 | DMG | [Download](https://github.com/Termix-SSH/Termix/releases/latest/download/termix_macos_arm64_dmg.dmg) |

View File

@@ -51,17 +51,22 @@ Termix 是一个开源、永久免费、自托管的一体化服务器管理平
- **SSH 终端访问** - 功能齐全的终端,具有分屏支持(最多 4 个面板)和类似浏览器的选项卡系统。包括对自定义终端的支持,包括常见终端主题、字体和其他组件
- **SSH 隧道管理** - 创建和管理 SSH 隧道,具有自动重新连接和健康监控功能
- **远程文件管理器** - 直接在远程服务器上管理文件,支持查看和编辑代码、图像、音频和视频。无缝上传、下载、重命名、删除和移动文件
- **Docker 管理** - 启动、停止、暂停、删除容器。查看容器统计信息。使用 docker exec 终端控制容器。它不是用来替代 Portainer 或 Dockge,而是用于简单管理你的容器,而不是创建它们。
- **SSH 主机管理器** - 保存、组织和管理您的 SSH 连接,支持标签和文件夹,并轻松保存可重用的登录信息,同时能够自动部署 SSH 密钥
- **服务器统计** - 在任何 SSH 服务器上查看 CPU、内存和磁盘使用情况以及网络、正常运行时间和系统信息
- **仪表板** - 在仪表板上一目了然地查看服务器信息
- **用户认证** - 安全的用户管理,具有管理员控制以及 OIDC 和 2FA (TOTP) 支持。查看所有平台上的活动用户会话并撤销权限。
- **数据库加密** - 后端存储为加密的 SQLite 数据库文件
- **RBAC** - 创建角色并在用户/角色之间共享主机
- **用户认证** - 安全的用户管理,具有管理员控制以及 OIDC 和 2FA (TOTP) 支持。查看所有平台上的活动用户会话并撤销权限。将您的 OIDC/本地帐户链接在一起。
- **数据库加密** - 后端存储为加密的 SQLite 数据库文件。查看[文档](https://docs.termix.site/security)了解更多信息。
- **数据导出/导入** - 导出和导入 SSH 主机、凭据和文件管理器数据
- **自动 SSL 设置** - 内置 SSL 证书生成和管理,支持 HTTPS 重定向
- **现代用户界面** - 使用 React、Tailwind CSS 和 Shadcn 构建的简洁的桌面/移动设备友好界面
- **语言** - 内置支持英语、中文、德语和葡萄牙语
- **现代用户界面** - 使用 React、Tailwind CSS 和 Shadcn 构建的简洁的桌面/移动设备友好界面。可选择基于深色或浅色模式的用户界面。
- **语言** - 内置支持约 30 种语言(通过 Google 翻译批量翻译,结果可能有所不同)
- **平台支持** - 可作为 Web 应用程序、桌面应用程序(Windows、Linux 和 macOS),以及适用于 iOS 和 Android 的专用移动/平板电脑应用程序。
- **SSH 工具** - 创建可重用的命令片段,单击即可执行。在多个打开的终端上同时运行一个命令。
- **命令历史** - 自动完成并查看以前运行的 SSH 命令
- **命令面板** - 双击左 Shift 键可快速使用键盘访问 SSH 连接
- **SSH 功能丰富** - 支持跳板机、warpgate、基于 TOTP 的连接、SOCKS5、密码自动填充等。
# 计划功能
@@ -75,16 +80,16 @@ Termix 是一个开源、永久免费、自托管的一体化服务器管理平
- Windowsx64/ia32
- 便携版
- MSI 安装程序
- Chocolatey 软件包管理器
- Chocolatey 软件包管理器(即将推出)
- Linuxx64/ia32
- 便携版
- AppImage
- Deb
- Flatpak
- Flatpak(即将推出)
- macOSx64/ia32 on v12.0+
- Apple App Store
- Apple App Store(即将推出)
- DMG
- Homebrew
- Homebrew(即将推出)
- iOS/iPadOSv15.1+
- Apple App Store
- ISO
@@ -137,6 +142,12 @@ volumes:
<p align="center">
<img src="./repo-images/Image 7.png" width="400" alt="Termix Demo 7"/>
<img src="./repo-images/Image 8.png" width="400" alt="Termix Demo 8"/>
</p>
<p align="center">
<img src="./repo-images/Image 9.png" width="400" alt="Termix Demo 9"/>
<img src="./repo-images/Image 10.png" width="400" alt="Termix Demo 10"/>
</p>
<p align="center">
@@ -144,7 +155,7 @@ volumes:
你的浏览器不支持 video 标签。
</video>
</p>
视频和图像可能已过时。
某些视频和图像可能已过时或可能无法完美展示功能
# 许可证

View File

@@ -16,17 +16,6 @@
<small style="color: #666;">Achieved on September 1st, 2025</small>
</p>
#### Top Technologies
[![React Badge](https://img.shields.io/badge/-React-61DBFB?style=flat-square&labelColor=black&logo=react&logoColor=61DBFB)](#)
[![TypeScript Badge](https://img.shields.io/badge/-TypeScript-3178C6?style=flat-square&labelColor=black&logo=typescript&logoColor=3178C6)](#)
[![Node.js Badge](https://img.shields.io/badge/-Node.js-3C873A?style=flat-square&labelColor=black&logo=node.js&logoColor=3C873A)](#)
[![Vite Badge](https://img.shields.io/badge/-Vite-646CFF?style=flat-square&labelColor=black&logo=vite&logoColor=646CFF)](#)
[![Tailwind CSS Badge](https://img.shields.io/badge/-TailwindCSS-38B2AC?style=flat-square&labelColor=black&logo=tailwindcss&logoColor=38B2AC)](#)
[![Docker Badge](https://img.shields.io/badge/-Docker-2496ED?style=flat-square&labelColor=black&logo=docker&logoColor=2496ED)](#)
[![SQLite Badge](https://img.shields.io/badge/-SQLite-003B57?style=flat-square&labelColor=black&logo=sqlite&logoColor=003B57)](#)
[![Radix UI Badge](https://img.shields.io/badge/-Radix%20UI-161618?style=flat-square&labelColor=black&logo=radixui&logoColor=161618)](#)
<br />
<p align="center">
<a href="https://github.com/Termix-SSH/Termix">
@@ -45,7 +34,7 @@ If you would like, you can support the project here!\
Termix is an open-source, forever-free, self-hosted all-in-one server management platform. It provides a multi-platform
solution for managing your servers and infrastructure through a single, intuitive interface. Termix offers SSH terminal
access, SSH tunneling capabilities, and remote file management, with many more tools to come. Termix is the perfect
access, SSH tunneling capabilities, remote file management, and many other tools. Termix is the perfect
free and self-hosted alternative to Termius available for all platforms.
# Features
@@ -53,17 +42,22 @@ free and self-hosted alternative to Termius available for all platforms.
- **SSH Terminal Access** - Full-featured terminal with split-screen support (up to 4 panels) with a browser-like tab system. Includes support for customizing the terminal including common terminal themes, fonts, and other components
- **SSH Tunnel Management** - Create and manage SSH tunnels with automatic reconnection and health monitoring
- **Remote File Manager** - Manage files directly on remote servers with support for viewing and editing code, images, audio, and video. Upload, download, rename, delete, and move files seamlessly
- **Docker Management** - Start, stop, pause, remove containers. View container stats. Control container using docker exec terminal. It was not made to replace Portainer or Dockge but rather to simply manage your containers compared to creating them.
- **SSH Host Manager** - Save, organize, and manage your SSH connections with tags and folders, and easily save reusable login info while being able to automate the deployment of SSH keys
- **Server Stats** - View CPU, memory, and disk usage along with network, uptime, and system information on any SSH server
- **Dashboard** - View server information at a glance on your dashboard
- **User Authentication** - Secure user management with admin controls and OIDC and 2FA (TOTP) support. View active user sessions across all platforms and revoke permissions.
- **Database Encryption** - Backend stored as encrypted SQLite database files
- **RBAC** - Create roles and share hosts across users/roles
- **User Authentication** - Secure user management with admin controls and OIDC and 2FA (TOTP) support. View active user sessions across all platforms and revoke permissions. Link your OIDC/Local accounts together.
- **Database Encryption** - Backend stored as encrypted SQLite database files. View [docs](https://docs.termix.site/security) for more.
- **Data Export/Import** - Export and import SSH hosts, credentials, and file manager data
- **Automatic SSL Setup** - Built-in SSL certificate generation and management with HTTPS redirects
- **Modern UI** - Clean desktop/mobile-friendly interface built with React, Tailwind CSS, and Shadcn
- **Languages** - Built-in support for English, Chinese, German, and Portuguese
- **Modern UI** - Clean desktop/mobile-friendly interface built with React, Tailwind CSS, and Shadcn. Choose between dark or light mode based UI.
- **Languages** - Built-in support for ~30 languages (bulk translated via Google Translate, so results may vary)
- **Platform Support** - Available as a web app, desktop application (Windows, Linux, and macOS), and dedicated mobile/tablet app for iOS and Android.
- **SSH Tools** - Create reusable command snippets that execute with a single click. Run one command simultaneously across multiple open terminals.
- **Command History** - Auto-complete and view previously run SSH commands
- **Command Palette** - Double tap left shift to quickly access SSH connections with your keyboard
- **SSH Feature Rich** - Supports jump hosts, warpgate, TOTP based connections, SOCKS5, password autofill, etc.
# Planned Features
@@ -80,6 +74,7 @@ Supported Devices:
- Chocolatey Package Manager
- Linux (x64/ia32)
- Portable
- AUR
- AppImage
- Deb
- Flatpak
@@ -115,13 +110,25 @@ volumes:
driver: local
```
# Sponsors
<p align="left">
<a href="https://www.digitalocean.com/">
<img src="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_blue.svg" height="50" alt="DigitalOcean">
</a>
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://crowdin.com/">
<img src="https://support.crowdin.com/assets/logos/core-logo/svg/crowdin-core-logo-cDark.svg" height="50" alt="Crowdin">
</a>
</p>
# Support
If you need help or want to request a feature with Termix, visit the [Issues](https://github.com/Termix-SSH/Support/issues) page, log in, and press `New Issue`.
Please be as detailed as possible in your issue, preferably written in English. You can also join the [Discord](https://discord.gg/jVQGdvHDrf) server and visit the support
channel, however, response times may be longer.
# Show-off
# Screenshots
<p align="center">
<img src="./repo-images/Image 1.png" width="400" alt="Termix Demo 1"/>
@@ -140,6 +147,12 @@ channel, however, response times may be longer.
<p align="center">
<img src="./repo-images/Image 7.png" width="400" alt="Termix Demo 7"/>
<img src="./repo-images/Image 8.png" width="400" alt="Termix Demo 8"/>
</p>
<p align="center">
<img src="./repo-images/Image 9.png" width="400" alt="Termix Demo 9"/>
<img src="./repo-images/Image 10.png" width="400" alt="Termix Demo 10"/>
</p>
<p align="center">
@@ -147,7 +160,7 @@ channel, however, response times may be longer.
Your browser does not support the video tag.
</video>
</p>
Videos and images may be out of date.
Some videos and images may be out of date or may not perfectly showcase features.
# License

3
crowdin.yml Normal file
View File

@@ -0,0 +1,3 @@
files:
- source: /src/locales/en.json
translation: /src/locales/translated/%two_letters_code%.json

View File

@@ -19,7 +19,7 @@ COPY . .
RUN find public/fonts -name "*.ttf" ! -name "*Regular.ttf" ! -name "*Bold.ttf" ! -name "*Italic.ttf" -delete
RUN npm cache clean --force && \
npm run build
NODE_OPTIONS="--max-old-space-size=2048" npm run build
# Stage 3: Build backend
FROM deps AS backend-builder
@@ -53,16 +53,18 @@ ENV DATA_DIR=/app/data \
RUN apt-get update && apt-get install -y nginx gettext-base openssl && \
rm -rf /var/lib/apt/lists/* && \
mkdir -p /app/data /app/uploads && \
chown -R node:node /app/data /app/uploads && \
useradd -r -s /bin/false nginx
mkdir -p /app/data /app/uploads /app/nginx /app/nginx/logs /app/nginx/cache /app/nginx/client_body && \
chown -R node:node /app && \
chmod 755 /app/data /app/uploads /app/nginx && \
touch /app/nginx/nginx.conf && \
chown node:node /app/nginx/nginx.conf
COPY docker/nginx.conf /etc/nginx/nginx.conf
COPY docker/nginx-https.conf /etc/nginx/nginx-https.conf
COPY docker/nginx.conf /app/nginx/nginx.conf.template
COPY docker/nginx-https.conf /app/nginx/nginx-https.conf.template
COPY --chown=nginx:nginx --from=frontend-builder /app/dist /usr/share/nginx/html
COPY --chown=nginx:nginx --from=frontend-builder /app/src/locales /usr/share/nginx/html/locales
COPY --chown=nginx:nginx --from=frontend-builder /app/public/fonts /usr/share/nginx/html/fonts
COPY --chown=node:node --from=frontend-builder /app/dist /app/html
COPY --chown=node:node --from=frontend-builder /app/src/locales /app/html/locales
COPY --chown=node:node --from=frontend-builder /app/public/fonts /app/html/fonts
COPY --chown=node:node --from=production-deps /app/node_modules /app/node_modules
COPY --chown=node:node --from=backend-builder /app/dist/backend ./dist/backend
@@ -72,6 +74,12 @@ VOLUME ["/app/data"]
EXPOSE ${PORT} 30001 30002 30003 30004 30005 30006
HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 \
CMD node -e "require('http').get('http://localhost:30001/health', (r) => process.exit(r.statusCode === 200 ? 0 : 1)).on('error', () => process.exit(1))"
COPY docker/entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
USER node
CMD ["/entrypoint.sh"]

View File

@@ -11,24 +11,21 @@ echo "Configuring web UI to run on port: $PORT"
if [ "$ENABLE_SSL" = "true" ]; then
echo "SSL enabled - using HTTPS configuration with redirect"
NGINX_CONF_SOURCE="/etc/nginx/nginx-https.conf"
NGINX_CONF_SOURCE="/app/nginx/nginx-https.conf.template"
else
echo "SSL disabled - using HTTP-only configuration (default)"
NGINX_CONF_SOURCE="/etc/nginx/nginx.conf"
NGINX_CONF_SOURCE="/app/nginx/nginx.conf.template"
fi
envsubst '${PORT} ${SSL_PORT} ${SSL_CERT_PATH} ${SSL_KEY_PATH}' < $NGINX_CONF_SOURCE > /etc/nginx/nginx.conf.tmp
mv /etc/nginx/nginx.conf.tmp /etc/nginx/nginx.conf
envsubst '${PORT} ${SSL_PORT} ${SSL_CERT_PATH} ${SSL_KEY_PATH}' < $NGINX_CONF_SOURCE > /app/nginx/nginx.conf
mkdir -p /app/data /app/uploads
chown -R node:node /app/data /app/uploads
chmod 755 /app/data /app/uploads
chmod 755 /app/data /app/uploads 2>/dev/null || true
if [ "$ENABLE_SSL" = "true" ]; then
echo "Checking SSL certificate configuration..."
mkdir -p /app/data/ssl
chown -R node:node /app/data/ssl
chmod 755 /app/data/ssl
chmod 755 /app/data/ssl 2>/dev/null || true
DOMAIN=${SSL_DOMAIN:-localhost}
@@ -84,7 +81,6 @@ EOF
chmod 600 /app/data/ssl/termix.key
chmod 644 /app/data/ssl/termix.crt
chown node:node /app/data/ssl/termix.key /app/data/ssl/termix.crt
rm -f /app/data/ssl/openssl.conf
@@ -93,7 +89,7 @@ EOF
fi
echo "Starting nginx..."
nginx
nginx -c /app/nginx/nginx.conf
echo "Starting backend services..."
cd /app
@@ -110,11 +106,7 @@ else
echo "Warning: package.json not found"
fi
if command -v su-exec > /dev/null 2>&1; then
su-exec node node dist/backend/backend/starter.js
else
su -s /bin/sh node -c "node dist/backend/backend/starter.js"
fi
node dist/backend/backend/starter.js
echo "All services started"

View File

@@ -1,11 +1,24 @@
worker_processes 1;
master_process off;
pid /app/nginx/nginx.pid;
error_log /app/nginx/logs/error.log warn;
events {
worker_connections 1024;
}
http {
include mime.types;
include /etc/nginx/mime.types;
default_type application/octet-stream;
access_log /app/nginx/logs/access.log;
client_body_temp_path /app/nginx/client_body;
proxy_temp_path /app/nginx/proxy_temp;
fastcgi_temp_path /app/nginx/fastcgi_temp;
uwsgi_temp_path /app/nginx/uwsgi_temp;
scgi_temp_path /app/nginx/scgi_temp;
sendfile on;
keepalive_timeout 65;
client_header_timeout 300s;
@@ -37,9 +50,17 @@ http {
add_header X-Content-Type-Options nosniff always;
add_header X-XSS-Protection "1; mode=block" always;
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
root /app/html;
expires 1y;
add_header Cache-Control "public, immutable";
try_files $uri =404;
}
location / {
root /usr/share/nginx/html;
root /app/html;
index index.html index.htm;
try_files $uri $uri/ /index.html;
}
location ~* \.map$ {
@@ -93,6 +114,15 @@ http {
proxy_set_header X-Forwarded-Proto $scheme;
}
location ~ ^/rbac(/.*)?$ {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
location ~ ^/credentials(/.*)?$ {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
@@ -115,6 +145,15 @@ http {
proxy_set_header X-Forwarded-Proto $scheme;
}
location ~ ^/terminal(/.*)?$ {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
location ~ ^/database(/.*)?$ {
client_max_body_size 5G;
client_body_timeout 300s;
@@ -162,6 +201,18 @@ http {
proxy_set_header X-Forwarded-Proto $scheme;
}
location /ssh/quick-connect {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
location /ssh/ {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
@@ -249,6 +300,15 @@ http {
proxy_buffering off;
}
location ~ ^/network-topology(/.*)?$ {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
location /health {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
@@ -274,6 +334,10 @@ http {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_connect_timeout 60s;
proxy_send_timeout 60s;
proxy_read_timeout 60s;
}
location ~ ^/uptime(/.*)?$ {
@@ -294,9 +358,54 @@ http {
proxy_set_header X-Forwarded-Proto $scheme;
}
location ~ ^/dashboard/preferences(/.*)?$ {
proxy_pass http://127.0.0.1:30006;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
location ^~ /docker/console/ {
proxy_pass http://127.0.0.1:30008/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_read_timeout 86400s;
proxy_send_timeout 86400s;
proxy_connect_timeout 10s;
proxy_buffering off;
proxy_request_buffering off;
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503;
}
location ~ ^/docker(/.*)?$ {
proxy_pass http://127.0.0.1:30007;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_connect_timeout 60s;
proxy_send_timeout 300s;
proxy_read_timeout 300s;
}
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root /usr/share/nginx/html;
root /app/html;
}
}
}

View File

@@ -1,11 +1,24 @@
worker_processes 1;
master_process off;
pid /app/nginx/nginx.pid;
error_log /app/nginx/logs/error.log warn;
events {
worker_connections 1024;
}
http {
include mime.types;
include /etc/nginx/mime.types;
default_type application/octet-stream;
access_log /app/nginx/logs/access.log;
client_body_temp_path /app/nginx/client_body;
proxy_temp_path /app/nginx/proxy_temp;
fastcgi_temp_path /app/nginx/fastcgi_temp;
uwsgi_temp_path /app/nginx/uwsgi_temp;
scgi_temp_path /app/nginx/scgi_temp;
sendfile on;
keepalive_timeout 65;
client_header_timeout 300s;
@@ -27,14 +40,14 @@ http {
add_header X-XSS-Protection "1; mode=block" always;
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
root /usr/share/nginx/html;
root /app/html;
expires 1y;
add_header Cache-Control "public, immutable";
try_files $uri =404;
}
location / {
root /usr/share/nginx/html;
root /app/html;
index index.html index.htm;
try_files $uri $uri/ /index.html;
}
@@ -90,6 +103,15 @@ http {
proxy_set_header X-Forwarded-Proto $scheme;
}
location ~ ^/rbac(/.*)?$ {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
location ~ ^/credentials(/.*)?$ {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
@@ -112,6 +134,15 @@ http {
proxy_set_header X-Forwarded-Proto $scheme;
}
location ~ ^/terminal(/.*)?$ {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
location ~ ^/database(/.*)?$ {
client_max_body_size 5G;
client_body_timeout 300s;
@@ -159,6 +190,18 @@ http {
proxy_set_header X-Forwarded-Proto $scheme;
}
location /ssh/quick-connect {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
location /ssh/ {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
@@ -246,6 +289,15 @@ http {
proxy_buffering off;
}
location ~ ^/network-topology(/.*)?$ {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
location /health {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
@@ -271,6 +323,10 @@ http {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_connect_timeout 60s;
proxy_send_timeout 60s;
proxy_read_timeout 60s;
}
location ~ ^/uptime(/.*)?$ {
@@ -291,9 +347,54 @@ http {
proxy_set_header X-Forwarded-Proto $scheme;
}
# Dashboard layout-preference API. Unlike most API locations in this config
# (which proxy to the main backend on 30001), this one is served by the
# dedicated dashboard service listening on port 30006.
location ~ ^/dashboard/preferences(/.*)?$ {
proxy_pass http://127.0.0.1:30006;
proxy_http_version 1.1;
# Standard reverse-proxy forwarding headers.
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
# Docker interactive-console WebSocket endpoint (port 30008).
# "^~" prefix matching takes priority over the regex /docker location, so
# console traffic is never sent to the generic docker API upstream. The
# trailing "/" on proxy_pass makes nginx strip the matched /docker/console/
# prefix before forwarding.
location ^~ /docker/console/ {
proxy_pass http://127.0.0.1:30008/;
proxy_http_version 1.1;
# WebSocket upgrade handshake.
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
# Long-lived interactive sessions: allow idle connections up to 24h,
# but fail fast (10s) if the console service is down.
proxy_read_timeout 86400s;
proxy_send_timeout 86400s;
proxy_connect_timeout 10s;
# Disable buffering so console I/O streams in real time.
proxy_buffering off;
proxy_request_buffering off;
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503;
}
# Generic Docker management API (port 30007). Console traffic is handled by
# the more-specific ^~ /docker/console/ location and never reaches here.
location ~ ^/docker(/.*)?$ {
proxy_pass http://127.0.0.1:30007;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
# Docker operations (pull, build, etc.) can be slow; allow 5-minute
# request/response transfers while keeping connect failures fast.
proxy_connect_timeout 60s;
proxy_send_timeout 300s;
proxy_read_timeout 300s;
}
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root /usr/share/nginx/html;
root /app/html;
}
}
}

View File

@@ -124,5 +124,6 @@
"ITSAppUsesNonExemptEncryption": false,
"NSAppleEventsUsageDescription": "Termix needs access to control other applications for terminal operations."
}
}
},
"generateUpdatesFilesForAllChannels": true
}

View File

@@ -11,13 +11,9 @@ const fs = require("fs");
const os = require("os");
if (process.platform === "linux") {
app.commandLine.appendSwitch("--no-sandbox");
app.commandLine.appendSwitch("--disable-setuid-sandbox");
app.commandLine.appendSwitch("--disable-dev-shm-usage");
app.commandLine.appendSwitch("--ozone-platform-hint=auto");
app.disableHardwareAcceleration();
app.commandLine.appendSwitch("--disable-gpu");
app.commandLine.appendSwitch("--disable-gpu-compositing");
app.commandLine.appendSwitch("--enable-features=VaapiVideoDecoder");
}
app.commandLine.appendSwitch("--ignore-certificate-errors");
@@ -395,6 +391,48 @@ ipcMain.handle("save-server-config", (event, config) => {
}
});
// IPC: read a single value from the per-user settings.json in userData.
// Returns null when the file or the key is absent, or on any read error.
ipcMain.handle("get-setting", (event, key) => {
  try {
    const settingsPath = path.join(app.getPath("userData"), "settings.json");

    // No settings file yet — nothing has ever been stored.
    if (!fs.existsSync(settingsPath)) {
      return null;
    }

    const settings = JSON.parse(fs.readFileSync(settingsPath, "utf8"));
    const value = settings[key];
    return value === undefined ? null : value;
  } catch (error) {
    // Unreadable file or corrupt JSON: treat the setting as unset.
    console.error("Error reading setting:", error);
    return null;
  }
});
// IPC: persist a single key/value pair into settings.json in userData.
// Resolves to { success: true } on success, { success: false, error } otherwise.
ipcMain.handle("set-setting", (event, key, value) => {
  try {
    const userDataPath = app.getPath("userData");
    const settingsPath = path.join(userDataPath, "settings.json");

    // First run: the userData directory may not exist yet.
    if (!fs.existsSync(userDataPath)) {
      fs.mkdirSync(userDataPath, { recursive: true });
    }

    // Read-modify-write: merge the new key into whatever is on disk.
    const current = fs.existsSync(settingsPath)
      ? JSON.parse(fs.readFileSync(settingsPath, "utf8"))
      : {};
    current[key] = value;
    fs.writeFileSync(settingsPath, JSON.stringify(current, null, 2));

    return { success: true };
  } catch (error) {
    console.error("Error saving setting:", error);
    return { success: false, error: error.message };
  }
});
ipcMain.handle("test-server-connection", async (event, serverUrl) => {
try {
const https = require("https");

View File

@@ -2,26 +2,14 @@ const { contextBridge, ipcRenderer } = require("electron");
// Expose a whitelisted IPC surface to the renderer as window.electronAPI.
contextBridge.exposeInMainWorld("electronAPI", {
// App/environment metadata (resolved in the main process).
getAppVersion: () => ipcRenderer.invoke("get-app-version"),
getPlatform: () => ipcRenderer.invoke("get-platform"),
checkElectronUpdate: () => ipcRenderer.invoke("check-electron-update"),
// Server connection configuration stored by the main process.
getServerConfig: () => ipcRenderer.invoke("get-server-config"),
saveServerConfig: (config) =>
ipcRenderer.invoke("save-server-config", config),
testServerConnection: (serverUrl) =>
ipcRenderer.invoke("test-server-connection", serverUrl),
// Native file dialogs.
showSaveDialog: (options) => ipcRenderer.invoke("show-save-dialog", options),
showOpenDialog: (options) => ipcRenderer.invoke("show-open-dialog", options),
// Auto-update event subscriptions.
onUpdateAvailable: (callback) => ipcRenderer.on("update-available", callback),
onUpdateDownloaded: (callback) =>
ipcRenderer.on("update-downloaded", callback),
removeAllListeners: (channel) => ipcRenderer.removeAllListeners(channel),
// Environment flags for feature detection in the web app.
isElectron: true,
isDev: process.env.NODE_ENV === "development",
// Per-key persistent settings (settings.json under userData).
getSetting: (key) => ipcRenderer.invoke("get-setting", key),
setSetting: (key, value) => ipcRenderer.invoke("set-setting", key, value),
// NOTE(review): generic pass-through that can invoke ANY channel — this
// bypasses the whitelist above. Confirm all registered handlers are safe
// to expose to renderer content.
invoke: (channel, ...args) => ipcRenderer.invoke(channel, ...args),
});

View File

@@ -1,7 +1,7 @@
[Desktop Entry]
Name=Termix
Comment=Web-based server management platform with SSH terminal, tunneling, and file editing
Exec=termix %U
Exec=run.sh %U
Icon=com.karmaa.termix
Terminal=false
Type=Application

View File

@@ -0,0 +1,12 @@
[Flatpak Ref]
Name=Termix
Branch=stable
Title=Termix - SSH Server Management Platform
IsRuntime=false
Url=https://github.com/Termix-SSH/Termix/releases/download/VERSION_PLACEHOLDER/termix_linux_flatpak.flatpak
GPGKey=
RuntimeRepo=https://flathub.org/repo/flathub.flatpakrepo
Comment=Web-based server management platform with SSH terminal, tunneling, and file editing
Description=Termix is an open-source, forever-free, self-hosted all-in-one server management platform. It provides SSH terminal access, tunneling capabilities, and remote file management.
Icon=https://raw.githubusercontent.com/Termix-SSH/Termix/main/public/icon.png
Homepage=https://github.com/Termix-SSH/Termix

View File

@@ -5,7 +5,7 @@
<summary>Web-based server management platform with SSH terminal, tunneling, and file editing</summary>
<metadata_license>CC0-1.0</metadata_license>
<project_license>GPL-3.0-or-later</project_license>
<project_license>Apache-2.0</project_license>
<developer_name>bugattiguy527</developer_name>

View File

@@ -1,10 +1,10 @@
app-id: com.karmaa.termix
runtime: org.freedesktop.Platform
runtime-version: "23.08"
runtime-version: "24.08"
sdk: org.freedesktop.Sdk
base: org.electronjs.Electron2.BaseApp
base-version: "23.08"
command: termix
base-version: "24.08"
command: run.sh
separate-locales: false
finish-args:
@@ -16,8 +16,11 @@ finish-args:
- --device=dri
- --filesystem=home
- --socket=ssh-auth
- --talk-name=org.freedesktop.Notifications
- --socket=session-bus
- --talk-name=org.freedesktop.secrets
- --env=ELECTRON_TRASH=gio
- --env=XCURSOR_PATH=/run/host/user-share/icons:/run/host/share/icons
- --env=ELECTRON_OZONE_PLATFORM_HINT=auto
modules:
- name: termix
@@ -30,6 +33,21 @@ modules:
- cp -r squashfs-root/resources /app/bin/
- cp -r squashfs-root/locales /app/bin/ || true
- cp squashfs-root/*.so /app/bin/ || true
- cp squashfs-root/*.pak /app/bin/ || true
- cp squashfs-root/*.bin /app/bin/ || true
- cp squashfs-root/*.dat /app/bin/ || true
- cp squashfs-root/*.json /app/bin/ || true
- |
cat > run.sh << 'EOF'
#!/bin/bash
export TMPDIR="$XDG_RUNTIME_DIR/app/$FLATPAK_ID"
exec zypak-wrapper /app/bin/termix "$@"
EOF
- chmod +x run.sh
- install -Dm755 run.sh /app/bin/run.sh
- install -Dm644 com.karmaa.termix.desktop /app/share/applications/com.karmaa.termix.desktop
- install -Dm644 com.karmaa.termix.metainfo.xml /app/share/metainfo/com.karmaa.termix.metainfo.xml
@@ -40,14 +58,14 @@ modules:
sources:
- type: file
url: https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_x64_VERSION_PLACEHOLDER_appimage.AppImage
url: https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_x64_appimage.AppImage
sha256: CHECKSUM_X64_PLACEHOLDER
dest-filename: termix.AppImage
only-arches:
- x86_64
- type: file
url: https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_arm64_VERSION_PLACEHOLDER_appimage.AppImage
url: https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_arm64_appimage.AppImage
sha256: CHECKSUM_ARM64_PLACEHOLDER
dest-filename: termix.AppImage
only-arches:

View File

@@ -1,34 +0,0 @@
#!/bin/bash
set -e
VERSION="$1"
CHECKSUM="$2"
RELEASE_DATE="$3"
if [ -z "$VERSION" ] || [ -z "$CHECKSUM" ] || [ -z "$RELEASE_DATE" ]; then
echo "Usage: $0 <version> <checksum> <release-date>"
echo "Example: $0 1.8.0 abc123... 2025-10-26"
exit 1
fi
echo "Preparing Flatpak submission for version $VERSION"
cp public/icon.svg flatpak/com.karmaa.termix.svg
echo "✓ Copied SVG icon"
if command -v convert &> /dev/null; then
convert public/icon.png -resize 256x256 flatpak/icon-256.png
convert public/icon.png -resize 128x128 flatpak/icon-128.png
echo "✓ Generated PNG icons"
else
cp public/icon.png flatpak/icon-256.png
cp public/icon.png flatpak/icon-128.png
echo "⚠ ImageMagick not found, using original icon"
fi
sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" flatpak/com.karmaa.termix.yml
sed -i "s/CHECKSUM_PLACEHOLDER/$CHECKSUM/g" flatpak/com.karmaa.termix.yml
echo "✓ Updated manifest with version $VERSION"
sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" flatpak/com.karmaa.termix.metainfo.xml
sed -i "s/DATE_PLACEHOLDER/$RELEASE_DATE/g" flatpak/com.karmaa.termix.metainfo.xml

View File

@@ -4,6 +4,13 @@
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<!-- PWA Meta Tags -->
<meta name="theme-color" content="#09090b" />
<meta name="apple-mobile-web-app-capable" content="yes" />
<meta name="apple-mobile-web-app-status-bar-style" content="black-translucent" />
<meta name="apple-mobile-web-app-title" content="Termix" />
<link rel="apple-touch-icon" href="/icons/512x512.png" />
<link rel="manifest" href="/manifest.json" />
<title>Termix</title>
<style>
.hide-scrollbar {

File diff suppressed because it is too large Load Diff

1404
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
{
"name": "termix",
"private": true,
"version": "1.8.0",
"version": "1.10.1",
"description": "A web-based server management platform with SSH terminal, tunneling, and file editing capabilities",
"author": "Karmaa",
"main": "electron/main.cjs",
@@ -17,6 +17,7 @@
"build": "vite build && tsc -p tsconfig.node.json",
"build:backend": "tsc -p tsconfig.node.json",
"dev:backend": "tsc -p tsconfig.node.json && node ./dist/backend/backend/starter.js",
"generate:openapi": "tsc -p tsconfig.node.json && node ./dist/backend/backend/swagger.js",
"preview": "vite preview",
"electron:dev": "concurrently \"npm run dev\" \"powershell -c \\\"Start-Sleep -Seconds 5\\\" && electron .\"",
"build:win-portable": "npm run build && electron-builder --win --dir",
@@ -35,6 +36,7 @@
"@hookform/resolvers": "^5.1.1",
"@monaco-editor/react": "^4.7.0",
"@radix-ui/react-accordion": "^1.2.11",
"@radix-ui/react-alert-dialog": "^1.1.15",
"@radix-ui/react-checkbox": "^1.3.2",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-dropdown-menu": "^2.1.15",
@@ -45,18 +47,20 @@
"@radix-ui/react-select": "^2.2.5",
"@radix-ui/react-separator": "^1.1.7",
"@radix-ui/react-slider": "^1.3.6",
"@radix-ui/react-slot": "^1.2.3",
"@radix-ui/react-slot": "^1.2.4",
"@radix-ui/react-switch": "^1.2.5",
"@radix-ui/react-tabs": "^1.1.12",
"@radix-ui/react-tooltip": "^1.2.8",
"@tailwindcss/vite": "^4.1.14",
"@types/bcryptjs": "^2.4.6",
"@types/cookie-parser": "^1.4.9",
"@types/cytoscape": "^3.21.9",
"@types/jszip": "^3.4.0",
"@types/multer": "^2.0.0",
"@types/qrcode": "^1.5.5",
"@types/speakeasy": "^2.0.10",
"@uiw/codemirror-extensions-langs": "^4.24.1",
"@uiw/codemirror-theme-github": "^4.25.4",
"@uiw/react-codemirror": "^4.24.1",
"@xterm/addon-clipboard": "^0.1.0",
"@xterm/addon-fit": "^0.10.0",
@@ -70,11 +74,14 @@
"chalk": "^4.1.2",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"cmdk": "^1.1.1",
"cookie-parser": "^1.4.7",
"cors": "^2.8.5",
"cytoscape": "^3.33.1",
"dotenv": "^17.2.0",
"drizzle-orm": "^0.44.3",
"express": "^5.1.0",
"i18n-auto-translation": "^2.2.3",
"i18next": "^25.4.2",
"i18next-browser-languagedetector": "^8.2.0",
"jose": "^5.2.3",
@@ -87,6 +94,7 @@
"node-fetch": "^3.3.2",
"qrcode": "^1.5.4",
"react": "^19.1.0",
"react-cytoscapejs": "^2.0.0",
"react-dom": "^19.1.0",
"react-h5-audio-player": "^3.10.1",
"react-hook-form": "^7.60.0",
@@ -102,6 +110,7 @@
"react-xtermjs": "^1.0.10",
"recharts": "^3.2.1",
"remark-gfm": "^4.0.1",
"socks": "^2.8.7",
"sonner": "^2.0.7",
"speakeasy": "^2.0.0",
"ssh2": "^1.16.0",
@@ -136,6 +145,7 @@
"husky": "^9.1.7",
"lint-staged": "^16.2.3",
"prettier": "3.6.2",
"swagger-jsdoc": "^6.2.8",
"typescript": "~5.9.2",
"typescript-eslint": "^8.40.0",
"vite": "^7.1.5"

40
public/manifest.json Normal file
View File

@@ -0,0 +1,40 @@
{
"name": "Termix",
"short_name": "Termix",
"description": "A web-based server management platform with SSH terminal, tunneling, and file editing capabilities",
"theme_color": "#09090b",
"background_color": "#09090b",
"display": "standalone",
"orientation": "any",
"scope": "/",
"start_url": "/",
"icons": [
{
"src": "/icons/48x48.png",
"sizes": "48x48",
"type": "image/png"
},
{
"src": "/icons/64x64.png",
"sizes": "64x64",
"type": "image/png"
},
{
"src": "/icons/128x128.png",
"sizes": "128x128",
"type": "image/png"
},
{
"src": "/icons/256x256.png",
"sizes": "256x256",
"type": "image/png"
},
{
"src": "/icons/512x512.png",
"sizes": "512x512",
"type": "image/png",
"purpose": "any maskable"
}
],
"categories": ["utilities", "developer", "productivity"]
}

120
public/sw.js Normal file
View File

@@ -0,0 +1,120 @@
/**
 * Termix Service Worker
 * Handles caching for offline PWA support
 */
// Version tag for the cache. Bumping this name causes every previously
// cached asset to be discarded by the "activate" handler below.
const CACHE_NAME = "termix-v1";
// App-shell assets pre-cached at install time so the UI loads offline.
// NOTE(review): manifest.json also lists /icons/64x64.png, which is not
// pre-cached here — presumably intentional; confirm.
const STATIC_ASSETS = [
"/",
"/index.html",
"/manifest.json",
"/favicon.ico",
"/icons/48x48.png",
"/icons/128x128.png",
"/icons/256x256.png",
"/icons/512x512.png",
];
// Install: pre-cache the static app shell, then activate without waiting
// for existing service-worker clients to go away.
self.addEventListener("install", (event) => {
  event.waitUntil(
    (async () => {
      const cache = await caches.open(CACHE_NAME);
      console.log("[SW] Caching static assets");
      await cache.addAll(STATIC_ASSETS);
      // Activate immediately without waiting
      await self.skipWaiting();
    })(),
  );
});
// Activate: delete caches left over from previous versions, then take
// control of every open page immediately.
self.addEventListener("activate", (event) => {
  event.waitUntil(
    (async () => {
      const allNames = await caches.keys();
      const staleNames = allNames.filter((name) => name !== CACHE_NAME);
      await Promise.all(
        staleNames.map((name) => {
          console.log("[SW] Deleting old cache:", name);
          return caches.delete(name);
        }),
      );
      // Take control of all pages immediately
      await self.clients.claim();
    })(),
  );
});
// Fetch: network-first for page navigations (falling back to the cached
// index.html when offline), cache-first for all other same-origin GETs.
// API and WebSocket paths are never intercepted.
self.addEventListener("fetch", (event) => {
  const { request } = event;
  const url = new URL(request.url);

  // Only GET requests are cacheable.
  if (request.method !== "GET") {
    return;
  }
  // API traffic must always hit the network.
  if (url.pathname.startsWith("/api/") || url.pathname.startsWith("/ws")) {
    return;
  }
  // Leave cross-origin requests to the browser.
  if (url.origin !== self.location.origin) {
    return;
  }

  // HTML navigations: try the network, cache a copy, fall back when offline.
  if (request.mode === "navigate") {
    event.respondWith(
      (async () => {
        try {
          const response = await fetch(request);
          const copy = response.clone();
          caches.open(CACHE_NAME).then((cache) => cache.put(request, copy));
          return response;
        } catch {
          // Offline: serve the cached app shell.
          return caches.match("/index.html");
        }
      })(),
    );
    return;
  }

  // Everything else: serve from cache when possible, otherwise fetch and
  // cache successful same-origin ("basic") responses for next time.
  event.respondWith(
    (async () => {
      const cached = await caches.match(request);
      if (cached) {
        return cached;
      }
      const response = await fetch(request);
      if (!response || response.status !== 200 || response.type !== "basic") {
        return response;
      }
      const copy = response.clone();
      caches.open(CACHE_NAME).then((cache) => cache.put(request, copy));
      return response;
    })(),
  );
});

BIN
repo-images/Image 10.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 158 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 407 KiB

After

Width:  |  Height:  |  Size: 355 KiB

BIN
repo-images/Image 8.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 227 KiB

BIN
repo-images/Image 9.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 153 KiB

View File

@@ -1,9 +1,14 @@
import express from "express";
import cors from "cors";
import cookieParser from "cookie-parser";
import { getDb } from "./database/db/index.js";
import { recentActivity, sshData } from "./database/db/schema.js";
import { eq, and, desc } from "drizzle-orm";
import { getDb, DatabaseSaveTrigger } from "./database/db/index.js";
import {
recentActivity,
sshData,
hostAccess,
dashboardPreferences,
} from "./database/db/schema.js";
import { eq, and, desc, or, sql } from "drizzle-orm";
import { dashboardLogger } from "./utils/logger.js";
import { SimpleDBOps } from "./utils/simple-db-ops.js";
import { AuthManager } from "./utils/auth-manager.js";
@@ -15,7 +20,7 @@ const authManager = AuthManager.getInstance();
const serverStartTime = Date.now();
const activityRateLimiter = new Map<string, number>();
const RATE_LIMIT_MS = 1000; // 1 second window
const RATE_LIMIT_MS = 1000;
app.use(
cors({
@@ -58,6 +63,31 @@ app.use(express.json({ limit: "1mb" }));
app.use(authManager.createAuthMiddleware());
/**
* @openapi
* /uptime:
* get:
* summary: Get server uptime
* description: Returns the uptime of the server in various formats.
* tags:
* - Dashboard
* responses:
* 200:
* description: Server uptime information.
* content:
* application/json:
* schema:
* type: object
* properties:
* uptimeMs:
* type: number
* uptimeSeconds:
* type: number
* formatted:
* type: string
* 500:
* description: Failed to get uptime.
*/
app.get("/uptime", async (req, res) => {
try {
const uptimeMs = Date.now() - serverStartTime;
@@ -77,6 +107,28 @@ app.get("/uptime", async (req, res) => {
}
});
/**
* @openapi
* /activity/recent:
* get:
* summary: Get recent activity
* description: Fetches the most recent activities for the authenticated user.
* tags:
* - Dashboard
* parameters:
* - in: query
* name: limit
* schema:
* type: integer
* description: The maximum number of activities to return.
* responses:
* 200:
* description: A list of recent activities.
* 401:
* description: Session expired.
* 500:
* description: Failed to get recent activity.
*/
app.get("/activity/recent", async (req, res) => {
try {
const userId = (req as AuthenticatedRequest).userId;
@@ -108,6 +160,40 @@ app.get("/activity/recent", async (req, res) => {
}
});
/**
* @openapi
* /activity/log:
* post:
* summary: Log a new activity
* description: Logs a new user activity, such as accessing a terminal or file manager. This endpoint is rate-limited.
* tags:
* - Dashboard
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* type:
* type: string
* enum: [terminal, file_manager, server_stats, tunnel, docker]
* hostId:
* type: integer
* hostName:
* type: string
* responses:
* 200:
* description: Activity logged successfully or rate-limited.
* 400:
* description: Invalid request body.
* 401:
* description: Session expired.
* 404:
* description: Host not found or access denied.
* 500:
* description: Failed to log activity.
*/
app.post("/activity/log", async (req, res) => {
try {
const userId = (req as AuthenticatedRequest).userId;
@@ -127,9 +213,18 @@ app.post("/activity/log", async (req, res) => {
});
}
if (type !== "terminal" && type !== "file_manager") {
if (
![
"terminal",
"file_manager",
"server_stats",
"tunnel",
"docker",
].includes(type)
) {
return res.status(400).json({
error: "Invalid activity type. Must be 'terminal' or 'file_manager'",
error:
"Invalid activity type. Must be 'terminal', 'file_manager', 'server_stats', 'tunnel', or 'docker'",
});
}
@@ -155,7 +250,7 @@ app.post("/activity/log", async (req, res) => {
entriesToDelete.forEach((key) => activityRateLimiter.delete(key));
}
const hosts = await SimpleDBOps.select(
const ownedHosts = await SimpleDBOps.select(
getDb()
.select()
.from(sshData)
@@ -164,8 +259,19 @@ app.post("/activity/log", async (req, res) => {
userId,
);
if (hosts.length === 0) {
return res.status(404).json({ error: "Host not found" });
if (ownedHosts.length === 0) {
const sharedHosts = await getDb()
.select()
.from(hostAccess)
.where(
and(eq(hostAccess.hostId, hostId), eq(hostAccess.userId, userId)),
);
if (sharedHosts.length === 0) {
return res
.status(404)
.json({ error: "Host not found or access denied" });
}
}
const result = (await SimpleDBOps.insert(
@@ -204,6 +310,22 @@ app.post("/activity/log", async (req, res) => {
}
});
/**
* @openapi
* /activity/reset:
* delete:
* summary: Reset recent activity
* description: Clears all recent activity for the authenticated user.
* tags:
* - Dashboard
* responses:
* 200:
* description: Recent activity cleared.
* 401:
* description: Session expired.
* 500:
* description: Failed to reset activity.
*/
app.delete("/activity/reset", async (req, res) => {
try {
const userId = (req as AuthenticatedRequest).userId;
@@ -233,6 +355,166 @@ app.delete("/activity/reset", async (req, res) => {
}
});
/**
* @openapi
* /dashboard/preferences:
* get:
* summary: Get dashboard layout preferences
* description: Returns the user's customized dashboard layout settings. If no preferences exist, returns default layout.
* tags:
* - Dashboard
* responses:
* 200:
* description: Dashboard preferences retrieved
* content:
* application/json:
* schema:
* type: object
* properties:
* cards:
* type: array
* items:
* type: object
* properties:
* id:
* type: string
* enabled:
* type: boolean
* order:
* type: integer
* gridColumns:
* type: integer
* 401:
* description: Session expired
* 500:
* description: Failed to get preferences
*/
// GET /dashboard/preferences — return the caller's saved dashboard layout,
// or a built-in default layout when no row has been stored yet.
app.get("/dashboard/preferences", async (req, res) => {
  try {
    const userId = (req as AuthenticatedRequest).userId;

    // The per-user data key must be unlocked for this session.
    if (!SimpleDBOps.isUserDataUnlocked(userId)) {
      return res.status(401).json({
        error: "Session expired - please log in again",
        code: "SESSION_EXPIRED",
      });
    }

    const rows = await getDb()
      .select()
      .from(dashboardPreferences)
      .where(eq(dashboardPreferences.userId, userId));

    // First visit: nothing saved, answer with the default card layout.
    if (rows.length === 0) {
      return res.json({
        cards: [
          { id: "server_overview", enabled: true, order: 1 },
          { id: "recent_activity", enabled: true, order: 2 },
          { id: "network_graph", enabled: false, order: 3 },
          { id: "quick_actions", enabled: true, order: 4 },
          { id: "server_stats", enabled: true, order: 5 },
        ],
        gridColumns: 2,
      });
    }

    // The layout column is persisted as a JSON string.
    res.json(JSON.parse(rows[0].layout as string));
  } catch (err) {
    dashboardLogger.error("Failed to get dashboard preferences", err);
    res.status(500).json({ error: "Failed to get dashboard preferences" });
  }
});
/**
* @openapi
* /dashboard/preferences:
* post:
* summary: Save dashboard layout preferences
* description: Saves or updates the user's customized dashboard layout settings.
* tags:
* - Dashboard
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* cards:
* type: array
* items:
* type: object
* properties:
* id:
* type: string
* enabled:
* type: boolean
* order:
* type: integer
* gridColumns:
* type: integer
* responses:
* 200:
* description: Preferences saved successfully
* 400:
* description: Invalid request body
* 401:
* description: Session expired
* 500:
* description: Failed to save preferences
*/
// POST /dashboard/preferences — upsert the caller's dashboard layout.
// Body: { cards: Array, gridColumns: number }.
app.post("/dashboard/preferences", async (req, res) => {
  try {
    const userId = (req as AuthenticatedRequest).userId;

    if (!SimpleDBOps.isUserDataUnlocked(userId)) {
      return res.status(401).json({
        error: "Session expired - please log in again",
        code: "SESSION_EXPIRED",
      });
    }

    const { cards, gridColumns } = req.body;
    const validBody = Array.isArray(cards) && typeof gridColumns === "number";
    if (!validBody) {
      return res.status(400).json({
        error:
          "Invalid request body. Expected { cards: Array, gridColumns: number }",
      });
    }

    // The whole layout is stored as one JSON string column keyed by user.
    const layout = JSON.stringify({ cards, gridColumns });
    const existing = await getDb()
      .select()
      .from(dashboardPreferences)
      .where(eq(dashboardPreferences.userId, userId));

    // Upsert: update the existing row, otherwise insert a new one.
    if (existing.length > 0) {
      await getDb()
        .update(dashboardPreferences)
        .set({ layout, updatedAt: sql`CURRENT_TIMESTAMP` })
        .where(eq(dashboardPreferences.userId, userId));
    } else {
      await getDb().insert(dashboardPreferences).values({ userId, layout });
    }

    // Request a database save for this change.
    await DatabaseSaveTrigger.triggerSave("dashboard_preferences_updated");

    dashboardLogger.success("Dashboard preferences saved", {
      operation: "save_dashboard_preferences",
      userId,
    });
    res.json({ success: true, message: "Dashboard preferences saved" });
  } catch (err) {
    dashboardLogger.error("Failed to save dashboard preferences", err);
    res.status(500).json({ error: "Failed to save dashboard preferences" });
  }
});
const PORT = 30006;
app.listen(PORT, async () => {
try {

View File

@@ -7,6 +7,9 @@ import sshRoutes from "./routes/ssh.js";
import alertRoutes from "./routes/alerts.js";
import credentialsRoutes from "./routes/credentials.js";
import snippetsRoutes from "./routes/snippets.js";
import terminalRoutes from "./routes/terminal.js";
import networkTopologyRoutes from "./routes/network-topology.js";
import rbacRoutes from "./routes/rbac.js";
import cors from "cors";
import fetch from "node-fetch";
import fs from "fs";
@@ -21,6 +24,7 @@ import { DatabaseMigration } from "../utils/database-migration.js";
import { UserDataExport } from "../utils/user-data-export.js";
import { AutoSSLSetup } from "../utils/auto-ssl-setup.js";
import { eq, and } from "drizzle-orm";
import { parseUserAgent } from "../utils/user-agent-parser.js";
import {
users,
sshData,
@@ -202,10 +206,46 @@ app.use(bodyParser.urlencoded({ limit: "1gb", extended: true }));
app.use(bodyParser.raw({ limit: "5gb", type: "application/octet-stream" }));
app.use(cookieParser());
/**
* @openapi
* /health:
* get:
* summary: Health check
* description: Returns the health status of the server.
* tags:
* - General
* responses:
* 200:
* description: Server is healthy.
* content:
* application/json:
* schema:
* type: object
* properties:
* status:
* type: string
* example: ok
*/
app.get("/health", (req, res) => {
res.json({ status: "ok" });
});
/**
* @openapi
* /version:
* get:
* summary: Get version information
* description: Returns the local and remote version of the application.
* tags:
* - General
* responses:
* 200:
* description: Version information.
* 404:
* description: Local version not set.
* 500:
* description: Fetch error.
*/
app.get("/version", authenticateJWT, async (req, res) => {
let localVersion = process.env.VERSION;
@@ -304,6 +344,31 @@ app.get("/version", authenticateJWT, async (req, res) => {
}
});
/**
* @openapi
* /releases/rss:
* get:
* summary: Get releases in RSS format
* description: Returns the latest releases from the GitHub repository in an RSS-like JSON format.
* tags:
* - General
* parameters:
* - in: query
* name: page
* schema:
* type: integer
* description: The page number of the releases to fetch.
* - in: query
* name: per_page
* schema:
* type: integer
* description: The number of releases to fetch per page.
* responses:
* 200:
* description: Releases in RSS format.
* 500:
* description: Failed to generate RSS format.
*/
app.get("/releases/rss", authenticateJWT, async (req, res) => {
try {
const page = parseInt(req.query.page as string) || 1;
@@ -360,6 +425,20 @@ app.get("/releases/rss", authenticateJWT, async (req, res) => {
}
});
/**
* @openapi
* /encryption/status:
* get:
* summary: Get encryption status
* description: Returns the security status of the application.
* tags:
* - Encryption
* responses:
* 200:
* description: Security status.
* 500:
* description: Failed to get security status.
*/
app.get("/encryption/status", requireAdmin, async (req, res) => {
try {
const securityStatus = {
@@ -381,6 +460,20 @@ app.get("/encryption/status", requireAdmin, async (req, res) => {
}
});
/**
* @openapi
* /encryption/initialize:
* post:
* summary: Initialize security system
* description: Initializes the security system for the application.
* tags:
* - Encryption
* responses:
* 200:
* description: Security system initialized successfully.
* 500:
* description: Failed to initialize security system.
*/
app.post("/encryption/initialize", requireAdmin, async (req, res) => {
try {
const authManager = AuthManager.getInstance();
@@ -404,6 +497,20 @@ app.post("/encryption/initialize", requireAdmin, async (req, res) => {
}
});
/**
* @openapi
* /encryption/regenerate:
* post:
* summary: Regenerate JWT secret
* description: Regenerates the system JWT secret. This will invalidate all existing JWT tokens.
* tags:
* - Encryption
* responses:
* 200:
* description: System JWT secret regenerated.
* 500:
* description: Failed to regenerate JWT secret.
*/
app.post("/encryption/regenerate", requireAdmin, async (req, res) => {
try {
apiLogger.warn("System JWT secret regenerated via API", {
@@ -425,6 +532,20 @@ app.post("/encryption/regenerate", requireAdmin, async (req, res) => {
}
});
/**
* @openapi
* /encryption/regenerate-jwt:
* post:
* summary: Regenerate JWT secret
* description: Regenerates the JWT secret. This will invalidate all existing JWT tokens.
* tags:
* - Encryption
* responses:
* 200:
* description: New JWT secret generated.
* 500:
* description: Failed to regenerate JWT secret.
*/
app.post("/encryption/regenerate-jwt", requireAdmin, async (req, res) => {
try {
apiLogger.warn("JWT secret regenerated via API", {
@@ -445,6 +566,33 @@ app.post("/encryption/regenerate-jwt", requireAdmin, async (req, res) => {
}
});
/**
* @openapi
* /database/export:
* post:
* summary: Export user data
* description: Exports the user's data as a SQLite database file.
* tags:
* - Database
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* password:
* type: string
* responses:
* 200:
* description: User data exported successfully.
* 400:
* description: Password required for export.
* 401:
* description: Invalid password.
* 500:
* description: Failed to export user data.
*/
app.post("/database/export", authenticateJWT, async (req, res) => {
try {
const userId = (req as AuthenticatedRequest).userId;
@@ -456,8 +604,12 @@ app.post("/database/export", authenticateJWT, async (req, res) => {
code: "PASSWORD_REQUIRED",
});
}
const unlocked = await authManager.authenticateUser(userId, password);
const deviceInfo = parseUserAgent(req);
const unlocked = await authManager.authenticateUser(
userId,
password,
deviceInfo.type,
);
if (!unlocked) {
return res.status(401).json({ error: "Invalid password" });
}
@@ -891,6 +1043,36 @@ app.post("/database/export", authenticateJWT, async (req, res) => {
}
});
/**
* @openapi
* /database/import:
* post:
* summary: Import user data
* description: Imports user data from a SQLite database file.
* tags:
* - Database
* requestBody:
* required: true
* content:
* multipart/form-data:
* schema:
* type: object
* properties:
* file:
* type: string
* format: binary
* password:
* type: string
* responses:
* 200:
* description: Incremental import completed successfully.
* 400:
* description: No file was uploaded, or a password is required for import.
* 401:
* description: Invalid password.
* 500:
* description: Failed to import SQLite data.
*/
app.post(
"/database/import",
authenticateJWT,
@@ -904,6 +1086,7 @@ app.post(
const userId = (req as AuthenticatedRequest).userId;
const { password } = req.body;
const mainDb = getDb();
const deviceInfo = parseUserAgent(req);
const userRecords = await mainDb
.select()
@@ -924,12 +1107,19 @@ app.post(
});
}
const unlocked = await authManager.authenticateUser(userId, password);
const unlocked = await authManager.authenticateUser(
userId,
password,
deviceInfo.type,
);
if (!unlocked) {
return res.status(401).json({ error: "Invalid password" });
}
} else if (!DataCrypto.getUserDataKey(userId)) {
const oidcUnlocked = await authManager.authenticateOIDCUser(userId);
const oidcUnlocked = await authManager.authenticateOIDCUser(
userId,
deviceInfo.type,
);
if (!oidcUnlocked) {
return res.status(403).json({
error: "Failed to unlock user data with SSO credentials",
@@ -947,7 +1137,10 @@ app.post(
let userDataKey = DataCrypto.getUserDataKey(userId);
if (!userDataKey && isOidcUser) {
const oidcUnlocked = await authManager.authenticateOIDCUser(userId);
const oidcUnlocked = await authManager.authenticateOIDCUser(
userId,
deviceInfo.type,
);
if (oidcUnlocked) {
userDataKey = DataCrypto.getUserDataKey(userId);
}
@@ -1344,6 +1537,31 @@ app.post(
},
);
/**
* @openapi
* /database/export/preview:
* post:
* summary: Preview user data export
* description: Generates a preview of the user data export, including statistics about the data.
* tags:
* - Database
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* scope:
* type: string
* includeCredentials:
* type: boolean
* responses:
* 200:
* description: Export preview generated successfully.
* 500:
* description: Failed to generate export preview.
*/
app.post("/database/export/preview", authenticateJWT, async (req, res) => {
try {
const userId = (req as AuthenticatedRequest).userId;
@@ -1379,6 +1597,33 @@ app.post("/database/export/preview", authenticateJWT, async (req, res) => {
}
});
/**
* @openapi
* /database/restore:
* post:
* summary: Restore database from backup
* description: Restores the database from an encrypted backup file.
* tags:
* - Database
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* backupPath:
* type: string
* targetPath:
* type: string
* responses:
* 200:
* description: Database restored successfully.
* 400:
* description: Backup path is missing, or the file is not a valid encrypted backup.
* 500:
* description: Database restore failed.
*/
app.post("/database/restore", requireAdmin, async (req, res) => {
try {
const { backupPath, targetPath } = req.body;
@@ -1418,6 +1663,9 @@ app.use("/ssh", sshRoutes);
app.use("/alerts", alertRoutes);
app.use("/credentials", credentialsRoutes);
app.use("/snippets", snippetsRoutes);
app.use("/terminal", terminalRoutes);
app.use("/network-topology", networkTopologyRoutes);
app.use("/rbac", rbacRoutes);
app.use(
(
@@ -1457,6 +1705,20 @@ async function initializeSecurity() {
}
}
/**
* @openapi
* /database/migration/status:
* get:
* summary: Get database migration status
* description: Returns the status of the database migration.
* tags:
* - Database
* responses:
* 200:
* description: Migration status.
* 500:
* description: Failed to get migration status.
*/
app.get(
"/database/migration/status",
authenticateJWT,
@@ -1480,13 +1742,13 @@ app.get(
if (status.hasUnencryptedDb) {
try {
unencryptedSize = fs.statSync(dbPath).size;
} catch {}
} catch (error) {}
}
if (status.hasEncryptedDb) {
try {
encryptedSize = fs.statSync(encryptedDbPath).size;
} catch {}
} catch (error) {}
}
res.json({
@@ -1510,6 +1772,20 @@ app.get(
},
);
/**
* @openapi
* /database/migration/history:
* get:
* summary: Get database migration history
* description: Returns the history of database migrations.
* tags:
* - Database
* responses:
* 200:
* description: Migration history.
* 500:
* description: Failed to get migration history.
*/
app.get(
"/database/migration/history",
authenticateJWT,

View File

@@ -95,6 +95,26 @@ async function initializeDatabaseAsync(): Promise<void> {
databaseKeyLength: process.env.DATABASE_KEY?.length || 0,
});
try {
const diagnosticInfo =
DatabaseFileEncryption.getDiagnosticInfo(encryptedDbPath);
databaseLogger.error(
"Database encryption diagnostic completed - check logs above for details",
null,
{
operation: "db_encryption_diagnostic_completed",
filesConsistent: diagnosticInfo.validation.filesConsistent,
sizeMismatch: diagnosticInfo.validation.sizeMismatch,
},
);
} catch (diagError) {
databaseLogger.warn("Failed to generate diagnostic information", {
operation: "db_diagnostic_failed",
error:
diagError instanceof Error ? diagError.message : "Unknown error",
});
}
throw new Error(
`Database decryption failed: ${error instanceof Error ? error.message : "Unknown error"}. This prevents data loss.`,
);
@@ -120,6 +140,8 @@ async function initializeCompleteDatabase(): Promise<void> {
sqlite = memoryDatabase;
sqlite.exec("PRAGMA foreign_keys = ON");
db = drizzle(sqlite, { schema });
sqlite.exec(`
@@ -157,7 +179,7 @@ async function initializeCompleteDatabase(): Promise<void> {
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
expires_at TEXT NOT NULL,
last_active_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id)
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS ssh_data (
@@ -179,16 +201,24 @@ async function initializeCompleteDatabase(): Promise<void> {
enable_tunnel INTEGER NOT NULL DEFAULT 1,
tunnel_connections TEXT,
enable_file_manager INTEGER NOT NULL DEFAULT 1,
enable_docker INTEGER NOT NULL DEFAULT 0,
default_path TEXT,
autostart_password TEXT,
autostart_key TEXT,
autostart_key_password TEXT,
force_keyboard_interactive TEXT,
stats_config TEXT,
docker_config TEXT,
terminal_config TEXT,
notes TEXT,
use_socks5 INTEGER,
socks5_host TEXT,
socks5_port INTEGER,
socks5_username TEXT,
socks5_password TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id)
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS file_manager_recent (
@@ -198,8 +228,8 @@ async function initializeCompleteDatabase(): Promise<void> {
name TEXT NOT NULL,
path TEXT NOT NULL,
last_opened TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id),
FOREIGN KEY (host_id) REFERENCES ssh_data (id)
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS file_manager_pinned (
@@ -209,8 +239,8 @@ async function initializeCompleteDatabase(): Promise<void> {
name TEXT NOT NULL,
path TEXT NOT NULL,
pinned_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id),
FOREIGN KEY (host_id) REFERENCES ssh_data (id)
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS file_manager_shortcuts (
@@ -220,8 +250,8 @@ async function initializeCompleteDatabase(): Promise<void> {
name TEXT NOT NULL,
path TEXT NOT NULL,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id),
FOREIGN KEY (host_id) REFERENCES ssh_data (id)
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS dismissed_alerts (
@@ -229,7 +259,7 @@ async function initializeCompleteDatabase(): Promise<void> {
user_id TEXT NOT NULL,
alert_id TEXT NOT NULL,
dismissed_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id)
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS ssh_credentials (
@@ -249,7 +279,7 @@ async function initializeCompleteDatabase(): Promise<void> {
last_used TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id)
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS ssh_credential_usage (
@@ -258,9 +288,9 @@ async function initializeCompleteDatabase(): Promise<void> {
host_id INTEGER NOT NULL,
user_id TEXT NOT NULL,
used_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (credential_id) REFERENCES ssh_credentials (id),
FOREIGN KEY (host_id) REFERENCES ssh_data (id),
FOREIGN KEY (user_id) REFERENCES users (id)
FOREIGN KEY (credential_id) REFERENCES ssh_credentials (id) ON DELETE CASCADE,
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS snippets (
@@ -271,7 +301,18 @@ async function initializeCompleteDatabase(): Promise<void> {
description TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id)
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS ssh_folders (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL,
name TEXT NOT NULL,
color TEXT,
icon TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS recent_activity (
@@ -279,10 +320,95 @@ async function initializeCompleteDatabase(): Promise<void> {
user_id TEXT NOT NULL,
type TEXT NOT NULL,
host_id INTEGER NOT NULL,
host_name TEXT NOT NULL,
host_name TEXT,
timestamp TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id),
FOREIGN KEY (host_id) REFERENCES ssh_data (id)
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS command_history (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL,
host_id INTEGER NOT NULL,
command TEXT NOT NULL,
executed_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS host_access (
id INTEGER PRIMARY KEY AUTOINCREMENT,
host_id INTEGER NOT NULL,
user_id TEXT,
role_id INTEGER,
granted_by TEXT NOT NULL,
permission_level TEXT NOT NULL DEFAULT 'use',
expires_at TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_accessed_at TEXT,
access_count INTEGER NOT NULL DEFAULT 0,
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS roles (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL UNIQUE,
display_name TEXT NOT NULL,
description TEXT,
is_system INTEGER NOT NULL DEFAULT 0,
permissions TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS user_roles (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL,
role_id INTEGER NOT NULL,
granted_by TEXT,
granted_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE(user_id, role_id),
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE SET NULL
);
CREATE TABLE IF NOT EXISTS audit_logs (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL,
username TEXT NOT NULL,
action TEXT NOT NULL,
resource_type TEXT NOT NULL,
resource_id TEXT,
resource_name TEXT,
details TEXT,
ip_address TEXT,
user_agent TEXT,
success INTEGER NOT NULL,
error_message TEXT,
timestamp TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS session_recordings (
id INTEGER PRIMARY KEY AUTOINCREMENT,
host_id INTEGER NOT NULL,
user_id TEXT NOT NULL,
access_id INTEGER,
started_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
ended_at TEXT,
duration INTEGER,
commands TEXT,
dangerous_actions TEXT,
recording_path TEXT,
terminated_by_owner INTEGER DEFAULT 0,
termination_reason TEXT,
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (access_id) REFERENCES host_access (id) ON DELETE SET NULL
);
`);
@@ -343,14 +469,14 @@ const addColumnIfNotExists = (
try {
sqlite
.prepare(
`SELECT ${column}
`SELECT "${column}"
FROM ${table} LIMIT 1`,
)
.get();
} catch {
try {
sqlite.exec(`ALTER TABLE ${table}
ADD COLUMN ${column} ${definition};`);
ADD COLUMN "${column}" ${definition};`);
} catch (alterError) {
databaseLogger.warn(`Failed to add column ${column} to ${table}`, {
operation: "schema_migration",
@@ -405,6 +531,7 @@ const migrateSchema = () => {
"INTEGER NOT NULL DEFAULT 1",
);
addColumnIfNotExists("ssh_data", "tunnel_connections", "TEXT");
addColumnIfNotExists("ssh_data", "jump_hosts", "TEXT");
addColumnIfNotExists(
"ssh_data",
"enable_file_manager",
@@ -428,7 +555,12 @@ const migrateSchema = () => {
addColumnIfNotExists(
"ssh_data",
"credential_id",
"INTEGER REFERENCES ssh_credentials(id)",
"INTEGER REFERENCES ssh_credentials(id) ON DELETE SET NULL",
);
addColumnIfNotExists(
"ssh_data",
"override_credential_username",
"INTEGER",
);
addColumnIfNotExists("ssh_data", "autostart_password", "TEXT");
@@ -436,15 +568,90 @@ const migrateSchema = () => {
addColumnIfNotExists("ssh_data", "autostart_key_password", "TEXT");
addColumnIfNotExists("ssh_data", "stats_config", "TEXT");
addColumnIfNotExists("ssh_data", "terminal_config", "TEXT");
addColumnIfNotExists("ssh_data", "quick_actions", "TEXT");
addColumnIfNotExists(
"ssh_data",
"enable_docker",
"INTEGER NOT NULL DEFAULT 0",
);
addColumnIfNotExists("ssh_data", "docker_config", "TEXT");
addColumnIfNotExists("ssh_data", "notes", "TEXT");
addColumnIfNotExists("ssh_data", "use_socks5", "INTEGER");
addColumnIfNotExists("ssh_data", "socks5_host", "TEXT");
addColumnIfNotExists("ssh_data", "socks5_port", "INTEGER");
addColumnIfNotExists("ssh_data", "socks5_username", "TEXT");
addColumnIfNotExists("ssh_data", "socks5_password", "TEXT");
addColumnIfNotExists("ssh_data", "socks5_proxy_chain", "TEXT");
addColumnIfNotExists(
"ssh_data",
"show_terminal_in_sidebar",
"INTEGER NOT NULL DEFAULT 1",
);
addColumnIfNotExists(
"ssh_data",
"show_file_manager_in_sidebar",
"INTEGER NOT NULL DEFAULT 0",
);
addColumnIfNotExists(
"ssh_data",
"show_tunnel_in_sidebar",
"INTEGER NOT NULL DEFAULT 0",
);
addColumnIfNotExists(
"ssh_data",
"show_docker_in_sidebar",
"INTEGER NOT NULL DEFAULT 0",
);
addColumnIfNotExists(
"ssh_data",
"show_server_stats_in_sidebar",
"INTEGER NOT NULL DEFAULT 0",
);
addColumnIfNotExists("ssh_credentials", "private_key", "TEXT");
addColumnIfNotExists("ssh_credentials", "public_key", "TEXT");
addColumnIfNotExists("ssh_credentials", "detected_key_type", "TEXT");
addColumnIfNotExists("ssh_credentials", "system_password", "TEXT");
addColumnIfNotExists("ssh_credentials", "system_key", "TEXT");
addColumnIfNotExists("ssh_credentials", "system_key_password", "TEXT");
addColumnIfNotExists("file_manager_recent", "host_id", "INTEGER NOT NULL");
addColumnIfNotExists("file_manager_pinned", "host_id", "INTEGER NOT NULL");
addColumnIfNotExists("file_manager_shortcuts", "host_id", "INTEGER NOT NULL");
addColumnIfNotExists("snippets", "folder", "TEXT");
addColumnIfNotExists("snippets", "order", "INTEGER NOT NULL DEFAULT 0");
try {
sqlite
.prepare("SELECT id FROM snippet_folders LIMIT 1")
.get();
} catch {
try {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS snippet_folders (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL,
name TEXT NOT NULL,
color TEXT,
icon TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
);
`);
} catch (createError) {
databaseLogger.warn("Failed to create snippet_folders table", {
operation: "schema_migration",
error: createError,
});
}
}
try {
sqlite
.prepare("SELECT id FROM sessions LIMIT 1")
@@ -472,6 +679,365 @@ const migrateSchema = () => {
}
}
try {
sqlite
.prepare("SELECT id FROM network_topology LIMIT 1")
.get();
} catch {
try {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS network_topology (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL,
topology TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
);
`);
} catch (createError) {
databaseLogger.warn("Failed to create network_topology table", {
operation: "schema_migration",
error: createError,
});
}
}
try {
sqlite
.prepare("SELECT id FROM dashboard_preferences LIMIT 1")
.get();
} catch {
try {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS dashboard_preferences (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL UNIQUE,
layout TEXT NOT NULL,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
);
`);
} catch (createError) {
databaseLogger.warn("Failed to create dashboard_preferences table", {
operation: "schema_migration",
error: createError,
});
}
}
try {
sqlite.prepare("SELECT id FROM host_access LIMIT 1").get();
} catch {
try {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS host_access (
id INTEGER PRIMARY KEY AUTOINCREMENT,
host_id INTEGER NOT NULL,
user_id TEXT,
role_id INTEGER,
granted_by TEXT NOT NULL,
permission_level TEXT NOT NULL DEFAULT 'use',
expires_at TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_accessed_at TEXT,
access_count INTEGER NOT NULL DEFAULT 0,
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE CASCADE
);
`);
} catch (createError) {
databaseLogger.warn("Failed to create host_access table", {
operation: "schema_migration",
error: createError,
});
}
}
try {
sqlite.prepare("SELECT role_id FROM host_access LIMIT 1").get();
} catch {
try {
sqlite.exec("ALTER TABLE host_access ADD COLUMN role_id INTEGER REFERENCES roles(id) ON DELETE CASCADE");
} catch (alterError) {
databaseLogger.warn("Failed to add role_id column", {
operation: "schema_migration",
error: alterError,
});
}
}
try {
sqlite.prepare("SELECT sudo_password FROM ssh_data LIMIT 1").get();
} catch {
try {
sqlite.exec("ALTER TABLE ssh_data ADD COLUMN sudo_password TEXT");
} catch (alterError) {
databaseLogger.warn("Failed to add sudo_password column", {
operation: "schema_migration",
error: alterError,
});
}
}
try {
sqlite.prepare("SELECT id FROM roles LIMIT 1").get();
} catch {
try {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS roles (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL UNIQUE,
display_name TEXT NOT NULL,
description TEXT,
is_system INTEGER NOT NULL DEFAULT 0,
permissions TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
);
`);
} catch (createError) {
databaseLogger.warn("Failed to create roles table", {
operation: "schema_migration",
error: createError,
});
}
}
try {
sqlite.prepare("SELECT id FROM user_roles LIMIT 1").get();
} catch {
try {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS user_roles (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL,
role_id INTEGER NOT NULL,
granted_by TEXT,
granted_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE(user_id, role_id),
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE SET NULL
);
`);
} catch (createError) {
databaseLogger.warn("Failed to create user_roles table", {
operation: "schema_migration",
error: createError,
});
}
}
try {
sqlite.prepare("SELECT id FROM audit_logs LIMIT 1").get();
} catch {
try {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS audit_logs (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL,
username TEXT NOT NULL,
action TEXT NOT NULL,
resource_type TEXT NOT NULL,
resource_id TEXT,
resource_name TEXT,
details TEXT,
ip_address TEXT,
user_agent TEXT,
success INTEGER NOT NULL,
error_message TEXT,
timestamp TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
);
`);
} catch (createError) {
databaseLogger.warn("Failed to create audit_logs table", {
operation: "schema_migration",
error: createError,
});
}
}
try {
sqlite.prepare("SELECT id FROM session_recordings LIMIT 1").get();
} catch {
try {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS session_recordings (
id INTEGER PRIMARY KEY AUTOINCREMENT,
host_id INTEGER NOT NULL,
user_id TEXT NOT NULL,
access_id INTEGER,
started_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
ended_at TEXT,
duration INTEGER,
commands TEXT,
dangerous_actions TEXT,
recording_path TEXT,
terminated_by_owner INTEGER DEFAULT 0,
termination_reason TEXT,
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (access_id) REFERENCES host_access (id) ON DELETE SET NULL
);
`);
} catch (createError) {
databaseLogger.warn("Failed to create session_recordings table", {
operation: "schema_migration",
error: createError,
});
}
}
try {
sqlite.prepare("SELECT id FROM shared_credentials LIMIT 1").get();
} catch {
try {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS shared_credentials (
id INTEGER PRIMARY KEY AUTOINCREMENT,
host_access_id INTEGER NOT NULL,
original_credential_id INTEGER NOT NULL,
target_user_id TEXT NOT NULL,
encrypted_username TEXT NOT NULL,
encrypted_auth_type TEXT NOT NULL,
encrypted_password TEXT,
encrypted_key TEXT,
encrypted_key_password TEXT,
encrypted_key_type TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
needs_re_encryption INTEGER NOT NULL DEFAULT 0,
FOREIGN KEY (host_access_id) REFERENCES host_access (id) ON DELETE CASCADE,
FOREIGN KEY (original_credential_id) REFERENCES ssh_credentials (id) ON DELETE CASCADE,
FOREIGN KEY (target_user_id) REFERENCES users (id) ON DELETE CASCADE
);
`);
} catch (createError) {
databaseLogger.warn("Failed to create shared_credentials table", {
operation: "schema_migration",
error: createError,
});
}
}
try {
const existingRoles = sqlite.prepare("SELECT name, is_system FROM roles").all() as Array<{ name: string; is_system: number }>;
try {
const validSystemRoles = ['admin', 'user'];
const unwantedRoleNames = ['superAdmin', 'powerUser', 'readonly', 'member'];
let deletedCount = 0;
const deleteByName = sqlite.prepare("DELETE FROM roles WHERE name = ?");
for (const roleName of unwantedRoleNames) {
const result = deleteByName.run(roleName);
if (result.changes > 0) {
deletedCount += result.changes;
}
}
const deleteOldSystemRole = sqlite.prepare("DELETE FROM roles WHERE name = ? AND is_system = 1");
for (const role of existingRoles) {
if (role.is_system === 1 && !validSystemRoles.includes(role.name) && !unwantedRoleNames.includes(role.name)) {
const result = deleteOldSystemRole.run(role.name);
if (result.changes > 0) {
deletedCount += result.changes;
}
}
}
} catch (cleanupError) {
databaseLogger.warn("Failed to clean up old system roles", {
operation: "schema_migration",
error: cleanupError,
});
}
const systemRoles = [
{
name: "admin",
displayName: "rbac.roles.admin",
description: "Administrator with full access",
permissions: null,
},
{
name: "user",
displayName: "rbac.roles.user",
description: "Regular user",
permissions: null,
},
];
for (const role of systemRoles) {
const existingRole = sqlite.prepare("SELECT id FROM roles WHERE name = ?").get(role.name);
if (!existingRole) {
try {
sqlite.prepare(`
INSERT INTO roles (name, display_name, description, is_system, permissions)
VALUES (?, ?, ?, 1, ?)
`).run(role.name, role.displayName, role.description, role.permissions);
} catch (insertError) {
databaseLogger.warn(`Failed to create system role: ${role.name}`, {
operation: "schema_migration",
error: insertError,
});
}
}
}
try {
const adminUsers = sqlite.prepare("SELECT id FROM users WHERE is_admin = 1").all() as { id: string }[];
const normalUsers = sqlite.prepare("SELECT id FROM users WHERE is_admin = 0").all() as { id: string }[];
const adminRole = sqlite.prepare("SELECT id FROM roles WHERE name = 'admin'").get() as { id: number } | undefined;
const userRole = sqlite.prepare("SELECT id FROM roles WHERE name = 'user'").get() as { id: number } | undefined;
if (adminRole) {
const insertUserRole = sqlite.prepare(`
INSERT OR IGNORE INTO user_roles (user_id, role_id, granted_at)
VALUES (?, ?, CURRENT_TIMESTAMP)
`);
for (const admin of adminUsers) {
try {
insertUserRole.run(admin.id, adminRole.id);
} catch (error) {
// Ignore duplicate errors
}
}
}
if (userRole) {
const insertUserRole = sqlite.prepare(`
INSERT OR IGNORE INTO user_roles (user_id, role_id, granted_at)
VALUES (?, ?, CURRENT_TIMESTAMP)
`);
for (const user of normalUsers) {
try {
insertUserRole.run(user.id, userRole.id);
} catch (error) {
// Ignore duplicate errors
}
}
}
} catch (migrationError) {
databaseLogger.warn("Failed to migrate existing users to roles", {
operation: "schema_migration",
error: migrationError,
});
}
} catch (seedError) {
databaseLogger.warn("Failed to seed system roles", {
operation: "schema_migration",
error: seedError,
});
}
databaseLogger.success("Schema migration completed", {
operation: "schema_migration",
});

View File

@@ -34,7 +34,7 @@ export const sessions = sqliteTable("sessions", {
id: text("id").primaryKey(),
userId: text("user_id")
.notNull()
.references(() => users.id),
.references(() => users.id, { onDelete: "cascade" }),
jwtToken: text("jwt_token").notNull(),
deviceType: text("device_type").notNull(),
deviceInfo: text("device_info").notNull(),
@@ -51,7 +51,7 @@ export const sshData = sqliteTable("ssh_data", {
id: integer("id").primaryKey({ autoIncrement: true }),
userId: text("user_id")
.notNull()
.references(() => users.id),
.references(() => users.id, { onDelete: "cascade" }),
name: text("name"),
ip: text("ip").notNull(),
port: integer("port").notNull(),
@@ -66,12 +66,16 @@ export const sshData = sqliteTable("ssh_data", {
key: text("key", { length: 8192 }),
key_password: text("key_password"),
keyType: text("key_type"),
sudoPassword: text("sudo_password"),
autostartPassword: text("autostart_password"),
autostartKey: text("autostart_key", { length: 8192 }),
autostartKeyPassword: text("autostart_key_password"),
credentialId: integer("credential_id").references(() => sshCredentials.id),
credentialId: integer("credential_id").references(() => sshCredentials.id, { onDelete: "set null" }),
overrideCredentialUsername: integer("override_credential_username", {
mode: "boolean",
}),
enableTerminal: integer("enable_terminal", { mode: "boolean" })
.notNull()
.default(true),
@@ -79,12 +83,41 @@ export const sshData = sqliteTable("ssh_data", {
.notNull()
.default(true),
tunnelConnections: text("tunnel_connections"),
jumpHosts: text("jump_hosts"),
enableFileManager: integer("enable_file_manager", { mode: "boolean" })
.notNull()
.default(true),
enableDocker: integer("enable_docker", { mode: "boolean" })
.notNull()
.default(false),
showTerminalInSidebar: integer("show_terminal_in_sidebar", { mode: "boolean" })
.notNull()
.default(true),
showFileManagerInSidebar: integer("show_file_manager_in_sidebar", { mode: "boolean" })
.notNull()
.default(false),
showTunnelInSidebar: integer("show_tunnel_in_sidebar", { mode: "boolean" })
.notNull()
.default(false),
showDockerInSidebar: integer("show_docker_in_sidebar", { mode: "boolean" })
.notNull()
.default(false),
showServerStatsInSidebar: integer("show_server_stats_in_sidebar", { mode: "boolean" })
.notNull()
.default(false),
defaultPath: text("default_path"),
statsConfig: text("stats_config"),
terminalConfig: text("terminal_config"),
quickActions: text("quick_actions"),
notes: text("notes"),
useSocks5: integer("use_socks5", { mode: "boolean" }),
socks5Host: text("socks5_host"),
socks5Port: integer("socks5_port"),
socks5Username: text("socks5_username"),
socks5Password: text("socks5_password"),
socks5ProxyChain: text("socks5_proxy_chain"),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
@@ -97,10 +130,10 @@ export const fileManagerRecent = sqliteTable("file_manager_recent", {
id: integer("id").primaryKey({ autoIncrement: true }),
userId: text("user_id")
.notNull()
.references(() => users.id),
.references(() => users.id, { onDelete: "cascade" }),
hostId: integer("host_id")
.notNull()
.references(() => sshData.id),
.references(() => sshData.id, { onDelete: "cascade" }),
name: text("name").notNull(),
path: text("path").notNull(),
lastOpened: text("last_opened")
@@ -112,10 +145,10 @@ export const fileManagerPinned = sqliteTable("file_manager_pinned", {
id: integer("id").primaryKey({ autoIncrement: true }),
userId: text("user_id")
.notNull()
.references(() => users.id),
.references(() => users.id, { onDelete: "cascade" }),
hostId: integer("host_id")
.notNull()
.references(() => sshData.id),
.references(() => sshData.id, { onDelete: "cascade" }),
name: text("name").notNull(),
path: text("path").notNull(),
pinnedAt: text("pinned_at")
@@ -127,10 +160,10 @@ export const fileManagerShortcuts = sqliteTable("file_manager_shortcuts", {
id: integer("id").primaryKey({ autoIncrement: true }),
userId: text("user_id")
.notNull()
.references(() => users.id),
.references(() => users.id, { onDelete: "cascade" }),
hostId: integer("host_id")
.notNull()
.references(() => sshData.id),
.references(() => sshData.id, { onDelete: "cascade" }),
name: text("name").notNull(),
path: text("path").notNull(),
createdAt: text("created_at")
@@ -142,7 +175,7 @@ export const dismissedAlerts = sqliteTable("dismissed_alerts", {
id: integer("id").primaryKey({ autoIncrement: true }),
userId: text("user_id")
.notNull()
.references(() => users.id),
.references(() => users.id, { onDelete: "cascade" }),
alertId: text("alert_id").notNull(),
dismissedAt: text("dismissed_at")
.notNull()
@@ -153,7 +186,7 @@ export const sshCredentials = sqliteTable("ssh_credentials", {
id: integer("id").primaryKey({ autoIncrement: true }),
userId: text("user_id")
.notNull()
.references(() => users.id),
.references(() => users.id, { onDelete: "cascade" }),
name: text("name").notNull(),
description: text("description"),
folder: text("folder"),
@@ -167,6 +200,11 @@ export const sshCredentials = sqliteTable("ssh_credentials", {
key_password: text("key_password"),
keyType: text("key_type"),
detectedKeyType: text("detected_key_type"),
systemPassword: text("system_password"),
systemKey: text("system_key", { length: 16384 }),
systemKeyPassword: text("system_key_password"),
usageCount: integer("usage_count").notNull().default(0),
lastUsed: text("last_used"),
createdAt: text("created_at")
@@ -181,13 +219,13 @@ export const sshCredentialUsage = sqliteTable("ssh_credential_usage", {
id: integer("id").primaryKey({ autoIncrement: true }),
credentialId: integer("credential_id")
.notNull()
.references(() => sshCredentials.id),
.references(() => sshCredentials.id, { onDelete: "cascade" }),
hostId: integer("host_id")
.notNull()
.references(() => sshData.id),
.references(() => sshData.id, { onDelete: "cascade" }),
userId: text("user_id")
.notNull()
.references(() => users.id),
.references(() => users.id, { onDelete: "cascade" }),
usedAt: text("used_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
@@ -197,10 +235,44 @@ export const snippets = sqliteTable("snippets", {
id: integer("id").primaryKey({ autoIncrement: true }),
userId: text("user_id")
.notNull()
.references(() => users.id),
.references(() => users.id, { onDelete: "cascade" }),
name: text("name").notNull(),
content: text("content").notNull(),
description: text("description"),
folder: text("folder"),
order: integer("order").notNull().default(0),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
updatedAt: text("updated_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
// User-defined folders for organizing snippets; rows cascade-delete with the owning user.
export const snippetFolders = sqliteTable("snippet_folders", {
id: integer("id").primaryKey({ autoIncrement: true }),
// Owner of the folder.
userId: text("user_id")
.notNull()
.references(() => users.id, { onDelete: "cascade" }),
name: text("name").notNull(),
// Optional display customization for the folder.
color: text("color"),
icon: text("icon"),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
updatedAt: text("updated_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
export const sshFolders = sqliteTable("ssh_folders", {
id: integer("id").primaryKey({ autoIncrement: true }),
userId: text("user_id")
.notNull()
.references(() => users.id, { onDelete: "cascade" }),
name: text("name").notNull(),
color: text("color"),
icon: text("icon"),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
@@ -213,13 +285,209 @@ export const recentActivity = sqliteTable("recent_activity", {
id: integer("id").primaryKey({ autoIncrement: true }),
userId: text("user_id")
.notNull()
.references(() => users.id),
.references(() => users.id, { onDelete: "cascade" }),
type: text("type").notNull(),
hostId: integer("host_id")
.notNull()
.references(() => sshData.id),
hostName: text("host_name").notNull(),
.references(() => sshData.id, { onDelete: "cascade" }),
hostName: text("host_name"),
timestamp: text("timestamp")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
// Per-user, per-host record of executed terminal commands (see the
// /terminal/command_history routes, which read/write this table).
export const commandHistory = sqliteTable("command_history", {
id: integer("id").primaryKey({ autoIncrement: true }),
// History is purged when the owning user is deleted.
userId: text("user_id")
.notNull()
.references(() => users.id, { onDelete: "cascade" }),
// Host the command was run against; cascades with host deletion.
hostId: integer("host_id")
.notNull()
.references(() => sshData.id, { onDelete: "cascade" }),
command: text("command").notNull(),
executedAt: text("executed_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
// Saved network-topology document per user; `topology` holds serialized JSON
// (see the /network-topology routes, which JSON.parse/stringify it).
// NOTE(review): userId carries no unique constraint here, but the route layer
// treats this as one row per user — confirm whether a unique index is intended.
export const networkTopology = sqliteTable("network_topology", {
id: integer("id").primaryKey({ autoIncrement: true }),
userId: text("user_id")
.notNull()
.references(() => users.id, { onDelete: "cascade" }),
// Serialized topology payload; nullable.
topology: text("topology"),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
updatedAt: text("updated_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
// One dashboard layout per user (userId is unique); removed with the user.
export const dashboardPreferences = sqliteTable("dashboard_preferences", {
id: integer("id").primaryKey({ autoIncrement: true }),
userId: text("user_id")
.notNull()
.unique()
.references(() => users.id, { onDelete: "cascade" }),
// Serialized layout data — format is defined by the dashboard UI.
layout: text("layout").notNull(),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
updatedAt: text("updated_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
// Host-sharing grants: each row grants access to one host, to either a user
// (userId) or a role (roleId) — both columns are nullable, so presumably
// exactly one is set per row; confirm against the granting code path.
export const hostAccess = sqliteTable("host_access", {
id: integer("id").primaryKey({ autoIncrement: true }),
hostId: integer("host_id")
.notNull()
.references(() => sshData.id, { onDelete: "cascade" }),
// Grantee user (optional — see note above).
userId: text("user_id")
.references(() => users.id, { onDelete: "cascade" }),
// Grantee role (optional — see note above).
roleId: integer("role_id")
.references(() => roles.id, { onDelete: "cascade" }),
// User who issued the grant.
grantedBy: text("granted_by")
.notNull()
.references(() => users.id, { onDelete: "cascade" }),
// Defaults to the least-privileged level.
permissionLevel: text("permission_level")
.notNull()
.default("view"),
// Optional expiry; null means the grant does not expire.
expiresAt: text("expires_at"),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
// Usage tracking for the grant.
lastAccessedAt: text("last_accessed_at"),
accessCount: integer("access_count").notNull().default(0),
});
// Encrypted copy of a credential materialized for a specific share grant
// (hostAccess row) and recipient. Kept in sync with the original credential by
// SharedCredentialManager (see credential update/delete routes).
export const sharedCredentials = sqliteTable("shared_credentials", {
id: integer("id").primaryKey({ autoIncrement: true }),
// The share grant this copy belongs to; removed with the grant.
hostAccessId: integer("host_access_id")
.notNull()
.references(() => hostAccess.id, { onDelete: "cascade" }),
// Source credential this row mirrors.
originalCredentialId: integer("original_credential_id")
.notNull()
.references(() => sshCredentials.id, { onDelete: "cascade" }),
// Recipient of the share.
targetUserId: text("target_user_id")
.notNull()
.references(() => users.id, { onDelete: "cascade" }),
// All secret material is stored encrypted; key/password fields are nullable
// because only one auth method applies per credential.
encryptedUsername: text("encrypted_username").notNull(),
encryptedAuthType: text("encrypted_auth_type").notNull(),
encryptedPassword: text("encrypted_password"),
encryptedKey: text("encrypted_key", { length: 16384 }),
encryptedKeyPassword: text("encrypted_key_password"),
encryptedKeyType: text("encrypted_key_type"),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
updatedAt: text("updated_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
// Set when the source credential changed and this copy must be re-encrypted
// for the recipient.
needsReEncryption: integer("needs_re_encryption", { mode: "boolean" })
.notNull()
.default(false),
});
// Role definitions for RBAC; `name` is the unique machine identifier,
// `displayName` the human-readable label.
export const roles = sqliteTable("roles", {
id: integer("id").primaryKey({ autoIncrement: true }),
name: text("name").notNull().unique(),
displayName: text("display_name").notNull(),
description: text("description"),
// Marks built-in roles — presumably protected from user deletion; confirm
// against the role-management routes.
isSystem: integer("is_system", { mode: "boolean" })
.notNull()
.default(false),
// Serialized permission set; format defined by the auth layer.
permissions: text("permissions"),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
updatedAt: text("updated_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
// Join table assigning roles to users. Deleting the user or role removes the
// assignment; deleting the granter only nulls out the attribution.
export const userRoles = sqliteTable("user_roles", {
id: integer("id").primaryKey({ autoIncrement: true }),
userId: text("user_id")
.notNull()
.references(() => users.id, { onDelete: "cascade" }),
roleId: integer("role_id")
.notNull()
.references(() => roles.id, { onDelete: "cascade" }),
// Who granted the role; preserved as null if that user is later deleted.
grantedBy: text("granted_by").references(() => users.id, {
onDelete: "set null",
}),
grantedAt: text("granted_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
// Audit trail of user actions. `username` is denormalized alongside userId so
// log rows stay readable; note rows cascade-delete with the user, so the
// audit history of deleted users is not retained — confirm this is intended.
export const auditLogs = sqliteTable("audit_logs", {
id: integer("id").primaryKey({ autoIncrement: true }),
userId: text("user_id")
.notNull()
.references(() => users.id, { onDelete: "cascade" }),
username: text("username").notNull(),
action: text("action").notNull(),
// What was acted on (type + optional id/name for display).
resourceType: text("resource_type").notNull(),
resourceId: text("resource_id"),
resourceName: text("resource_name"),
// Free-form extra context.
details: text("details"),
// Request metadata.
ipAddress: text("ip_address"),
userAgent: text("user_agent"),
// Whether the action succeeded; errorMessage holds the failure reason.
success: integer("success", { mode: "boolean" }).notNull(),
errorMessage: text("error_message"),
timestamp: text("timestamp")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
// Metadata for recorded SSH sessions on a host. The recording content itself
// lives at `recordingPath`; this row tracks timing, commands and moderation.
export const sessionRecordings = sqliteTable("session_recordings", {
id: integer("id").primaryKey({ autoIncrement: true }),
hostId: integer("host_id")
.notNull()
.references(() => sshData.id, { onDelete: "cascade" }),
userId: text("user_id")
.notNull()
.references(() => users.id, { onDelete: "cascade" }),
// The hostAccess grant the session ran under (for shared hosts); kept as
// null if the grant is later revoked.
accessId: integer("access_id").references(() => hostAccess.id, {
onDelete: "set null",
}),
startedAt: text("started_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
// Null while the session is still active.
endedAt: text("ended_at"),
duration: integer("duration"),
// Serialized command/flag data captured during the session.
commands: text("commands"),
dangerousActions: text("dangerous_actions"),
recordingPath: text("recording_path"),
// Set when the host owner forcibly ended a shared session.
terminatedByOwner: integer("terminated_by_owner", { mode: "boolean" })
.default(false),
terminationReason: text("termination_reason"),
});

View File

@@ -99,8 +99,20 @@ const router = express.Router();
const authManager = AuthManager.getInstance();
const authenticateJWT = authManager.createAuthMiddleware();
// Route: Get alerts for the authenticated user (excluding dismissed ones)
// GET /alerts
/**
* @openapi
* /alerts:
* get:
* summary: Get active alerts
* description: Fetches active alerts for the authenticated user, excluding those that have been dismissed.
* tags:
* - Alerts
* responses:
* 200:
* description: A list of active alerts.
* 500:
* description: Failed to fetch alerts.
*/
router.get("/", authenticateJWT, async (req, res) => {
try {
const userId = (req as AuthenticatedRequest).userId;
@@ -131,8 +143,33 @@ router.get("/", authenticateJWT, async (req, res) => {
}
});
// Route: Dismiss an alert for the authenticated user
// POST /alerts/dismiss
/**
* @openapi
* /alerts/dismiss:
* post:
* summary: Dismiss an alert
* description: Marks an alert as dismissed for the authenticated user.
* tags:
* - Alerts
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* alertId:
* type: string
* responses:
* 200:
* description: Alert dismissed successfully.
* 400:
* description: Alert ID is required.
* 409:
* description: Alert already dismissed.
* 500:
* description: Failed to dismiss alert.
*/
router.post("/dismiss", authenticateJWT, async (req, res) => {
try {
const { alertId } = req.body;
@@ -170,8 +207,20 @@ router.post("/dismiss", authenticateJWT, async (req, res) => {
}
});
// Route: Get dismissed alerts for a user
// GET /alerts/dismissed (user is taken from the JWT, not a path parameter)
/**
* @openapi
* /alerts/dismissed:
* get:
* summary: Get dismissed alerts
* description: Fetches a list of alerts that have been dismissed by the authenticated user.
* tags:
* - Alerts
* responses:
* 200:
* description: A list of dismissed alerts.
* 500:
* description: Failed to fetch dismissed alerts.
*/
router.get("/dismissed", authenticateJWT, async (req, res) => {
try {
const userId = (req as AuthenticatedRequest).userId;
@@ -194,8 +243,33 @@ router.get("/dismissed", authenticateJWT, async (req, res) => {
}
});
// Route: Undismiss an alert for the authenticated user (remove from dismissed list)
// DELETE /alerts/dismiss
/**
* @openapi
* /alerts/dismiss:
* delete:
* summary: Undismiss an alert
* description: Removes an alert from the dismissed list for the authenticated user.
* tags:
* - Alerts
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* alertId:
* type: string
* responses:
* 200:
* description: Alert undismissed successfully.
* 400:
* description: Alert ID is required.
* 404:
* description: Dismissed alert not found.
* 500:
* description: Failed to undismiss alert.
*/
router.delete("/dismiss", authenticateJWT, async (req, res) => {
try {
const { alertId } = req.body;

View File

@@ -1,7 +1,15 @@
import type { AuthenticatedRequest } from "../../../types/index.js";
import type {
AuthenticatedRequest,
CredentialBackend,
} from "../../../types/index.js";
import express from "express";
import { db } from "../db/index.js";
import { sshCredentials, sshCredentialUsage, sshData } from "../db/schema.js";
import {
sshCredentials,
sshCredentialUsage,
sshData,
hostAccess,
} from "../db/schema.js";
import { eq, and, desc, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { authLogger } from "../../utils/logger.js";
@@ -76,8 +84,52 @@ const authManager = AuthManager.getInstance();
const authenticateJWT = authManager.createAuthMiddleware();
const requireDataAccess = authManager.createDataAccessMiddleware();
// Create a new credential
// POST /credentials
/**
* @openapi
* /credentials:
* post:
* summary: Create a new credential
* description: Creates a new SSH credential for the authenticated user.
* tags:
* - Credentials
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* name:
* type: string
* description:
* type: string
* folder:
* type: string
* tags:
* type: array
* items:
* type: string
* authType:
* type: string
* enum: [password, key]
* username:
* type: string
* password:
* type: string
* key:
* type: string
* keyPassword:
* type: string
* keyType:
* type: string
* responses:
* 201:
* description: Credential created successfully.
* 400:
* description: Invalid request body.
* 500:
* description: Failed to create credential.
*/
router.post(
"/",
authenticateJWT,
@@ -223,8 +275,22 @@ router.post(
},
);
// Get all credentials for the authenticated user
// GET /credentials
/**
* @openapi
* /credentials:
* get:
* summary: Get all credentials
* description: Retrieves all SSH credentials for the authenticated user.
* tags:
* - Credentials
* responses:
* 200:
* description: A list of credentials.
* 400:
* description: Invalid userId.
* 500:
* description: Failed to fetch credentials.
*/
router.get(
"/",
authenticateJWT,
@@ -256,8 +322,22 @@ router.get(
},
);
// Get all unique credential folders for the authenticated user
// GET /credentials/folders
/**
* @openapi
* /credentials/folders:
* get:
* summary: Get credential folders
* description: Retrieves all unique credential folders for the authenticated user.
* tags:
* - Credentials
* responses:
* 200:
* description: A list of folder names.
* 400:
* description: Invalid userId.
* 500:
* description: Failed to fetch credential folders.
*/
router.get(
"/folders",
authenticateJWT,
@@ -294,8 +374,30 @@ router.get(
},
);
// Get a specific credential by ID (with plain text secrets)
// GET /credentials/:id
/**
* @openapi
* /credentials/{id}:
* get:
* summary: Get a specific credential
* description: Retrieves a specific credential by its ID, including secrets.
* tags:
* - Credentials
* parameters:
* - in: path
* name: id
* required: true
* schema:
* type: integer
* responses:
* 200:
* description: The requested credential.
* 400:
* description: Invalid request.
* 404:
* description: Credential not found.
* 500:
* description: Failed to fetch credential.
*/
router.get(
"/:id",
authenticateJWT,
@@ -358,8 +460,41 @@ router.get(
},
);
// Update a credential
// PUT /credentials/:id
/**
* @openapi
* /credentials/{id}:
* put:
* summary: Update a credential
* description: Updates a specific credential by its ID.
* tags:
* - Credentials
* parameters:
* - in: path
* name: id
* required: true
* schema:
* type: integer
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* name:
* type: string
* description:
* type: string
* responses:
* 200:
* description: The updated credential.
* 400:
* description: Invalid request.
* 404:
* description: Credential not found.
* 500:
* description: Failed to update credential.
*/
router.put(
"/:id",
authenticateJWT,
@@ -470,6 +605,14 @@ router.put(
userId,
);
const { SharedCredentialManager } =
await import("../../utils/shared-credential-manager.js");
const sharedCredManager = SharedCredentialManager.getInstance();
await sharedCredManager.updateSharedCredentialsForOriginal(
parseInt(id),
userId,
);
const credential = updated[0];
authLogger.success(
`SSH credential updated: ${credential.name} (${credential.authType}) by user ${userId}`,
@@ -494,8 +637,30 @@ router.put(
},
);
// Delete a credential
// DELETE /credentials/:id
/**
* @openapi
* /credentials/{id}:
* delete:
* summary: Delete a credential
* description: Deletes a specific credential by its ID.
* tags:
* - Credentials
* parameters:
* - in: path
* name: id
* required: true
* schema:
* type: integer
* responses:
* 200:
* description: Credential deleted successfully.
* 400:
* description: Invalid request.
* 404:
* description: Credential not found.
* 500:
* description: Failed to delete credential.
*/
router.delete(
"/:id",
authenticateJWT,
@@ -550,16 +715,32 @@ router.delete(
eq(sshData.userId, userId),
),
);
for (const host of hostsUsingCredential) {
const revokedShares = await db
.delete(hostAccess)
.where(eq(hostAccess.hostId, host.id))
.returning({ id: hostAccess.id });
if (revokedShares.length > 0) {
authLogger.info(
"Auto-revoked host shares due to credential deletion",
{
operation: "auto_revoke_shares",
hostId: host.id,
credentialId: parseInt(id),
revokedCount: revokedShares.length,
reason: "credential_deleted",
},
);
}
}
}
await db
.delete(sshCredentialUsage)
.where(
and(
eq(sshCredentialUsage.credentialId, parseInt(id)),
eq(sshCredentialUsage.userId, userId),
),
);
const { SharedCredentialManager } =
await import("../../utils/shared-credential-manager.js");
const sharedCredManager = SharedCredentialManager.getInstance();
await sharedCredManager.deleteSharedCredentialsForOriginal(parseInt(id));
await db
.delete(sshCredentials)
@@ -594,8 +775,35 @@ router.delete(
},
);
// Apply a credential to an SSH host (for quick application)
// POST /credentials/:id/apply-to-host/:hostId
/**
* @openapi
* /credentials/{id}/apply-to-host/{hostId}:
* post:
* summary: Apply a credential to a host
* description: Applies a credential to an SSH host for quick application.
* tags:
* - Credentials
* parameters:
* - in: path
* name: id
* required: true
* schema:
* type: integer
* - in: path
* name: hostId
* required: true
* schema:
* type: integer
* responses:
* 200:
* description: Credential applied to host successfully.
* 400:
* description: Invalid request.
* 404:
* description: Credential not found.
* 500:
* description: Failed to apply credential to host.
*/
router.post(
"/:id/apply-to-host/:hostId",
authenticateJWT,
@@ -673,8 +881,28 @@ router.post(
},
);
// Get hosts using a specific credential
// GET /credentials/:id/hosts
/**
* @openapi
* /credentials/{id}/hosts:
* get:
* summary: Get hosts using a credential
* description: Retrieves a list of hosts that are using a specific credential.
* tags:
* - Credentials
* parameters:
* - in: path
* name: id
* required: true
* schema:
* type: integer
* responses:
* 200:
* description: A list of hosts.
* 400:
* description: Invalid request.
* 500:
* description: Failed to fetch hosts using credential.
*/
router.get(
"/:id/hosts",
authenticateJWT,
@@ -768,8 +996,33 @@ function formatSSHHostOutput(
};
}
// Rename a credential folder
// PUT /credentials/folders/rename
/**
* @openapi
* /credentials/folders/rename:
* put:
* summary: Rename a credential folder
* description: Renames a credential folder for the authenticated user.
* tags:
* - Credentials
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* oldName:
* type: string
* newName:
* type: string
* responses:
* 200:
* description: Folder renamed successfully.
* 400:
* description: Both oldName and newName are required.
* 500:
* description: Failed to rename folder.
*/
router.put(
"/folders/rename",
authenticateJWT,
@@ -808,8 +1061,33 @@ router.put(
},
);
// Detect SSH key type endpoint
// POST /credentials/detect-key-type
/**
* @openapi
* /credentials/detect-key-type:
* post:
* summary: Detect SSH key type
* description: Detects the type of an SSH private key.
* tags:
* - Credentials
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* privateKey:
* type: string
* keyPassword:
* type: string
* responses:
* 200:
* description: Key type detection result.
* 400:
* description: Private key is required.
* 500:
* description: Failed to detect key type.
*/
router.post(
"/detect-key-type",
authenticateJWT,
@@ -842,8 +1120,31 @@ router.post(
},
);
// Detect SSH public key type endpoint
// POST /credentials/detect-public-key-type
/**
* @openapi
* /credentials/detect-public-key-type:
* post:
* summary: Detect SSH public key type
* description: Detects the type of an SSH public key.
* tags:
* - Credentials
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* publicKey:
* type: string
* responses:
* 200:
* description: Key type detection result.
* 400:
* description: Public key is required.
* 500:
* description: Failed to detect public key type.
*/
router.post(
"/detect-public-key-type",
authenticateJWT,
@@ -877,8 +1178,35 @@ router.post(
},
);
// Validate SSH key pair endpoint
// POST /credentials/validate-key-pair
/**
* @openapi
* /credentials/validate-key-pair:
* post:
* summary: Validate SSH key pair
* description: Validates if a given SSH private key and public key match.
* tags:
* - Credentials
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* privateKey:
* type: string
* publicKey:
* type: string
* keyPassword:
* type: string
* responses:
* 200:
* description: Key pair validation result.
* 400:
* description: Private key and public key are required.
* 500:
* description: Failed to validate key pair.
*/
router.post(
"/validate-key-pair",
authenticateJWT,
@@ -921,8 +1249,32 @@ router.post(
},
);
// Generate new SSH key pair endpoint
// POST /credentials/generate-key-pair
/**
* @openapi
* /credentials/generate-key-pair:
* post:
* summary: Generate new SSH key pair
* description: Generates a new SSH key pair.
* tags:
* - Credentials
* requestBody:
* content:
* application/json:
* schema:
* type: object
* properties:
* keyType:
* type: string
* keySize:
* type: integer
* passphrase:
* type: string
* responses:
* 200:
* description: The new key pair.
* 500:
* description: Failed to generate SSH key pair.
*/
router.post(
"/generate-key-pair",
authenticateJWT,
@@ -964,8 +1316,33 @@ router.post(
},
);
// Generate public key from private key endpoint
// POST /credentials/generate-public-key
/**
* @openapi
* /credentials/generate-public-key:
* post:
* summary: Generate public key from private key
* description: Generates a public key from a given private key.
* tags:
* - Credentials
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* privateKey:
* type: string
* keyPassword:
* type: string
* responses:
* 200:
* description: The generated public key.
* 400:
* description: Private key is required.
* 500:
* description: Failed to generate public key.
*/
router.post(
"/generate-public-key",
authenticateJWT,
@@ -1128,10 +1505,9 @@ router.post(
async function deploySSHKeyToHost(
hostConfig: Record<string, unknown>,
publicKey: string,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_credentialData: Record<string, unknown>,
credData: CredentialBackend,
): Promise<{ success: boolean; message?: string; error?: string }> {
const publicKey = credData.public_key as string;
return new Promise((resolve) => {
const conn = new Client();
@@ -1252,13 +1628,17 @@ async function deploySSHKeyToHost(
.replace(/'/g, "'\\''");
conn.exec(
`printf '%s\\n' '${escapedKey}' >> ~/.ssh/authorized_keys && chmod 600 ~/.ssh/authorized_keys`,
`printf '%s\n' '${escapedKey} ${credData.name}@Termix' >> ~/.ssh/authorized_keys && chmod 600 ~/.ssh/authorized_keys`,
(err, stream) => {
if (err) {
clearTimeout(addTimeout);
return rejectAdd(err);
}
stream.on("data", () => {
// Consume output
});
stream.on("close", (code) => {
clearTimeout(addTimeout);
if (code === 0) {
@@ -1467,8 +1847,41 @@ async function deploySSHKeyToHost(
});
}
// Deploy SSH Key to Host endpoint
// POST /credentials/:id/deploy-to-host
/**
* @openapi
* /credentials/{id}/deploy-to-host:
* post:
* summary: Deploy SSH key to a host
* description: Deploys an SSH public key to a target host's authorized_keys file.
* tags:
* - Credentials
* parameters:
* - in: path
* name: id
* required: true
* schema:
* type: integer
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* targetHostId:
* type: integer
* responses:
* 200:
* description: SSH key deployed successfully.
* 400:
* description: Credential ID and target host ID are required.
* 401:
* description: Authentication required.
* 404:
* description: Credential or target host not found.
* 500:
* description: Failed to deploy SSH key.
*/
router.post(
"/:id/deploy-to-host",
authenticateJWT,
@@ -1510,7 +1923,7 @@ router.post(
});
}
const credData = credential[0];
const credData = credential[0] as unknown as CredentialBackend;
if (credData.authType !== "key") {
return res.status(400).json({
@@ -1519,7 +1932,8 @@ router.post(
});
}
if (!credData.publicKey) {
const publicKey = credData.public_key;
if (!publicKey) {
return res.status(400).json({
success: false,
error: "Public key is required for deployment",
@@ -1598,11 +2012,7 @@ router.post(
}
}
const deployResult = await deploySSHKeyToHost(
hostConfig,
credData.publicKey as string,
credData,
);
const deployResult = await deploySSHKeyToHost(hostConfig, credData);
if (deployResult.success) {
res.json({

View File

@@ -0,0 +1,142 @@
import express from "express";
import { eq } from "drizzle-orm";
import { getDb } from "../db/index.js";
import { networkTopology } from "../db/schema.js";
import { AuthManager } from "../../utils/auth-manager.js";
import type { AuthenticatedRequest } from "../../../types/index.js";
const router = express.Router();
const authManager = AuthManager.getInstance();
const authenticateJWT = authManager.createAuthMiddleware();
/**
* @openapi
* /network-topology:
* get:
* summary: Get network topology
* description: Retrieves the network topology for the authenticated user.
* tags:
* - Network Topology
* responses:
* 200:
* description: The network topology.
* 401:
* description: User not authenticated.
* 500:
* description: Failed to fetch network topology.
*/
// GET / — return the caller's saved topology (parsed JSON), or null when none exists.
router.get(
  "/",
  authenticateJWT,
  async (req: express.Request, res: express.Response) => {
    try {
      const userId = (req as AuthenticatedRequest).userId;
      if (!userId) {
        return res.status(401).json({ error: "User not authenticated" });
      }

      const rows = await getDb()
        .select()
        .from(networkTopology)
        .where(eq(networkTopology.userId, userId));

      // No saved topology yet — the client treats null as "nothing saved".
      if (rows.length === 0) {
        return res.json(null);
      }

      const serialized = rows[0].topology;
      return res.json(serialized ? JSON.parse(serialized) : null);
    } catch (error) {
      console.error("Error fetching network topology:", error);
      return res.status(500).json({
        error: "Failed to fetch network topology",
        details: (error as Error).message,
      });
    }
  },
);
/**
* @openapi
* /network-topology:
* post:
* summary: Save network topology
* description: Saves the network topology for the authenticated user.
* tags:
* - Network Topology
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* topology:
* type: object
* responses:
* 200:
* description: Network topology saved successfully.
* 400:
* description: Topology data is required.
* 401:
* description: User not authenticated.
* 500:
* description: Failed to save network topology.
*/
// POST / — upsert the caller's topology document.
router.post(
  "/",
  authenticateJWT,
  async (req: express.Request, res: express.Response) => {
    try {
      const userId = (req as AuthenticatedRequest).userId;
      if (!userId) {
        return res.status(401).json({ error: "User not authenticated" });
      }

      const { topology } = req.body;
      if (!topology) {
        return res.status(400).json({ error: "Topology data is required" });
      }

      // Always persist a string, whether the client sent an object or
      // pre-serialized JSON.
      const serialized =
        typeof topology === "string" ? topology : JSON.stringify(topology);

      const db = getDb();
      const existing = await db
        .select()
        .from(networkTopology)
        .where(eq(networkTopology.userId, userId));

      if (existing.length === 0) {
        // First save for this user.
        await db
          .insert(networkTopology)
          .values({ userId, topology: serialized });
      } else {
        // Overwrite the user's existing document.
        await db
          .update(networkTopology)
          .set({ topology: serialized })
          .where(eq(networkTopology.userId, userId));
      }

      return res.json({ success: true });
    } catch (error) {
      console.error("Error saving network topology:", error);
      return res.status(500).json({
        error: "Failed to save network topology",
        details: (error as Error).message,
      });
    }
  },
);
export default router;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,285 @@
import type { AuthenticatedRequest } from "../../../types/index.js";
import express from "express";
import { db } from "../db/index.js";
import { commandHistory } from "../db/schema.js";
import { eq, and, desc, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { authLogger } from "../../utils/logger.js";
import { AuthManager } from "../../utils/auth-manager.js";
const router = express.Router();
/** Type guard: true only when `val` is a string containing non-whitespace characters. */
function isNonEmptyString(val: unknown): val is string {
  if (typeof val !== "string") {
    return false;
  }
  return val.trim() !== "";
}
const authManager = AuthManager.getInstance();
const authenticateJWT = authManager.createAuthMiddleware();
const requireDataAccess = authManager.createDataAccessMiddleware();
/**
* @openapi
* /terminal/command_history:
* post:
* summary: Save command to history
* description: Saves a command to the command history for a specific host.
* tags:
* - Terminal
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* hostId:
* type: integer
* command:
* type: string
* responses:
* 201:
* description: Command saved successfully.
* 400:
* description: Missing required parameters.
* 500:
* description: Failed to save command.
*/
// POST /command_history — append a command to the user's history for a host.
// Responds 201 with the inserted row, 400 on invalid input, 500 on DB failure.
router.post(
  "/command_history",
  authenticateJWT,
  requireDataAccess,
  async (req: Request, res: Response) => {
    const userId = (req as AuthenticatedRequest).userId;
    const { hostId, command } = req.body;
    const hostIdNum = parseInt(hostId, 10);

    // Validate hostId parses to a number up front: previously a non-numeric
    // hostId passed the truthiness check, produced NaN, and surfaced as a 500
    // from the DB insert instead of a 400 (the GET route already validates
    // this way — kept consistent here).
    if (
      !isNonEmptyString(userId) ||
      !hostId ||
      Number.isNaN(hostIdNum) ||
      !isNonEmptyString(command)
    ) {
      authLogger.warn("Invalid command history save request", {
        operation: "command_history_save",
        userId,
        hasHostId: !!hostId,
        hasCommand: !!command,
      });
      return res.status(400).json({ error: "Missing required parameters" });
    }

    try {
      const insertData = {
        userId,
        hostId: hostIdNum,
        command: command.trim(),
      };

      const result = await db
        .insert(commandHistory)
        .values(insertData)
        .returning();

      res.status(201).json(result[0]);
    } catch (err) {
      authLogger.error("Failed to save command to history", err);
      res.status(500).json({
        error: err instanceof Error ? err.message : "Failed to save command",
      });
    }
  },
);
/**
* @openapi
* /terminal/command_history/{hostId}:
* get:
* summary: Get command history
* description: Retrieves the command history for a specific host.
* tags:
* - Terminal
* parameters:
* - in: path
* name: hostId
* required: true
* schema:
* type: integer
* responses:
* 200:
* description: A list of commands.
* 400:
* description: Invalid request parameters.
* 500:
* description: Failed to fetch history.
*/
// GET /command_history/:hostId — up to 500 distinct commands for a host,
// most recently executed first.
router.get(
  "/command_history/:hostId",
  authenticateJWT,
  requireDataAccess,
  async (req: Request, res: Response) => {
    const userId = (req as AuthenticatedRequest).userId;
    const hostIdNum = parseInt(req.params.hostId, 10);

    if (!isNonEmptyString(userId) || isNaN(hostIdNum)) {
      authLogger.warn("Invalid command history fetch request", {
        userId,
        hostId: hostIdNum,
      });
      return res.status(400).json({ error: "Invalid request parameters" });
    }

    try {
      // Group by command text so repeats collapse to one entry, keeping the
      // latest execution time for ordering.
      const rows = await db
        .select({
          command: commandHistory.command,
          maxExecutedAt: sql<number>`MAX(${commandHistory.executedAt})`,
        })
        .from(commandHistory)
        .where(
          and(
            eq(commandHistory.userId, userId),
            eq(commandHistory.hostId, hostIdNum),
          ),
        )
        .groupBy(commandHistory.command)
        .orderBy(desc(sql`MAX(${commandHistory.executedAt})`))
        .limit(500);

      res.json(rows.map((row) => row.command));
    } catch (err) {
      authLogger.error("Failed to fetch command history", err);
      res.status(500).json({
        error: err instanceof Error ? err.message : "Failed to fetch history",
      });
    }
  },
);
/**
* @openapi
* /terminal/command_history/delete:
* post:
* summary: Delete a specific command from history
* description: Deletes a specific command from the history of a host.
* tags:
* - Terminal
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* hostId:
* type: integer
* command:
* type: string
* responses:
* 200:
* description: Command deleted successfully.
* 400:
* description: Missing required parameters.
* 500:
* description: Failed to delete command.
*/
router.post(
  "/command_history/delete",
  authenticateJWT,
  requireDataAccess,
  async (req: Request, res: Response) => {
    // Deletes one specific command from the user's history for a host.
    const userId = (req as AuthenticatedRequest).userId;
    const { hostId, command } = req.body;
    // Validate hostId numerically up front: previously a non-numeric hostId
    // produced a NaN filter that matched nothing, so the endpoint reported
    // success without deleting anything. Mirrors the validation used by the
    // GET and DELETE /command_history/:hostId handlers.
    const hostIdNum = parseInt(hostId, 10);
    if (
      !isNonEmptyString(userId) ||
      isNaN(hostIdNum) ||
      !isNonEmptyString(command)
    ) {
      authLogger.warn("Invalid command delete request", {
        operation: "command_history_delete",
        userId,
        hasHostId: !!hostId,
        hasCommand: !!command,
      });
      return res.status(400).json({ error: "Missing required parameters" });
    }
    try {
      // Scope the delete to this user + host + exact (trimmed) command.
      await db
        .delete(commandHistory)
        .where(
          and(
            eq(commandHistory.userId, userId),
            eq(commandHistory.hostId, hostIdNum),
            eq(commandHistory.command, command.trim()),
          ),
        );
      res.json({ success: true });
    } catch (err) {
      authLogger.error("Failed to delete command from history", err);
      res.status(500).json({
        error: err instanceof Error ? err.message : "Failed to delete command",
      });
    }
  },
);
/**
* @openapi
* /terminal/command_history/{hostId}:
* delete:
* summary: Clear command history
* description: Clears the entire command history for a specific host.
* tags:
* - Terminal
* parameters:
* - in: path
* name: hostId
* required: true
* schema:
* type: integer
* responses:
* 200:
* description: Command history cleared successfully.
* 400:
* description: Invalid request.
* 500:
* description: Failed to clear history.
*/
router.delete(
  "/command_history/:hostId",
  authenticateJWT,
  requireDataAccess,
  async (req: Request, res: Response) => {
    // Wipes the requesting user's entire command history for one host.
    const userId = (req as AuthenticatedRequest).userId;
    const hostIdNum = parseInt(req.params.hostId, 10);
    if (!isNonEmptyString(userId) || isNaN(hostIdNum)) {
      authLogger.warn("Invalid command history clear request");
      return res.status(400).json({ error: "Invalid request" });
    }
    try {
      await db
        .delete(commandHistory)
        .where(
          and(
            eq(commandHistory.userId, userId),
            eq(commandHistory.hostId, hostIdNum),
          ),
        );
      authLogger.success(
        `Command history cleared for host ${req.params.hostId}`,
        {
          operation: "command_history_clear_success",
          userId,
          hostId: hostIdNum,
        },
      );
      res.json({ success: true });
    } catch (err) {
      authLogger.error("Failed to clear command history", err);
      res.status(500).json({
        error: err instanceof Error ? err.message : "Failed to clear history",
      });
    }
  },
);
export default router;

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,632 @@
import { Client as SSHClient } from "ssh2";
import { WebSocketServer, WebSocket } from "ws";
import { parse as parseUrl } from "url";
import { AuthManager } from "../utils/auth-manager.js";
import { sshData, sshCredentials } from "../database/db/schema.js";
import { and, eq } from "drizzle-orm";
import { getDb } from "../database/db/index.js";
import { SimpleDBOps } from "../utils/simple-db-ops.js";
import { systemLogger } from "../utils/logger.js";
import type { SSHHost } from "../../types/index.js";
// Docker-console messages are logged through the shared system logger.
const dockerConsoleLogger = systemLogger;
// State for one live docker-console session; entries are stored in
// activeSessions keyed by a per-connection sessionId.
interface SSHSession {
  client: SSHClient; // ssh2 connection to the Docker host
  stream: any; // `docker exec` channel; null until the console shell starts
  isConnected: boolean;
  containerId?: string; // target container, recorded on "connect"
  shell?: string; // shell binary resolved inside the container (bash/sh/ash)
}
const activeSessions = new Map<string, SSHSession>();
const wss = new WebSocketServer({
  host: "0.0.0.0",
  port: 30008,
  // BUG FIX: `ws` treats a one-argument verifyClient as *synchronous* and
  // only checks its return value for truthiness. The previous implementation
  // was an async function, which always returns a Promise — a truthy value —
  // so every connection was accepted regardless of the JWT check. Using the
  // two-argument callback form makes the async verification actually gate
  // the upgrade. We also attach the decoded userId to the request so the
  // connection handler's `(req as any).userId` read works.
  verifyClient: (info, done) => {
    try {
      const url = parseUrl(info.req.url || "", true);
      const token = url.query.token as string;
      if (!token) {
        done(false, 401, "Unauthorized");
        return;
      }
      const authManager = AuthManager.getInstance();
      authManager
        .verifyJWTToken(token)
        .then((decoded) => {
          if (!decoded || !decoded.userId) {
            done(false, 401, "Unauthorized");
            return;
          }
          // Expose the authenticated user to the connection handler.
          (info.req as any).userId = decoded.userId;
          done(true);
        })
        .catch(() => {
          done(false, 401, "Unauthorized");
        });
    } catch (error) {
      done(false, 400, "Bad Request");
    }
  },
});
/**
 * Probes the container for a usable shell, trying bash, sh, then ash via
 * `docker exec … which <shell>` over the existing SSH connection. Falls back
 * to "sh" when none of the probes succeed.
 *
 * SECURITY: containerId originates from a client WebSocket message and is
 * interpolated into a remote shell command, so it must be validated to
 * prevent command injection on the SSH host.
 */
async function detectShell(
  session: SSHSession,
  containerId: string,
): Promise<string> {
  // Docker container IDs/names are hex IDs or [a-zA-Z0-9][a-zA-Z0-9_.-]*;
  // reject anything else before building the remote command line.
  if (!/^[a-zA-Z0-9][a-zA-Z0-9_.-]*$/.test(containerId)) {
    throw new Error("Invalid container ID");
  }
  const shells = ["bash", "sh", "ash"];
  for (const shell of shells) {
    try {
      await new Promise<void>((resolve, reject) => {
        session.client.exec(
          `docker exec ${containerId} which ${shell}`,
          (err, stream) => {
            if (err) return reject(err);
            let output = "";
            stream.on("data", (data: Buffer) => {
              output += data.toString();
            });
            stream.on("close", (code: number) => {
              // `which` exits 0 and prints a path when the shell exists.
              if (code === 0 && output.trim()) {
                resolve();
              } else {
                reject(new Error(`Shell ${shell} not found`));
              }
            });
            stream.stderr.on("data", () => {
              // Ignore stderr
            });
          },
        );
      });
      return shell;
    } catch {
      continue; // try the next candidate shell
    }
  }
  // Nothing probed successfully; "sh" is the most likely universal fallback.
  return "sh";
}
/**
 * Builds a chain of SSH connections through the configured jump hosts, in
 * order. Each hop after the first is tunnelled through the previous hop via
 * forwardOut. Returns the innermost connected client (the one the caller
 * should forward the final connection through), or null when no jump hosts
 * are configured.
 *
 * Throws when a jump host row cannot be found for this user; connection
 * errors from ssh2 propagate as rejections.
 */
async function createJumpHostChain(
  jumpHosts: any[],
  userId: string,
): Promise<SSHClient | null> {
  if (!jumpHosts || jumpHosts.length === 0) {
    return null;
  }
  let currentClient: SSHClient | null = null;
  for (let i = 0; i < jumpHosts.length; i++) {
    const jumpHostId = jumpHosts[i].hostId;
    // Look up the jump host row, scoped to the requesting user.
    const jumpHostData = await SimpleDBOps.select(
      getDb()
        .select()
        .from(sshData)
        .where(and(eq(sshData.id, jumpHostId), eq(sshData.userId, userId))),
      "ssh_data",
      userId,
    );
    if (jumpHostData.length === 0) {
      throw new Error(`Jump host ${jumpHostId} not found`);
    }
    const jumpHost = jumpHostData[0] as unknown as SSHHost;
    // jumpHosts may be stored as a JSON string; normalize to an array.
    if (typeof jumpHost.jumpHosts === "string" && jumpHost.jumpHosts) {
      try {
        jumpHost.jumpHosts = JSON.parse(jumpHost.jumpHosts);
      } catch (e) {
        dockerConsoleLogger.error("Failed to parse jump hosts", e, {
          hostId: jumpHost.id,
        });
        jumpHost.jumpHosts = [];
      }
    }
    // Default to auth material stored directly on the host row…
    let resolvedCredentials: any = {
      password: jumpHost.password,
      sshKey: jumpHost.key,
      keyPassword: jumpHost.keyPassword,
      authType: jumpHost.authType,
    };
    // …but prefer a linked credential record when one is configured.
    if (jumpHost.credentialId) {
      const credentials = await SimpleDBOps.select(
        getDb()
          .select()
          .from(sshCredentials)
          .where(
            and(
              eq(sshCredentials.id, jumpHost.credentialId as number),
              eq(sshCredentials.userId, userId),
            ),
          ),
        "ssh_credentials",
        userId,
      );
      if (credentials.length > 0) {
        const credential = credentials[0];
        // Credential fields may appear in snake_case or camelCase depending
        // on the query path, so read both spellings.
        resolvedCredentials = {
          password: credential.password,
          sshKey:
            credential.private_key || credential.privateKey || credential.key,
          keyPassword: credential.key_password || credential.keyPassword,
          authType: credential.auth_type || credential.authType,
        };
      }
    }
    const client = new SSHClient();
    const config: any = {
      host: jumpHost.ip,
      port: jumpHost.port || 22,
      username: jumpHost.username,
      tryKeyboard: true,
      readyTimeout: 60000,
      keepaliveInterval: 30000,
      keepaliveCountMax: 120,
      tcpKeepAlive: true,
      tcpKeepAliveInitialDelay: 30000,
    };
    if (
      resolvedCredentials.authType === "password" &&
      resolvedCredentials.password
    ) {
      config.password = resolvedCredentials.password;
    } else if (
      resolvedCredentials.authType === "key" &&
      resolvedCredentials.sshKey
    ) {
      // Normalize line endings; ssh2 rejects keys with CR characters.
      const cleanKey = resolvedCredentials.sshKey
        .trim()
        .replace(/\r\n/g, "\n")
        .replace(/\r/g, "\n");
      config.privateKey = Buffer.from(cleanKey, "utf8");
      if (resolvedCredentials.keyPassword) {
        config.passphrase = resolvedCredentials.keyPassword;
      }
    }
    // For every hop after the first, tunnel the TCP stream through the
    // previous hop so the new connection rides inside the existing one.
    if (currentClient) {
      await new Promise<void>((resolve, reject) => {
        currentClient!.forwardOut(
          "127.0.0.1",
          0,
          jumpHost.ip,
          jumpHost.port || 22,
          (err, stream) => {
            if (err) return reject(err);
            config.sock = stream;
            resolve();
          },
        );
      });
    }
    await new Promise<void>((resolve, reject) => {
      client.on("ready", () => resolve());
      client.on("error", reject);
      client.connect(config);
    });
    currentClient = client;
  }
  return currentClient;
}
// One WebSocket connection == one docker console session. The client drives
// the session with JSON messages: connect, input, resize, disconnect, ping.
wss.on("connection", async (ws: WebSocket, req) => {
  // NOTE(review): confirm the upgrade handshake actually attaches userId to
  // the request object; if it does not, this is undefined and the credential
  // lookups below will match nothing.
  const userId = (req as any).userId;
  const sessionId = `docker-console-${Date.now()}-${Math.random()}`;
  let sshSession: SSHSession | null = null;
  ws.on("message", async (data) => {
    try {
      const message = JSON.parse(data.toString());
      switch (message.type) {
        case "connect": {
          const { hostConfig, containerId, shell, cols, rows } =
            message.data as {
              hostConfig: SSHHost;
              containerId: string;
              shell?: string;
              cols?: number;
              rows?: number;
            };
          // jumpHosts may arrive as a JSON string; normalize to an array.
          if (
            typeof hostConfig.jumpHosts === "string" &&
            hostConfig.jumpHosts
          ) {
            try {
              hostConfig.jumpHosts = JSON.parse(hostConfig.jumpHosts);
            } catch (e) {
              dockerConsoleLogger.error("Failed to parse jump hosts", e, {
                hostId: hostConfig.id,
              });
              hostConfig.jumpHosts = [];
            }
          }
          if (!hostConfig || !containerId) {
            ws.send(
              JSON.stringify({
                type: "error",
                message: "Host configuration and container ID are required",
              }),
            );
            return;
          }
          // Docker console is opt-in per host.
          if (!hostConfig.enableDocker) {
            ws.send(
              JSON.stringify({
                type: "error",
                message:
                  "Docker is not enabled for this host. Enable it in Host Settings.",
              }),
            );
            return;
          }
          try {
            // 1) Resolve auth material: host-row values by default, replaced
            // by a linked credential record when one exists.
            let resolvedCredentials: any = {
              password: hostConfig.password,
              sshKey: hostConfig.key,
              keyPassword: hostConfig.keyPassword,
              authType: hostConfig.authType,
            };
            if (hostConfig.credentialId) {
              const credentials = await SimpleDBOps.select(
                getDb()
                  .select()
                  .from(sshCredentials)
                  .where(
                    and(
                      eq(sshCredentials.id, hostConfig.credentialId as number),
                      eq(sshCredentials.userId, userId),
                    ),
                  ),
                "ssh_credentials",
                userId,
              );
              if (credentials.length > 0) {
                const credential = credentials[0];
                // Credential fields may be snake_case or camelCase.
                resolvedCredentials = {
                  password: credential.password,
                  sshKey:
                    credential.private_key ||
                    credential.privateKey ||
                    credential.key,
                  keyPassword:
                    credential.key_password || credential.keyPassword,
                  authType: credential.auth_type || credential.authType,
                };
              }
            }
            // 2) Build the ssh2 connection config.
            const client = new SSHClient();
            const config: any = {
              host: hostConfig.ip,
              port: hostConfig.port || 22,
              username: hostConfig.username,
              tryKeyboard: true,
              readyTimeout: 60000,
              keepaliveInterval: 30000,
              keepaliveCountMax: 120,
              tcpKeepAlive: true,
              tcpKeepAliveInitialDelay: 30000,
            };
            if (
              resolvedCredentials.authType === "password" &&
              resolvedCredentials.password
            ) {
              config.password = resolvedCredentials.password;
            } else if (
              resolvedCredentials.authType === "key" &&
              resolvedCredentials.sshKey
            ) {
              // Normalize line endings; keys with CR characters fail to parse.
              const cleanKey = resolvedCredentials.sshKey
                .trim()
                .replace(/\r\n/g, "\n")
                .replace(/\r/g, "\n");
              config.privateKey = Buffer.from(cleanKey, "utf8");
              if (resolvedCredentials.keyPassword) {
                config.passphrase = resolvedCredentials.keyPassword;
              }
            }
            // 3) Optionally route through the configured jump-host chain.
            if (hostConfig.jumpHosts && hostConfig.jumpHosts.length > 0) {
              const jumpClient = await createJumpHostChain(
                hostConfig.jumpHosts,
                userId,
              );
              if (jumpClient) {
                const stream = await new Promise<any>((resolve, reject) => {
                  jumpClient.forwardOut(
                    "127.0.0.1",
                    0,
                    hostConfig.ip,
                    hostConfig.port || 22,
                    (err, stream) => {
                      if (err) return reject(err);
                      resolve(stream);
                    },
                  );
                });
                config.sock = stream;
              }
            }
            // 4) Connect to the Docker host.
            await new Promise<void>((resolve, reject) => {
              client.on("ready", () => resolve());
              client.on("error", reject);
              client.connect(config);
            });
            sshSession = {
              client,
              stream: null,
              isConnected: true,
              containerId,
            };
            activeSessions.set(sessionId, sshSession);
            // 5) Validate the requested shell, or auto-detect one.
            let shellToUse = shell || "bash";
            if (shell) {
              try {
                await new Promise<void>((resolve, reject) => {
                  client.exec(
                    `docker exec ${containerId} which ${shell}`,
                    (err, stream) => {
                      if (err) return reject(err);
                      let output = "";
                      stream.on("data", (data: Buffer) => {
                        output += data.toString();
                      });
                      stream.on("close", (code: number) => {
                        if (code === 0 && output.trim()) {
                          resolve();
                        } else {
                          reject(new Error(`Shell ${shell} not available`));
                        }
                      });
                      stream.stderr.on("data", () => {
                        // Ignore stderr
                      });
                    },
                  );
                });
              } catch {
                dockerConsoleLogger.warn(
                  `Requested shell ${shell} not found, detecting available shell`,
                  {
                    operation: "shell_validation",
                    sessionId,
                    containerId,
                    requestedShell: shell,
                  },
                );
                shellToUse = await detectShell(sshSession, containerId);
              }
            } else {
              shellToUse = await detectShell(sshSession, containerId);
            }
            sshSession.shell = shellToUse;
            // 6) Launch the interactive console inside the container.
            const execCommand = `docker exec -it ${containerId} /bin/${shellToUse}`;
            client.exec(
              execCommand,
              {
                pty: {
                  term: "xterm-256color",
                  cols: cols || 80,
                  rows: rows || 24,
                },
              },
              (err, stream) => {
                if (err) {
                  dockerConsoleLogger.error(
                    "Failed to create docker exec",
                    err,
                    {
                      operation: "docker_exec",
                      sessionId,
                      containerId,
                    },
                  );
                  ws.send(
                    JSON.stringify({
                      type: "error",
                      message: `Failed to start console: ${err.message}`,
                    }),
                  );
                  return;
                }
                sshSession!.stream = stream;
                // Relay container output to the browser terminal.
                stream.on("data", (data: Buffer) => {
                  if (ws.readyState === WebSocket.OPEN) {
                    ws.send(
                      JSON.stringify({
                        type: "output",
                        data: data.toString("utf8"),
                      }),
                    );
                  }
                });
                stream.stderr.on("data", (data: Buffer) => {});
                // Stream close means the console ended; tear the session down.
                stream.on("close", () => {
                  if (ws.readyState === WebSocket.OPEN) {
                    ws.send(
                      JSON.stringify({
                        type: "disconnected",
                        message: "Console session ended",
                      }),
                    );
                  }
                  if (sshSession) {
                    sshSession.client.end();
                    activeSessions.delete(sessionId);
                  }
                });
                ws.send(
                  JSON.stringify({
                    type: "connected",
                    data: {
                      shell: shellToUse,
                      requestedShell: shell,
                      shellChanged: shell && shell !== shellToUse,
                    },
                  }),
                );
              },
            );
          } catch (error) {
            dockerConsoleLogger.error("Failed to connect to container", error, {
              operation: "console_connect",
              sessionId,
              containerId: message.data.containerId,
            });
            ws.send(
              JSON.stringify({
                type: "error",
                message:
                  error instanceof Error
                    ? error.message
                    : "Failed to connect to container",
              }),
            );
          }
          break;
        }
        case "input": {
          // Keystrokes from the browser, forwarded raw to the console pty.
          if (sshSession && sshSession.stream) {
            sshSession.stream.write(message.data);
          }
          break;
        }
        case "resize": {
          if (sshSession && sshSession.stream) {
            const { cols, rows } = message.data;
            sshSession.stream.setWindow(rows, cols);
          }
          break;
        }
        case "disconnect": {
          if (sshSession) {
            if (sshSession.stream) {
              sshSession.stream.end();
            }
            sshSession.client.end();
            activeSessions.delete(sessionId);
            ws.send(
              JSON.stringify({
                type: "disconnected",
                message: "Disconnected from container",
              }),
            );
          }
          break;
        }
        case "ping": {
          // Application-level keepalive from the client.
          if (ws.readyState === WebSocket.OPEN) {
            ws.send(JSON.stringify({ type: "pong" }));
          }
          break;
        }
        default:
          dockerConsoleLogger.warn("Unknown message type", {
            operation: "ws_message",
            type: message.type,
          });
      }
    } catch (error) {
      dockerConsoleLogger.error("WebSocket message error", error, {
        operation: "ws_message",
        sessionId,
      });
      ws.send(
        JSON.stringify({
          type: "error",
          message: error instanceof Error ? error.message : "An error occurred",
        }),
      );
    }
  });
  // Browser closed the socket: release the SSH resources.
  ws.on("close", () => {
    if (sshSession) {
      if (sshSession.stream) {
        sshSession.stream.end();
      }
      sshSession.client.end();
      activeSessions.delete(sessionId);
    }
  });
  ws.on("error", (error) => {
    dockerConsoleLogger.error("WebSocket error", error, {
      operation: "ws_error",
      sessionId,
    });
    if (sshSession) {
      if (sshSession.stream) {
        sshSession.stream.end();
      }
      sshSession.client.end();
      activeSessions.delete(sessionId);
    }
  });
});
// Graceful shutdown: end every live console session, then close the
// WebSocket server before exiting.
process.on("SIGTERM", () => {
  for (const session of activeSessions.values()) {
    if (session.stream) {
      session.stream.end();
    }
    session.client.end();
  }
  activeSessions.clear();
  wss.close(() => process.exit(0));
});

2340
src/backend/ssh/docker.ts Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -14,6 +14,7 @@ import { sshLogger } from "../utils/logger.js";
import { SimpleDBOps } from "../utils/simple-db-ops.js";
import { AuthManager } from "../utils/auth-manager.js";
import { UserCrypto } from "../utils/user-crypto.js";
import { createSocks5Connection } from "../utils/socks5-helper.js";
interface ConnectToHostData {
cols: number;
@@ -31,6 +32,13 @@ interface ConnectToHostData {
credentialId?: number;
userId?: string;
forceKeyboardInteractive?: boolean;
jumpHosts?: Array<{ hostId: number }>;
useSocks5?: boolean;
socks5Host?: string;
socks5Port?: number;
socks5Username?: string;
socks5Password?: string;
socks5ProxyChain?: unknown;
};
initialPath?: string;
executeCommand?: string;
@@ -57,6 +65,179 @@ const userCrypto = UserCrypto.getInstance();
const userConnections = new Map<string, Set<WebSocket>>();
/**
 * Loads a jump host row for this user and, when the row references a stored
 * credential, overlays the credential's auth material onto the host record.
 * Returns the merged host object, or null when the host is missing or the
 * lookup fails (errors are logged, never thrown).
 */
async function resolveJumpHost(
  hostId: number,
  userId: string,
): Promise<any | null> {
  try {
    const hosts = await SimpleDBOps.select(
      getDb()
        .select()
        .from(sshData)
        .where(and(eq(sshData.id, hostId), eq(sshData.userId, userId))),
      "ssh_data",
      userId,
    );
    if (hosts.length === 0) {
      return null;
    }
    const host = hosts[0];
    // Prefer a linked credential record over auth values on the host row.
    if (host.credentialId) {
      const credentials = await SimpleDBOps.select(
        getDb()
          .select()
          .from(sshCredentials)
          .where(
            and(
              eq(sshCredentials.id, host.credentialId as number),
              eq(sshCredentials.userId, userId),
            ),
          ),
        "ssh_credentials",
        userId,
      );
      if (credentials.length > 0) {
        const credential = credentials[0];
        // Credential fields may be snake_case or camelCase depending on the
        // query path, so read both spellings.
        return {
          ...host,
          password: credential.password,
          key:
            credential.private_key || credential.privateKey || credential.key,
          keyPassword: credential.key_password || credential.keyPassword,
          keyType: credential.key_type || credential.keyType,
          authType: credential.auth_type || credential.authType,
        };
      }
    }
    return host;
  } catch (error) {
    sshLogger.error("Failed to resolve jump host", error, {
      operation: "resolve_jump_host",
      hostId,
      userId,
    });
    return null;
  }
}
/**
 * Connects through the configured jump hosts in order, tunnelling each hop
 * after the first through the previous one via forwardOut. Returns the
 * innermost connected client, or null when there are no jump hosts or any
 * hop fails (all partially-opened clients are closed on failure).
 */
async function createJumpHostChain(
  jumpHosts: Array<{ hostId: number }>,
  userId: string,
): Promise<Client | null> {
  if (!jumpHosts || jumpHosts.length === 0) {
    return null;
  }
  let currentClient: Client | null = null;
  // Every client opened so far, kept for cleanup on any failure.
  const clients: Client[] = [];
  try {
    // Resolve all host rows up front so we fail fast before opening sockets.
    const jumpHostConfigs = await Promise.all(
      jumpHosts.map((jh) => resolveJumpHost(jh.hostId, userId)),
    );
    for (let i = 0; i < jumpHostConfigs.length; i++) {
      if (!jumpHostConfigs[i]) {
        sshLogger.error(`Jump host ${i + 1} not found`, undefined, {
          operation: "jump_host_chain",
          hostId: jumpHosts[i].hostId,
        });
        clients.forEach((c) => c.end());
        return null;
      }
    }
    for (let i = 0; i < jumpHostConfigs.length; i++) {
      const jumpHostConfig = jumpHostConfigs[i];
      const jumpClient = new Client();
      clients.push(jumpClient);
      // Resolve true on "ready", false on error or after a 30s timeout;
      // never rejects, so a failed hop cleans up and returns null below.
      const connected = await new Promise<boolean>((resolve) => {
        const timeout = setTimeout(() => {
          resolve(false);
        }, 30000);
        jumpClient.on("ready", () => {
          clearTimeout(timeout);
          resolve(true);
        });
        jumpClient.on("error", (err) => {
          clearTimeout(timeout);
          sshLogger.error(`Jump host ${i + 1} connection failed`, err, {
            operation: "jump_host_connect",
            hostId: jumpHostConfig.id,
            ip: jumpHostConfig.ip,
          });
          resolve(false);
        });
        const connectConfig: any = {
          host: jumpHostConfig.ip,
          port: jumpHostConfig.port || 22,
          username: jumpHostConfig.username,
          tryKeyboard: true,
          readyTimeout: 30000,
        };
        if (jumpHostConfig.authType === "password" && jumpHostConfig.password) {
          connectConfig.password = jumpHostConfig.password;
        } else if (jumpHostConfig.authType === "key" && jumpHostConfig.key) {
          // Normalize line endings; ssh2 rejects keys with CR characters.
          const cleanKey = jumpHostConfig.key
            .trim()
            .replace(/\r\n/g, "\n")
            .replace(/\r/g, "\n");
          connectConfig.privateKey = Buffer.from(cleanKey, "utf8");
          if (jumpHostConfig.keyPassword) {
            connectConfig.passphrase = jumpHostConfig.keyPassword;
          }
        }
        if (currentClient) {
          // Hops after the first ride inside the previous hop's connection.
          currentClient.forwardOut(
            "127.0.0.1",
            0,
            jumpHostConfig.ip,
            jumpHostConfig.port || 22,
            (err, stream) => {
              if (err) {
                clearTimeout(timeout);
                resolve(false);
                return;
              }
              connectConfig.sock = stream;
              jumpClient.connect(connectConfig);
            },
          );
        } else {
          jumpClient.connect(connectConfig);
        }
      });
      if (!connected) {
        clients.forEach((c) => c.end());
        return null;
      }
      currentClient = jumpClient;
    }
    return currentClient;
  } catch (error) {
    sshLogger.error("Failed to create jump host chain", error, {
      operation: "jump_host_chain",
    });
    clients.forEach((c) => c.end());
    return null;
  }
}
const wss = new WebSocketServer({
port: 30002,
verifyClient: async (info) => {
@@ -79,6 +260,7 @@ const wss = new WebSocketServer({
}
const existingConnections = userConnections.get(payload.userId);
if (existingConnections && existingConnections.size >= 3) {
return false;
}
@@ -147,11 +329,16 @@ wss.on("connection", async (ws: WebSocket, req) => {
let sshConn: Client | null = null;
let sshStream: ClientChannel | null = null;
let pingInterval: NodeJS.Timeout | null = null;
let keyboardInteractiveFinish: ((responses: string[]) => void) | null = null;
let totpPromptSent = false;
let totpAttempts = 0;
let totpTimeout: NodeJS.Timeout | null = null;
let isKeyboardInteractive = false;
let keyboardInteractiveResponded = false;
let isConnecting = false;
let isConnected = false;
let isCleaningUp = false;
let isShellInitializing = false;
ws.on("close", () => {
const userWs = userConnections.get(userId);
@@ -262,9 +449,15 @@ wss.on("connection", async (ws: WebSocket, req) => {
case "totp_response": {
const totpData = data as TOTPResponseData;
if (keyboardInteractiveFinish && totpData?.code) {
if (totpTimeout) {
clearTimeout(totpTimeout);
totpTimeout = null;
}
const totpCode = totpData.code;
totpAttempts++;
keyboardInteractiveFinish([totpCode]);
keyboardInteractiveFinish = null;
totpPromptSent = false;
} else {
sshLogger.warn("TOTP response received but no callback available", {
operation: "totp_response_error",
@@ -285,6 +478,10 @@ wss.on("connection", async (ws: WebSocket, req) => {
case "password_response": {
const passwordData = data as TOTPResponseData;
if (keyboardInteractiveFinish && passwordData?.code) {
if (totpTimeout) {
clearTimeout(totpTimeout);
totpTimeout = null;
}
const password = passwordData.code;
keyboardInteractiveFinish([password]);
keyboardInteractiveFinish = null;
@@ -417,10 +614,28 @@ wss.on("connection", async (ws: WebSocket, req) => {
return;
}
if (isConnecting || isConnected) {
sshLogger.warn("Connection already in progress or established", {
operation: "ssh_connect",
hostId: id,
isConnecting,
isConnected,
});
ws.send(
JSON.stringify({
type: "error",
message: "Connection already in progress",
code: "DUPLICATE_CONNECTION",
}),
);
return;
}
isConnecting = true;
sshConn = new Client();
const connectionTimeout = setTimeout(() => {
if (sshConn) {
if (sshConn && isConnecting && !isConnected) {
sshLogger.error("SSH connection timeout", undefined, {
operation: "ssh_connect",
hostId: id,
@@ -433,7 +648,7 @@ wss.on("connection", async (ws: WebSocket, req) => {
);
cleanupSSH(connectionTimeout);
}
}, 60000);
}, 120000);
let resolvedCredentials = { password, key, keyPassword, keyType, authType };
let authMethodNotAvailable = false;
@@ -498,7 +713,9 @@ wss.on("connection", async (ws: WebSocket, req) => {
sshConn.on("ready", () => {
clearTimeout(connectionTimeout);
if (!sshConn) {
const conn = sshConn;
if (!conn || isCleaningUp || !sshConn) {
sshLogger.warn(
"SSH connection was cleaned up before shell could be created",
{
@@ -507,6 +724,9 @@ wss.on("connection", async (ws: WebSocket, req) => {
ip,
port,
username,
isCleaningUp,
connNull: !conn,
sshConnNull: !sshConn,
},
);
ws.send(
@@ -519,13 +739,69 @@ wss.on("connection", async (ws: WebSocket, req) => {
return;
}
sshConn.shell(
isShellInitializing = true;
isConnecting = false;
isConnected = true;
if (!sshConn) {
sshLogger.error(
"SSH connection became null right before shell creation",
{
operation: "ssh_shell",
hostId: id,
},
);
ws.send(
JSON.stringify({
type: "error",
message: "SSH connection lost during setup",
}),
);
isShellInitializing = false;
return;
}
sshLogger.info("Creating shell", {
operation: "ssh_shell_start",
hostId: id,
ip,
port,
username,
});
let shellCallbackReceived = false;
const shellTimeout = setTimeout(() => {
if (!shellCallbackReceived && isShellInitializing) {
sshLogger.error("Shell creation timeout - no response from server", {
operation: "ssh_shell_timeout",
hostId: id,
ip,
port,
username,
});
isShellInitializing = false;
ws.send(
JSON.stringify({
type: "error",
message:
"Shell creation timeout. The server may not support interactive shells or the connection was interrupted.",
}),
);
cleanupSSH(connectionTimeout);
}
}, 15000);
conn.shell(
{
rows: data.rows,
cols: data.cols,
term: "xterm-256color",
} as PseudoTtyOptions,
(err, stream) => {
shellCallbackReceived = true;
clearTimeout(shellTimeout);
isShellInitializing = false;
if (err) {
sshLogger.error("Shell error", err, {
operation: "ssh_shell",
@@ -540,6 +816,7 @@ wss.on("connection", async (ws: WebSocket, req) => {
message: "Shell error: " + err.message,
}),
);
cleanupSSH(connectionTimeout);
return;
}
@@ -589,8 +866,6 @@ wss.on("connection", async (ws: WebSocket, req) => {
);
});
setupPingInterval();
if (initialPath && initialPath.trim() !== "") {
const cdCommand = `cd "${initialPath.replace(/"/g, '\\"')}" && pwd\n`;
stream.write(cdCommand);
@@ -727,6 +1002,31 @@ wss.on("connection", async (ws: WebSocket, req) => {
sshConn.on("close", () => {
clearTimeout(connectionTimeout);
if (isShellInitializing || (isConnected && !sshStream)) {
sshLogger.warn("SSH connection closed during shell initialization", {
operation: "ssh_close_during_init",
hostId: id,
ip,
port,
username,
isShellInitializing,
hasStream: !!sshStream,
});
ws.send(
JSON.stringify({
type: "error",
message:
"Connection closed during shell initialization. The server may have rejected the shell request.",
}),
);
} else if (!sshStream) {
ws.send(
JSON.stringify({
type: "disconnected",
message: "Connection closed",
}),
);
}
cleanupSSH(connectionTimeout);
});
@@ -774,6 +1074,25 @@ wss.on("connection", async (ws: WebSocket, req) => {
finish(responses);
};
totpTimeout = setTimeout(() => {
if (keyboardInteractiveFinish) {
keyboardInteractiveFinish = null;
totpPromptSent = false;
sshLogger.warn("TOTP prompt timeout", {
operation: "totp_timeout",
hostId: id,
});
ws.send(
JSON.stringify({
type: "error",
message: "TOTP verification timeout. Please reconnect.",
}),
);
cleanupSSH(connectionTimeout);
}
}, 180000);
ws.send(
JSON.stringify({
type: "totp_required",
@@ -808,6 +1127,24 @@ wss.on("connection", async (ws: WebSocket, req) => {
finish(responses);
};
totpTimeout = setTimeout(() => {
if (keyboardInteractiveFinish) {
keyboardInteractiveFinish = null;
keyboardInteractiveResponded = false;
sshLogger.warn("Password prompt timeout", {
operation: "password_timeout",
hostId: id,
});
ws.send(
JSON.stringify({
type: "error",
message: "Password verification timeout. Please reconnect.",
}),
);
cleanupSSH(connectionTimeout);
}
}, 180000);
ws.send(
JSON.stringify({
type: "password_required",
@@ -836,9 +1173,10 @@ wss.on("connection", async (ws: WebSocket, req) => {
tryKeyboard: true,
keepaliveInterval: 30000,
keepaliveCountMax: 3,
readyTimeout: 60000,
readyTimeout: 120000,
tcpKeepAlive: true,
tcpKeepAliveInitialDelay: 30000,
timeout: 120000,
env: {
TERM: "xterm-256color",
LANG: "en_US.UTF-8",
@@ -914,9 +1252,7 @@ wss.on("connection", async (ws: WebSocket, req) => {
return;
}
if (!hostConfig.forceKeyboardInteractive) {
connectConfig.password = resolvedCredentials.password;
}
connectConfig.password = resolvedCredentials.password;
} else if (
resolvedCredentials.authType === "key" &&
resolvedCredentials.key
@@ -969,7 +1305,111 @@ wss.on("connection", async (ws: WebSocket, req) => {
return;
}
sshConn.connect(connectConfig);
if (
hostConfig.useSocks5 &&
(hostConfig.socks5Host ||
(hostConfig.socks5ProxyChain &&
(hostConfig.socks5ProxyChain as any).length > 0))
) {
try {
const socks5Socket = await createSocks5Connection(ip, port, {
useSocks5: hostConfig.useSocks5,
socks5Host: hostConfig.socks5Host,
socks5Port: hostConfig.socks5Port,
socks5Username: hostConfig.socks5Username,
socks5Password: hostConfig.socks5Password,
socks5ProxyChain: hostConfig.socks5ProxyChain as any,
});
if (socks5Socket) {
connectConfig.sock = socks5Socket;
sshConn.connect(connectConfig);
return;
}
} catch (socks5Error) {
sshLogger.error("SOCKS5 connection failed", socks5Error, {
operation: "socks5_connect",
hostId: id,
proxyHost: hostConfig.socks5Host,
proxyPort: hostConfig.socks5Port || 1080,
});
ws.send(
JSON.stringify({
type: "error",
message:
"SOCKS5 proxy connection failed: " +
(socks5Error instanceof Error
? socks5Error.message
: "Unknown error"),
}),
);
cleanupSSH(connectionTimeout);
return;
}
}
if (
hostConfig.jumpHosts &&
hostConfig.jumpHosts.length > 0 &&
hostConfig.userId
) {
try {
const jumpClient = await createJumpHostChain(
hostConfig.jumpHosts,
hostConfig.userId,
);
if (!jumpClient) {
sshLogger.error("Failed to establish jump host chain");
ws.send(
JSON.stringify({
type: "error",
message: "Failed to connect through jump hosts",
}),
);
cleanupSSH(connectionTimeout);
return;
}
jumpClient.forwardOut("127.0.0.1", 0, ip, port, (err, stream) => {
if (err) {
sshLogger.error("Failed to forward through jump host", err, {
operation: "ssh_jump_forward",
hostId: id,
ip,
port,
});
ws.send(
JSON.stringify({
type: "error",
message: "Failed to forward through jump host: " + err.message,
}),
);
jumpClient.end();
cleanupSSH(connectionTimeout);
return;
}
connectConfig.sock = stream;
sshConn.connect(connectConfig);
});
} catch (error) {
sshLogger.error("Jump host error", error, {
operation: "ssh_jump_host",
hostId: id,
});
ws.send(
JSON.stringify({
type: "error",
message: "Failed to connect through jump hosts",
}),
);
cleanupSSH(connectionTimeout);
return;
}
} else {
sshConn.connect(connectConfig);
}
}
function handleResize(data: ResizeData) {
@@ -982,13 +1422,31 @@ wss.on("connection", async (ws: WebSocket, req) => {
}
function cleanupSSH(timeoutId?: NodeJS.Timeout) {
if (isCleaningUp) {
return;
}
if (isShellInitializing) {
sshLogger.warn(
"Cleanup attempted during shell initialization, deferring",
{
operation: "cleanup_deferred",
userId,
},
);
setTimeout(() => cleanupSSH(timeoutId), 100);
return;
}
isCleaningUp = true;
if (timeoutId) {
clearTimeout(timeoutId);
}
if (pingInterval) {
clearInterval(pingInterval);
pingInterval = null;
if (totpTimeout) {
clearTimeout(totpTimeout);
totpTimeout = null;
}
if (sshStream) {
@@ -1016,24 +1474,21 @@ wss.on("connection", async (ws: WebSocket, req) => {
}
totpPromptSent = false;
totpAttempts = 0;
isKeyboardInteractive = false;
keyboardInteractiveResponded = false;
keyboardInteractiveFinish = null;
isConnecting = false;
isConnected = false;
isCleaningUp = false;
}
function setupPingInterval() {
pingInterval = setInterval(() => {
if (sshConn && sshStream) {
try {
sshStream.write("\x00");
} catch (e: unknown) {
sshLogger.error(
"SSH keepalive failed: " +
(e instanceof Error ? e.message : "Unknown error"),
);
cleanupSSH();
}
}
}, 60000);
}
// Note: PTY-level keepalive (writing \x00 to the stream) was removed.
// It was causing ^@ characters to appear in terminals with echoctl enabled.
// SSH-level keepalive is configured via connectConfig (keepaliveInterval,
// keepaliveCountMax, tcpKeepAlive), which handles connection health monitoring
// without producing visible output on the terminal.
//
// See: https://github.com/Termix-SSH/Support/issues/232
// See: https://github.com/Termix-SSH/Support/issues/309
});

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,101 @@
import type { Client } from "ssh2";
/**
 * Runs a single non-interactive command over an ssh2 client and resolves
 * with its collected stdout, stderr, and exit code (null when the server
 * does not report one). Rejects on exec failure, stream error, or when the
 * command exceeds timeoutMs.
 */
export function execCommand(
  client: Client,
  command: string,
  timeoutMs = 30000,
): Promise<{
  stdout: string;
  stderr: string;
  code: number | null;
}> {
  return new Promise((resolve, reject) => {
    let finished = false;
    let channel: any = null;

    // Release the timer and all stream listeners exactly once.
    function teardown() {
      clearTimeout(timer);
      if (channel) {
        try {
          channel.removeAllListeners();
          if (channel.stderr) {
            channel.stderr.removeAllListeners();
          }
          channel.destroy();
        } catch (error) {
          // Ignore cleanup errors
        }
      }
    }

    function succeed(result: {
      stdout: string;
      stderr: string;
      code: number | null;
    }) {
      if (finished) return;
      finished = true;
      teardown();
      resolve(result);
    }

    function fail(error: Error) {
      if (finished) return;
      finished = true;
      teardown();
      reject(error);
    }

    const timer = setTimeout(() => {
      fail(new Error(`Command timeout after ${timeoutMs}ms: ${command}`));
    }, timeoutMs);

    client.exec(command, { pty: false }, (err, stream) => {
      if (err) {
        fail(err);
        return;
      }
      channel = stream;
      let stdout = "";
      let stderr = "";
      stream.on("data", (chunk: Buffer) => {
        stdout += chunk.toString("utf8");
      });
      stream.on("error", (streamErr: Error) => {
        fail(streamErr);
      });
      stream.on("close", (code: number | undefined) => {
        succeed({
          stdout,
          stderr,
          code: typeof code === "number" ? code : null,
        });
      });
      if (stream.stderr) {
        stream.stderr.on("data", (chunk: Buffer) => {
          stderr += chunk.toString("utf8");
        });
        stream.stderr.on("error", (stderrErr: Error) => {
          fail(stderrErr);
        });
      }
    });
  });
}
/**
 * Round a finite number to `digits` decimal places, returning a number
 * (not a string). Null, undefined, NaN and infinities all map to null.
 */
export function toFixedNum(
  n: number | null | undefined,
  digits = 2,
): number | null {
  if (typeof n === "number" && Number.isFinite(n)) {
    return Number(n.toFixed(digits));
  }
  return null;
}
/** Convert kibibytes to gibibytes (1 GiB = 1024 * 1024 KiB). */
export function kibToGiB(kib: number): number {
  const KIB_PER_GIB = 1024 * 1024;
  return kib / KIB_PER_GIB;
}

View File

@@ -0,0 +1,91 @@
import type { Client } from "ssh2";
import { execCommand, toFixedNum } from "./common-utils.js";
/**
 * Parse the aggregate "cpu " row of /proc/stat into total and idle jiffies.
 * Idle is the 4th column plus iowait (5th column, absent on very old
 * kernels). Returns undefined for per-core rows ("cpu0", ...) or rows with
 * fewer than four numeric fields.
 */
function parseCpuLine(
  cpuLine: string,
): { total: number; idle: number } | undefined {
  const fields = cpuLine.trim().split(/\s+/);
  if (fields[0] !== "cpu") return undefined;
  const values: number[] = [];
  for (const raw of fields.slice(1)) {
    const v = Number(raw);
    if (Number.isFinite(v)) values.push(v);
  }
  if (values.length < 4) return undefined;
  let total = 0;
  for (const v of values) total += v;
  return { total, idle: (values[3] ?? 0) + (values[4] ?? 0) };
}
/**
 * Sample CPU metrics over SSH: two /proc/stat snapshots 500ms apart yield a
 * usage percentage, /proc/loadavg gives the load averages, and nproc (or a
 * /proc/cpuinfo fallback) gives the core count. Any failure — including the
 * 25s watchdog — nulls all fields.
 */
export async function collectCpuMetrics(client: Client): Promise<{
  percent: number | null;
  cores: number | null;
  load: [number, number, number] | null;
}> {
  let cpuPercent: number | null = null;
  let cores: number | null = null;
  let loadTriplet: [number, number, number] | null = null;
  // FIX: keep a handle on the watchdog timer so it can be cleared once the
  // race settles. Previously the timer kept running after success and its
  // reject() fired ~25s later into the already-settled race, surfacing as
  // an unhandled promise rejection on every successful collection (and
  // holding the event loop open).
  let watchdog: ReturnType<typeof setTimeout> | undefined;
  try {
    const [stat1, loadAvgOut, coresOut] = await Promise.race([
      Promise.all([
        execCommand(client, "cat /proc/stat"),
        execCommand(client, "cat /proc/loadavg"),
        execCommand(
          client,
          "nproc 2>/dev/null || grep -c ^processor /proc/cpuinfo",
        ),
      ]),
      new Promise<never>((_, reject) => {
        watchdog = setTimeout(
          () => reject(new Error("CPU metrics collection timeout")),
          25000,
        );
      }),
    ]);
    // Second sample after 500ms; the jiffy delta gives the average usage
    // over that window.
    await new Promise((r) => setTimeout(r, 500));
    const stat2 = await execCommand(client, "cat /proc/stat");
    const cpuLine1 = (
      stat1.stdout.split("\n").find((l) => l.startsWith("cpu ")) || ""
    ).trim();
    const cpuLine2 = (
      stat2.stdout.split("\n").find((l) => l.startsWith("cpu ")) || ""
    ).trim();
    const a = parseCpuLine(cpuLine1);
    const b = parseCpuLine(cpuLine2);
    if (a && b) {
      const totalDiff = b.total - a.total;
      const idleDiff = b.idle - a.idle;
      const used = totalDiff - idleDiff;
      // Clamp to [0, 100]; counters can be equal on an idle window.
      if (totalDiff > 0)
        cpuPercent = Math.max(0, Math.min(100, (used / totalDiff) * 100));
    }
    // /proc/loadavg: "1min 5min 15min running/total lastpid".
    const laParts = loadAvgOut.stdout.trim().split(/\s+/);
    if (laParts.length >= 3) {
      loadTriplet = [
        Number(laParts[0]),
        Number(laParts[1]),
        Number(laParts[2]),
      ].map((v) => (Number.isFinite(v) ? Number(v) : 0)) as [
        number,
        number,
        number,
      ];
    }
    const coresNum = Number((coresOut.stdout || "").trim());
    cores = Number.isFinite(coresNum) && coresNum > 0 ? coresNum : null;
  } catch (e) {
    cpuPercent = null;
    cores = null;
    loadTriplet = null;
  } finally {
    if (watchdog !== undefined) clearTimeout(watchdog);
  }
  return {
    percent: toFixedNum(cpuPercent, 0),
    cores,
    load: loadTriplet,
  };
}

View File

@@ -0,0 +1,67 @@
import type { Client } from "ssh2";
import { execCommand, toFixedNum } from "./common-utils.js";
/**
 * Collect root-filesystem usage via two df invocations: human-readable
 * sizes for display and byte counts for an exact percentage. Every field
 * falls back to null when the output cannot be parsed or a command fails.
 */
export async function collectDiskMetrics(client: Client): Promise<{
  percent: number | null;
  usedHuman: string | null;
  totalHuman: string | null;
  availableHuman: string | null;
}> {
  let percent: number | null = null;
  let used: string | null = null;
  let total: string | null = null;
  let available: string | null = null;
  try {
    const [humanOut, bytesOut] = await Promise.all([
      execCommand(client, "df -h -P / | tail -n +2"),
      execCommand(client, "df -B1 -P / | tail -n +2"),
    ]);
    // First non-empty, trimmed line of a df listing.
    const firstLine = (out: { stdout: string }) =>
      out.stdout
        .split("\n")
        .map((l) => l.trim())
        .filter(Boolean)[0] || "";
    const humanCols = firstLine(humanOut).split(/\s+/);
    const byteCols = firstLine(bytesOut).split(/\s+/);
    // df -P rows have 6 columns: Filesystem Size Used Avail Use% Mount.
    if (humanCols.length >= 6 && byteCols.length >= 6) {
      total = humanCols[1] || null;
      used = humanCols[2] || null;
      available = humanCols[3] || null;
      const totalBytes = Number(byteCols[1]);
      const usedBytes = Number(byteCols[2]);
      if (
        Number.isFinite(totalBytes) &&
        Number.isFinite(usedBytes) &&
        totalBytes > 0
      ) {
        percent = Math.max(0, Math.min(100, (usedBytes / totalBytes) * 100));
      }
    }
  } catch {
    percent = null;
    used = null;
    total = null;
    available = null;
  }
  return {
    percent: toFixedNum(percent, 0),
    usedHuman: used,
    totalHuman: total,
    availableHuman: available,
  };
}

View File

@@ -0,0 +1,254 @@
import type { Client } from "ssh2";
import { execCommand } from "./common-utils.js";
import type {
FirewallMetrics,
FirewallChain,
FirewallRule,
} from "../../../types/stats-widgets.js";
/**
 * Parse one `iptables-save` rule line ("-A CHAIN ..."). Returns null for
 * anything that is not an append-rule line. Unmatched fields keep their
 * permissive defaults (protocol "all", any source/destination).
 */
function parseIptablesRule(line: string): FirewallRule | null {
  if (!line.startsWith("-A ")) return null;
  const rule: FirewallRule = {
    chain: "",
    target: "",
    protocol: "all",
    source: "0.0.0.0/0",
    destination: "0.0.0.0/0",
  };
  // First capture group of `re` against the line, or undefined.
  const capture = (re: RegExp): string | undefined => {
    const m = line.match(re);
    return m ? m[1] : undefined;
  };
  rule.chain = capture(/^-A\s+(\S+)/) ?? rule.chain;
  rule.target = capture(/-j\s+(\S+)/) ?? rule.target;
  rule.protocol = capture(/-p\s+(\S+)/) ?? rule.protocol;
  rule.source = capture(/-s\s+(\S+)/) ?? rule.source;
  rule.destination = capture(/-d\s+(\S+)/) ?? rule.destination;
  // Optional fields are only set when present on the rule.
  const dport = capture(/--dport\s+(\S+)/);
  if (dport !== undefined) rule.dport = dport;
  const sport = capture(/--sport\s+(\S+)/);
  if (sport !== undefined) rule.sport = sport;
  const state = capture(/--state\s+(\S+)/);
  if (state !== undefined) rule.state = state;
  const iface = capture(/-i\s+(\S+)/);
  if (iface !== undefined) rule.interface = iface;
  return rule;
}
/**
 * Parse full `iptables-save` output into chains. ":CHAIN POLICY" header
 * lines establish a chain's default policy; "-A ..." lines append rules.
 * A chain referenced by a rule but missing a header defaults to ACCEPT.
 */
function parseIptablesOutput(output: string): FirewallChain[] {
  const byName = new Map<string, FirewallChain>();
  for (const raw of output.split("\n")) {
    const line = raw.trim();
    const header = line.match(/^:(\S+)\s+(\S+)/);
    if (header) {
      byName.set(header[1], {
        name: header[1],
        policy: header[2],
        rules: [],
      });
      continue;
    }
    const rule = parseIptablesRule(line);
    if (!rule) continue;
    let chain = byName.get(rule.chain);
    if (chain === undefined) {
      chain = { name: rule.chain, policy: "ACCEPT", rules: [] };
      byName.set(rule.chain, chain);
    }
    chain.rules.push(rule);
  }
  return [...byName.values()];
}
/**
 * Parse `nft list ruleset` output into a chain/rule model roughly matching
 * the iptables representation. Chain names, policies and ct states are
 * upper-cased; only lines carrying a recognizable verdict (accept / drop /
 * reject) become rules — counters, braces and type lines are dropped.
 */
function parseNftablesOutput(output: string): FirewallChain[] {
  const chains: FirewallChain[] = [];
  let current: FirewallChain | null = null;

  const finishCurrent = () => {
    if (current) {
      chains.push(current);
      current = null;
    }
  };

  for (const raw of output.split("\n")) {
    const line = raw.trim();

    // "chain NAME { ..." opens a new chain, implicitly closing the previous.
    const header = line.match(
      /chain\s+(\S+)\s*\{?\s*(?:type\s+\S+\s+hook\s+(\S+))?/,
    );
    if (header) {
      if (current) chains.push(current);
      current = {
        name: header[1].toUpperCase(),
        policy: "ACCEPT",
        rules: [],
      };
      continue;
    }

    if (current && line.startsWith("policy ")) {
      const policy = line.match(/policy\s+(\S+)/);
      if (policy) current.policy = policy[1].toUpperCase();
      continue;
    }

    if (current && line && !line.startsWith("}")) {
      const rule: FirewallRule = {
        chain: current.name,
        target: "",
        protocol: "all",
        source: "0.0.0.0/0",
        destination: "0.0.0.0/0",
      };
      if (line.includes("accept")) rule.target = "ACCEPT";
      else if (line.includes("drop")) rule.target = "DROP";
      else if (line.includes("reject")) rule.target = "REJECT";

      const tcp = line.match(/tcp\s+dport\s+(\S+)/);
      if (tcp) {
        rule.protocol = "tcp";
        rule.dport = tcp[1];
      }
      const udp = line.match(/udp\s+dport\s+(\S+)/);
      if (udp) {
        rule.protocol = "udp";
        rule.dport = udp[1];
      }
      const saddr = line.match(/saddr\s+(\S+)/);
      if (saddr) rule.source = saddr[1];
      const daddr = line.match(/daddr\s+(\S+)/);
      if (daddr) rule.destination = daddr[1];
      const iif = line.match(/iif\s+"?(\S+)"?/);
      if (iif) rule.interface = iif[1].replace(/"/g, "");
      const ctState = line.match(/ct\s+state\s+(\S+)/);
      if (ctState) rule.state = ctState[1].toUpperCase();

      // Lines without a verdict are structural noise, not rules.
      if (rule.target) current.rules.push(rule);
    }

    if (line === "}") finishCurrent();
  }

  finishCurrent();
  return chains;
}
/**
 * Detect the active firewall and return its rule set. Tries iptables-save
 * first, then nftables; reports type "none" when neither produces usable
 * output or when a command fails.
 */
export async function collectFirewallMetrics(
  client: Client,
): Promise<FirewallMetrics> {
  const none = (): FirewallMetrics => ({
    type: "none",
    status: "unknown",
    chains: [],
  });
  try {
    const iptables = await execCommand(
      client,
      "iptables-save 2>/dev/null",
      15000,
    );
    // A real iptables ruleset always declares the filter table.
    if (iptables.stdout && iptables.stdout.includes("*filter")) {
      const chains = parseIptablesOutput(iptables.stdout);
      const active = chains.some((c) => c.rules.length > 0);
      // Only the three standard filter chains are surfaced to the UI.
      const wanted = new Set(["INPUT", "OUTPUT", "FORWARD"]);
      return {
        type: "iptables",
        status: active ? "active" : "inactive",
        chains: chains.filter((c) => wanted.has(c.name)),
      };
    }
    const nft = await execCommand(
      client,
      "nft list ruleset 2>/dev/null",
      15000,
    );
    if (nft.stdout && nft.stdout.trim()) {
      const chains = parseNftablesOutput(nft.stdout);
      const active = chains.some((c) => c.rules.length > 0);
      return {
        type: "nftables",
        status: active ? "active" : "inactive",
        chains,
      };
    }
    return none();
  } catch {
    return none();
  }
}

View File

@@ -0,0 +1,137 @@
import type { Client } from "ssh2";
import { execCommand } from "./common-utils.js";
import { statsLogger } from "../../utils/logger.js";
// One entry of login history: either a successful login parsed from `last`
// or a failed SSH authentication attempt parsed from the auth logs.
export interface LoginRecord {
  user: string;
  // Source IP, or "local" for console logins / "unknown" when unparsable.
  ip: string;
  // ISO-8601 timestamp; best-effort, falls back to collection time.
  time: string;
  status: "success" | "failed";
}
// Aggregated login activity returned by collectLoginStats.
export interface LoginStats {
  // At most 10 most recent successful logins.
  recentLogins: LoginRecord[];
  // At most 10 most recent failed attempts.
  failedLogins: LoginRecord[];
  // Number of parsed successful logins (before the 10-entry truncation).
  totalLogins: number;
  // Distinct non-local source IPs across successes and failures.
  uniqueIPs: number;
}
/**
 * Build login-activity statistics from two best-effort sources:
 *  1) `last` (wtmp) for recent successful logins,
 *  2) auth.log (Debian) or secure (RHEL) for failed SSH password attempts.
 * Each section is independently fault-tolerant: a command or parse failure
 * just leaves that list empty rather than failing the whole collection.
 */
export async function collectLoginStats(client: Client): Promise<LoginStats> {
  const recentLogins: LoginRecord[] = [];
  const failedLogins: LoginRecord[] = [];
  // Distinct remote IPs across both successful and failed logins.
  const ipSet = new Set<string>();
  try {
    // -F: full dates, -w: full user/host columns; drop reboot/wtmp noise rows.
    const lastOut = await execCommand(
      client,
      "last -n 20 -F -w | grep -v 'reboot' | grep -v 'wtmp' | head -20",
    );
    const lastLines = lastOut.stdout
      .split("\n")
      .map((l) => l.trim())
      .filter(Boolean);
    for (const line of lastLines) {
      const parts = line.split(/\s+/);
      // A full `last -F` row has at least: user tty host + 5 timestamp tokens.
      if (parts.length >= 10) {
        const user = parts[0];
        const tty = parts[1];
        // Console logins carry ":0"-style X displays in the host column.
        const ip =
          parts[2] === ":" || parts[2].startsWith(":") ? "local" : parts[2];
        // Locate the weekday token that begins the timestamp; indexOf("")
        // yields -1 when no weekday token exists, failing the guard below.
        const timeStart = parts.indexOf(
          parts.find((p) => /^(Mon|Tue|Wed|Thu|Fri|Sat|Sun)/.test(p)) || "",
        );
        if (timeStart > 0 && parts.length > timeStart + 4) {
          // "Day Mon D HH:MM:SS YYYY" — five whitespace-separated tokens.
          const timeStr = parts.slice(timeStart, timeStart + 5).join(" ");
          if (user && user !== "wtmp" && tty !== "system") {
            // Normalize to ISO-8601; fall back to "now" on parse failure.
            let parsedTime: string;
            try {
              const date = new Date(timeStr);
              parsedTime = isNaN(date.getTime())
                ? new Date().toISOString()
                : date.toISOString();
            } catch (e) {
              parsedTime = new Date().toISOString();
            }
            recentLogins.push({
              user,
              ip,
              time: parsedTime,
              status: "success",
            });
            if (ip !== "local") {
              ipSet.add(ip);
            }
          }
        }
      }
    }
  } catch (e) {}
  try {
    // Try Debian-style auth.log, then RHEL-style secure; the trailing echo
    // keeps the pipeline from failing when neither file is readable.
    const failedOut = await execCommand(
      client,
      "grep 'Failed password' /var/log/auth.log 2>/dev/null | tail -10 || grep 'authentication failure' /var/log/secure 2>/dev/null | tail -10 || echo ''",
    );
    const failedLines = failedOut.stdout
      .split("\n")
      .map((l) => l.trim())
      .filter(Boolean);
    for (const line of failedLines) {
      let user = "unknown";
      let ip = "unknown";
      let timeStr = "";
      // e.g. "Failed password for invalid user bob from 1.2.3.4 port 22".
      const userMatch = line.match(/for (?:invalid user )?(\S+)/);
      if (userMatch) {
        user = userMatch[1];
      }
      const ipMatch = line.match(/from (\d+\.\d+\.\d+\.\d+)/);
      if (ipMatch) {
        ip = ipMatch[1];
      }
      // Classic syslog timestamps omit the year; prepend the current one.
      // NOTE(review): around New Year this dates December entries into the
      // new year — likely acceptable for a dashboard, but worth confirming.
      const dateMatch = line.match(/^(\w+\s+\d+\s+\d+:\d+:\d+)/);
      if (dateMatch) {
        const currentYear = new Date().getFullYear();
        timeStr = `${currentYear} ${dateMatch[1]}`;
      }
      if (user && ip) {
        let parsedTime: string;
        try {
          const date = timeStr ? new Date(timeStr) : new Date();
          parsedTime = isNaN(date.getTime())
            ? new Date().toISOString()
            : date.toISOString();
        } catch (e) {
          parsedTime = new Date().toISOString();
        }
        failedLogins.push({
          user,
          ip,
          time: parsedTime,
          status: "failed",
        });
        if (ip !== "unknown") {
          ipSet.add(ip);
        }
      }
    }
  } catch (e) {}
  return {
    // Cap both lists at 10 entries for the UI.
    recentLogins: recentLogins.slice(0, 10),
    failedLogins: failedLogins.slice(0, 10),
    totalLogins: recentLogins.length,
    uniqueIPs: ipSet.size,
  };
}

View File

@@ -0,0 +1,41 @@
import type { Client } from "ssh2";
import { execCommand, toFixedNum, kibToGiB } from "./common-utils.js";
/**
 * Read memory usage from /proc/meminfo. Uses MemAvailable (the kernel's
 * estimate of memory available without swapping) rather than MemFree, so
 * the percentage matches tools like `free`. All fields are null when the
 * file cannot be read or the expected rows are missing.
 */
export async function collectMemoryMetrics(client: Client): Promise<{
  percent: number | null;
  usedGiB: number | null;
  totalGiB: number | null;
}> {
  let memPercent: number | null = null;
  let usedGiB: number | null = null;
  let totalGiB: number | null = null;
  try {
    const memInfo = await execCommand(client, "cat /proc/meminfo");
    const lines = memInfo.stdout.split("\n");
    // Numeric (KiB) value of a meminfo row, e.g. "MemTotal: 123 kB".
    const getVal = (key: string) => {
      const line = lines.find((l) => l.startsWith(key));
      if (!line) return null;
      const m = line.match(/\d+/);
      return m ? Number(m[0]) : null;
    };
    const totalKb = getVal("MemTotal:");
    const availKb = getVal("MemAvailable:");
    // FIX: compare against null instead of relying on truthiness so a
    // legitimate MemAvailable of 0 (memory fully exhausted) is not skipped.
    if (totalKb != null && availKb != null && totalKb > 0) {
      const usedKb = totalKb - availKb;
      memPercent = Math.max(0, Math.min(100, (usedKb / totalKb) * 100));
      usedGiB = kibToGiB(usedKb);
      totalGiB = kibToGiB(totalKb);
    }
  } catch (e) {
    memPercent = null;
    usedGiB = null;
    totalGiB = null;
  }
  return {
    percent: toFixedNum(memPercent, 0),
    // FIX: toFixedNum already maps null to null, so no truthiness check is
    // needed — the old `usedGiB ? … : null` would have dropped an exact 0.
    usedGiB: toFixedNum(usedGiB, 2),
    totalGiB: toFixedNum(totalGiB, 2),
  };
}

View File

@@ -0,0 +1,74 @@
import type { Client } from "ssh2";
import { execCommand } from "./common-utils.js";
import { statsLogger } from "../../utils/logger.js";
/**
 * List non-loopback network interfaces with their first address and link
 * state, gathered via `ip -o`. Byte counters are not collected by this
 * routine and are reported as null.
 */
export async function collectNetworkMetrics(client: Client): Promise<{
  interfaces: Array<{
    name: string;
    ip: string;
    state: string;
    rxBytes: string | null;
    txBytes: string | null;
  }>;
}> {
  const interfaces: Array<{
    name: string;
    ip: string;
    state: string;
    rxBytes: string | null;
    txBytes: string | null;
  }> = [];
  try {
    const addrOut = await execCommand(
      client,
      "ip -o addr show | awk '{print $2,$4}' | grep -v '^lo'",
    );
    const linkOut = await execCommand(
      client,
      "ip -o link show | awk '{gsub(/:/, \"\", $2); print $2,$9}'",
    );
    const nonEmptyLines = (s: string) =>
      s
        .split("\n")
        .map((l) => l.trim())
        .filter(Boolean);
    const byName = new Map<string, { ip: string; state: string }>();
    // First pass: record each interface's first address, CIDR suffix stripped.
    for (const line of nonEmptyLines(addrOut.stdout)) {
      const cols = line.split(/\s+/);
      if (cols.length < 2) continue;
      if (!byName.has(cols[0])) {
        byName.set(cols[0], { ip: cols[1].split("/")[0], state: "UNKNOWN" });
      }
    }
    // Second pass: attach the link state to interfaces found above.
    for (const line of nonEmptyLines(linkOut.stdout)) {
      const cols = line.split(/\s+/);
      if (cols.length < 2) continue;
      const entry = byName.get(cols[0]);
      if (entry) {
        entry.state = cols[1];
      }
    }
    for (const [name, info] of byName) {
      interfaces.push({
        name,
        ip: info.ip,
        state: info.state,
        rxBytes: null,
        txBytes: null,
      });
    }
  } catch {
    // Best-effort: an unavailable `ip` binary just yields an empty list.
  }
  return { interfaces };
}

View File

@@ -0,0 +1,155 @@
import type { Client } from "ssh2";
import { execCommand } from "./common-utils.js";
import type { PortsMetrics, ListeningPort } from "../../../types/stats-widgets.js";
/**
 * Parse `ss -tulnp` output into listening-port entries. The header line is
 * skipped; rows that are not tcp/udp or lack a parsable local address:port
 * are ignored. PID/process come from the trailing users:(...) column when
 * present.
 */
function parseSsOutput(output: string): ListeningPort[] {
  const result: ListeningPort[] = [];
  const [, ...rows] = output.split("\n");
  for (const row of rows) {
    const line = row.trim();
    if (!line) continue;
    const cols = line.split(/\s+/);
    if (cols.length < 5) continue;
    const proto = cols[0]?.toLowerCase();
    if (proto !== "tcp" && proto !== "udp") continue;
    const local = cols[4];
    if (!local) continue;
    // Split on the LAST colon so IPv6 literals keep their internal colons.
    const sep = local.lastIndexOf(":");
    if (sep === -1) continue;
    const port = parseInt(local.substring(sep + 1), 10);
    if (isNaN(port)) continue;
    const entry: ListeningPort = {
      protocol: proto,
      // Strip IPv6 brackets, e.g. "[::]" -> "::".
      localAddress: local.substring(0, sep).replace(/^\[|\]$/g, ""),
      localPort: port,
      // UDP sockets have no meaningful TCP state column.
      state: proto === "tcp" ? cols[1] : undefined,
    };
    const procCol = cols[6];
    if (procCol && procCol.startsWith("users:")) {
      const pid = procCol.match(/pid=(\d+)/);
      const name = procCol.match(/\("([^"]+)"/);
      if (pid) entry.pid = parseInt(pid[1], 10);
      if (name) entry.process = name[1];
    }
    result.push(entry);
  }
  return result;
}
/**
 * Parse `netstat -tulnp` output. Handles protocol names like "tcp6"/"udp6",
 * silently skips header and non-socket lines, and extracts PID/program from
 * the trailing "PID/name" column when present.
 */
function parseNetstatOutput(output: string): ListeningPort[] {
  const result: ListeningPort[] = [];
  for (const raw of output.split("\n")) {
    const line = raw.trim();
    if (!line) continue;
    const cols = line.split(/\s+/);
    if (cols.length < 4) continue;
    const protoCol = cols[0]?.toLowerCase();
    if (!protoCol) continue;
    let protocol: "tcp" | "udp";
    if (protoCol.startsWith("tcp")) {
      protocol = "tcp";
    } else if (protoCol.startsWith("udp")) {
      protocol = "udp";
    } else {
      continue;
    }
    const local = cols[3];
    if (!local) continue;
    // Last colon separates the port from (possibly IPv6) address text.
    const sep = local.lastIndexOf(":");
    if (sep === -1) continue;
    const port = parseInt(local.substring(sep + 1), 10);
    if (isNaN(port)) continue;
    const entry: ListeningPort = {
      protocol,
      localAddress: local.substring(0, sep),
      localPort: port,
    };
    // TCP rows carry a State column; UDP rows do not.
    if (protocol === "tcp" && cols.length >= 6) {
      entry.state = cols[5];
    }
    const tail = cols[cols.length - 1];
    if (tail && tail.includes("/")) {
      const [pidPart, programPart] = tail.split("/");
      const pid = parseInt(pidPart, 10);
      if (!isNaN(pid)) entry.pid = pid;
      if (programPart) entry.process = programPart;
    }
    result.push(entry);
  }
  return result;
}
export async function collectPortsMetrics(
client: Client,
): Promise<PortsMetrics> {
try {
const ssResult = await execCommand(
client,
"ss -tulnp 2>/dev/null",
15000,
);
if (ssResult.stdout && ssResult.stdout.includes("Local")) {
const ports = parseSsOutput(ssResult.stdout);
return {
source: "ss",
ports: ports.sort((a, b) => a.localPort - b.localPort),
};
}
const netstatResult = await execCommand(
client,
"netstat -tulnp 2>/dev/null",
15000,
);
if (netstatResult.stdout && netstatResult.stdout.includes("Local")) {
const ports = parseNetstatOutput(netstatResult.stdout);
return {
source: "netstat",
ports: ports.sort((a, b) => a.localPort - b.localPort),
};
}
return {
source: "none",
ports: [],
};
} catch {
return {
source: "none",
ports: [],
};
}
}

View File

@@ -0,0 +1,64 @@
import type { Client } from "ssh2";
import { execCommand } from "./common-utils.js";
import { statsLogger } from "../../utils/logger.js";
/**
 * Collect process counts and the top-10 CPU consumers via `ps aux`.
 * Counts are null and the top list empty when the commands fail.
 */
export async function collectProcessesMetrics(client: Client): Promise<{
  total: number | null;
  running: number | null;
  top: Array<{
    pid: string;
    user: string;
    cpu: string;
    mem: string;
    command: string;
  }>;
}> {
  let totalProcesses: number | null = null;
  let runningProcesses: number | null = null;
  const topProcesses: Array<{
    pid: string;
    user: string;
    cpu: string;
    mem: string;
    command: string;
  }> = [];
  try {
    // FIX: the three ps invocations are independent — run them in parallel
    // instead of three serial SSH round-trips.
    const [psOut, procCount, runningCount] = await Promise.all([
      execCommand(client, "ps aux --sort=-%cpu | head -n 11"),
      execCommand(client, "ps aux | wc -l"),
      execCommand(client, "ps aux | grep -c ' R '"),
    ]);
    const psLines = psOut.stdout
      .split("\n")
      .map((l) => l.trim())
      .filter(Boolean);
    if (psLines.length > 1) {
      // Skip the header; ps aux columns are
      // USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND.
      for (let i = 1; i < Math.min(psLines.length, 11); i++) {
        const parts = psLines[i].split(/\s+/);
        if (parts.length >= 11) {
          const cpuVal = Number(parts[2]);
          const memVal = Number(parts[3]);
          topProcesses.push({
            pid: parts[1],
            user: parts[0],
            cpu: Number.isFinite(cpuVal) ? cpuVal.toString() : "0",
            mem: Number.isFinite(memVal) ? memVal.toString() : "0",
            // The command may contain spaces; keep a bounded preview.
            command: parts.slice(10).join(" ").substring(0, 50),
          });
        }
      }
    }
    // Subtract 1 for the ps header line.
    const totalCount = Number(procCount.stdout.trim()) - 1;
    totalProcesses = Number.isFinite(totalCount) ? totalCount : null;
    // NOTE(review): grepping ' R ' across full ps output can over-count when
    // a command line itself contains " R "; kept as-is to preserve behavior.
    const running = Number(runningCount.stdout.trim());
    runningProcesses = Number.isFinite(running) ? running : null;
  } catch (e) {
    // Best-effort: leave nulls / empty list on any failure.
  }
  return {
    total: totalProcesses,
    running: runningProcesses,
    top: topProcesses,
  };
}

View File

@@ -0,0 +1,34 @@
import type { Client } from "ssh2";
import { execCommand } from "./common-utils.js";
import { statsLogger } from "../../utils/logger.js";
/**
 * Gather basic host identity: hostname, kernel release (`uname -r`) and the
 * OS pretty name from /etc/os-release. Any failure yields all-null fields
 * (same all-or-nothing behavior as before, since values were only assigned
 * after every command completed).
 */
export async function collectSystemMetrics(client: Client): Promise<{
  hostname: string | null;
  kernel: string | null;
  os: string | null;
}> {
  let hostname: string | null = null;
  let kernel: string | null = null;
  let os: string | null = null;
  try {
    // FIX: the three commands are independent — issue them concurrently
    // instead of awaiting each one in turn.
    const [hostnameOut, kernelOut, osOut] = await Promise.all([
      execCommand(client, "hostname"),
      execCommand(client, "uname -r"),
      execCommand(
        client,
        "cat /etc/os-release | grep '^PRETTY_NAME=' | cut -d'\"' -f2",
      ),
    ]);
    hostname = hostnameOut.stdout.trim() || null;
    kernel = kernelOut.stdout.trim() || null;
    os = osOut.stdout.trim() || null;
  } catch (e) {
    // Intentionally silent: system identity is non-critical.
  }
  return {
    hostname,
    kernel,
    os,
  };
}

View File

@@ -0,0 +1,30 @@
import type { Client } from "ssh2";
import { execCommand } from "./common-utils.js";
import { statsLogger } from "../../utils/logger.js";
/**
 * Read system uptime from /proc/uptime and format it as "Nd Nh Nm".
 * Both fields are null when the file cannot be read or parsed.
 */
export async function collectUptimeMetrics(client: Client): Promise<{
  seconds: number | null;
  formatted: string | null;
}> {
  let seconds: number | null = null;
  let formatted: string | null = null;
  try {
    const out = await execCommand(client, "cat /proc/uptime");
    // The first field of /proc/uptime is total uptime in fractional seconds.
    const firstField = out.stdout.trim().split(/\s+/)[0];
    if (firstField !== undefined) {
      seconds = Number(firstField);
      if (Number.isFinite(seconds)) {
        const days = Math.floor(seconds / 86400);
        const hours = Math.floor((seconds % 86400) / 3600);
        const minutes = Math.floor((seconds % 3600) / 60);
        formatted = `${days}d ${hours}h ${minutes}m`;
      }
    }
  } catch {
    // Best-effort: leave nulls.
  }
  return {
    seconds,
    formatted,
  };
}

View File

@@ -21,7 +21,7 @@ import { systemLogger, versionLogger } from "./utils/logger.js";
if (persistentConfig.parsed) {
Object.assign(process.env, persistentConfig.parsed);
}
} catch {}
} catch (error) {}
let version = "unknown";
@@ -102,6 +102,8 @@ import { systemLogger, versionLogger } from "./utils/logger.js";
await import("./ssh/tunnel.js");
await import("./ssh/file-manager.js");
await import("./ssh/server-stats.js");
await import("./ssh/docker.js");
await import("./ssh/docker-console.js");
await import("./dashboard.js");
process.on("SIGINT", () => {

145
src/backend/swagger.ts Normal file
View File

@@ -0,0 +1,145 @@
import swaggerJSDoc from "swagger-jsdoc";
import path from "path";
import { fileURLToPath } from "url";
import { promises as fs } from "fs";
// Resolve this module's location so the glob paths below can be expressed
// relative to the repository root rather than the current working directory.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// NOTE(review): assumes this file sits three directories below the project
// root — verify this still holds for both the TS source and compiled layout.
const projectRoot = path.join(__dirname, "..", "..", "..");
// swagger-jsdoc configuration: the static OpenAPI skeleton plus the globs
// of source files whose JSDoc annotations are harvested into the spec.
const swaggerOptions: swaggerJSDoc.Options = {
  definition: {
    openapi: "3.0.3",
    info: {
      title: "Termix API",
      // NOTE(review): placeholder version — presumably substituted at
      // build/release time; confirm.
      version: "0.0.0",
      description: "Termix Backend API Reference",
    },
    // One server entry per backend service/port.
    servers: [
      {
        url: "http://localhost:30001",
        description: "Main database and authentication server",
      },
      {
        url: "http://localhost:30003",
        description: "SSH tunnel management server",
      },
      {
        url: "http://localhost:30004",
        description: "SSH file manager server",
      },
      {
        url: "http://localhost:30005",
        description: "Server statistics and monitoring server",
      },
      {
        url: "http://localhost:30006",
        description: "Dashboard server",
      },
      {
        url: "http://localhost:30007",
        description: "Docker management server",
      },
    ],
    components: {
      securitySchemes: {
        // JWT bearer authentication, applied globally via `security` below.
        bearerAuth: {
          type: "http",
          scheme: "bearer",
          bearerFormat: "JWT",
        },
      },
      schemas: {
        // Shared error envelope referenced by route annotations.
        Error: {
          type: "object",
          properties: {
            error: { type: "string" },
            details: { type: "string" },
          },
        },
      },
    },
    // Require bearer auth by default; individual routes may override.
    security: [
      {
        bearerAuth: [],
      },
    ],
    // Tag registry that route-level JSDoc blocks reference for grouping.
    tags: [
      {
        name: "Alerts",
        description: "System alerts and notifications management",
      },
      {
        name: "Credentials",
        description: "SSH credential management",
      },
      {
        name: "Network Topology",
        description: "Network topology visualization and management",
      },
      {
        name: "RBAC",
        description: "Role-based access control for host sharing",
      },
      {
        name: "Snippets",
        description: "Command snippet management",
      },
      {
        name: "Terminal",
        description: "Terminal command history",
      },
      {
        name: "Users",
        description: "User management and authentication",
      },
      {
        name: "Dashboard",
        description: "Dashboard statistics and activity",
      },
      {
        name: "Docker",
        description: "Docker container management",
      },
      {
        name: "SSH Tunnels",
        description: "SSH tunnel connection management",
      },
      {
        name: "Server Stats",
        description: "Server status monitoring and metrics collection",
      },
      {
        name: "File Manager",
        description: "SSH file management operations",
      },
    ],
  },
  // Source files scanned by swagger-jsdoc for API annotations.
  apis: [
    path.join(projectRoot, "src", "backend", "database", "routes", "*.ts"),
    path.join(projectRoot, "src", "backend", "dashboard.ts"),
    path.join(projectRoot, "src", "backend", "ssh", "*.ts"),
  ],
};
/**
 * Render the swagger-jsdoc options into a single OpenAPI document and write
 * it to <projectRoot>/openapi.json. Exits the process with code 1 on
 * failure so a build/CI invocation notices generation errors.
 */
async function generateOpenAPISpec() {
  try {
    const swaggerSpec = swaggerJSDoc(swaggerOptions);
    const outputPath = path.join(projectRoot, "openapi.json");
    await fs.writeFile(
      outputPath,
      JSON.stringify(swaggerSpec, null, 2),
      "utf-8",
    );
  } catch (error) {
    console.error("Failed to generate OpenAPI specification:", error);
    process.exit(1);
  }
}
// Runs immediately on import: this module doubles as a CLI-style script.
generateOpenAPISpec();
export { swaggerOptions, generateOpenAPISpec };

View File

@@ -85,24 +85,25 @@ class AuthManager {
await this.userCrypto.setupUserEncryption(userId, password);
}
async registerOIDCUser(userId: string): Promise<void> {
await this.userCrypto.setupOIDCUserEncryption(userId);
async registerOIDCUser(
userId: string,
sessionDurationMs: number,
): Promise<void> {
await this.userCrypto.setupOIDCUserEncryption(userId, sessionDurationMs);
}
async authenticateOIDCUser(userId: string): Promise<boolean> {
const authenticated = await this.userCrypto.authenticateOIDCUser(userId);
async authenticateOIDCUser(
userId: string,
deviceType?: DeviceType,
): Promise<boolean> {
const sessionDurationMs =
deviceType === "desktop" || deviceType === "mobile"
? 30 * 24 * 60 * 60 * 1000
: 7 * 24 * 60 * 60 * 1000;
if (authenticated) {
await this.performLazyEncryptionMigration(userId);
}
return authenticated;
}
async authenticateUser(userId: string, password: string): Promise<boolean> {
const authenticated = await this.userCrypto.authenticateUser(
const authenticated = await this.userCrypto.authenticateOIDCUser(
userId,
password,
sessionDurationMs,
);
if (authenticated) {
@@ -112,6 +113,33 @@ class AuthManager {
return authenticated;
}
async authenticateUser(
userId: string,
password: string,
deviceType?: DeviceType,
): Promise<boolean> {
const sessionDurationMs =
deviceType === "desktop" || deviceType === "mobile"
? 30 * 24 * 60 * 60 * 1000
: 7 * 24 * 60 * 60 * 1000;
const authenticated = await this.userCrypto.authenticateUser(
userId,
password,
sessionDurationMs,
);
if (authenticated) {
await this.performLazyEncryptionMigration(userId);
}
return authenticated;
}
async convertToOIDCEncryption(userId: string): Promise<void> {
await this.userCrypto.convertToOIDCEncryption(userId);
}
private async performLazyEncryptionMigration(userId: string): Promise<void> {
try {
const userDataKey = this.getUserDataKey(userId);
@@ -126,9 +154,8 @@ class AuthManager {
return;
}
const { getSqlite, saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
const { getSqlite, saveMemoryDatabaseToFile } =
await import("../database/db/index.js");
const sqlite = getSqlite();
@@ -141,6 +168,23 @@ class AuthManager {
if (migrationResult.migrated) {
await saveMemoryDatabaseToFile();
}
try {
const { CredentialSystemEncryptionMigration } =
await import("./credential-system-encryption-migration.js");
const credMigration = new CredentialSystemEncryptionMigration();
const credResult = await credMigration.migrateUserCredentials(userId);
if (credResult.migrated > 0) {
await saveMemoryDatabaseToFile();
}
} catch (error) {
databaseLogger.warn("Credential migration failed during login", {
operation: "login_credential_migration_failed",
userId,
error: error instanceof Error ? error.message : "Unknown error",
});
}
} catch (error) {
databaseLogger.error("Lazy encryption migration failed", error, {
operation: "lazy_encryption_migration_error",
@@ -203,9 +247,8 @@ class AuthManager {
});
try {
const { saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
const { saveMemoryDatabaseToFile } =
await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
} catch (saveError) {
databaseLogger.error(
@@ -306,9 +349,8 @@ class AuthManager {
await db.delete(sessions).where(eq(sessions.id, sessionId));
try {
const { saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
const { saveMemoryDatabaseToFile } =
await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
} catch (saveError) {
databaseLogger.error(
@@ -359,9 +401,8 @@ class AuthManager {
}
try {
const { saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
const { saveMemoryDatabaseToFile } =
await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
} catch (saveError) {
databaseLogger.error(
@@ -402,9 +443,8 @@ class AuthManager {
.where(sql`${sessions.expiresAt} < datetime('now')`);
try {
const { saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
const { saveMemoryDatabaseToFile } =
await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
} catch (saveError) {
databaseLogger.error(
@@ -540,9 +580,8 @@ class AuthManager {
.where(eq(sessions.id, payload.sessionId))
.then(async () => {
try {
const { saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
const { saveMemoryDatabaseToFile } =
await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
const remainingSessions = await db
@@ -686,9 +725,8 @@ class AuthManager {
await db.delete(sessions).where(eq(sessions.id, sessionId));
try {
const { saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
const { saveMemoryDatabaseToFile } =
await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
} catch (saveError) {
databaseLogger.error(

View File

@@ -233,7 +233,7 @@ IP.3 = 0.0.0.0
let envContent = "";
try {
envContent = await fs.readFile(this.ENV_FILE, "utf8");
} catch {}
} catch (error) {}
let updatedContent = envContent;
let hasChanges = false;

View File

@@ -0,0 +1,131 @@
import { db } from "../database/db/index.js";
import { sshCredentials } from "../database/db/schema.js";
import { eq, and, or, isNull } from "drizzle-orm";
import { DataCrypto } from "./data-crypto.js";
import { SystemCrypto } from "./system-crypto.js";
import { FieldCrypto } from "./field-crypto.js";
import { databaseLogger } from "./logger.js";
/**
 * Migration that adds system-key-encrypted copies of SSH credential secrets
 * alongside the user-DEK-encrypted originals. Only rows missing at least
 * one of the system* columns are touched, so re-running is cheap.
 * Invoked lazily during login (see the auth manager's lazy migration path).
 */
export class CredentialSystemEncryptionMigration {
  /**
   * Migrate all of `userId`'s credentials that lack system-encrypted copies.
   * Requires the user's data key (DEK) to be unlocked, i.e. the user must
   * be logged in.
   * @returns counts of migrated / failed / skipped rows.
   * @throws when the DEK is unavailable or the credential query fails;
   *         per-row failures are logged and counted, not thrown.
   */
  async migrateUserCredentials(userId: string): Promise<{
    migrated: number;
    failed: number;
    skipped: number;
  }> {
    try {
      const userDEK = DataCrypto.getUserDataKey(userId);
      if (!userDEK) {
        throw new Error("User must be logged in to migrate credentials");
      }
      const systemCrypto = SystemCrypto.getInstance();
      // Credential-sharing key: system-wide, independent of any user DEK.
      const CSKEK = await systemCrypto.getCredentialSharingKey();
      // Only rows where at least one system-encrypted column is missing.
      const credentials = await db
        .select()
        .from(sshCredentials)
        .where(
          and(
            eq(sshCredentials.userId, userId),
            or(
              isNull(sshCredentials.systemPassword),
              isNull(sshCredentials.systemKey),
              isNull(sshCredentials.systemKeyPassword),
            ),
          ),
        );
      let migrated = 0;
      let failed = 0;
      const skipped = 0; // reserved: nothing is currently skipped
      for (const cred of credentials) {
        try {
          // Decrypt each secret with the user's DEK (absent fields stay null)…
          const plainPassword = cred.password
            ? FieldCrypto.decryptField(
                cred.password,
                userDEK,
                cred.id.toString(),
                "password",
              )
            : null;
          const plainKey = cred.key
            ? FieldCrypto.decryptField(
                cred.key,
                userDEK,
                cred.id.toString(),
                "key",
              )
            : null;
          const plainKeyPassword = cred.key_password
            ? FieldCrypto.decryptField(
                cred.key_password,
                userDEK,
                cred.id.toString(),
                "key_password",
              )
            : null;
          // …then re-encrypt under the system sharing key.
          const systemPassword = plainPassword
            ? FieldCrypto.encryptField(
                plainPassword,
                CSKEK,
                cred.id.toString(),
                "password",
              )
            : null;
          const systemKey = plainKey
            ? FieldCrypto.encryptField(
                plainKey,
                CSKEK,
                cred.id.toString(),
                "key",
              )
            : null;
          const systemKeyPassword = plainKeyPassword
            ? FieldCrypto.encryptField(
                plainKeyPassword,
                CSKEK,
                cred.id.toString(),
                "key_password",
              )
            : null;
          await db
            .update(sshCredentials)
            .set({
              systemPassword,
              systemKey,
              systemKeyPassword,
              updatedAt: new Date().toISOString(),
            })
            .where(eq(sshCredentials.id, cred.id));
          migrated++;
        } catch (error) {
          // A single bad row must not abort the rest of the migration.
          databaseLogger.error("Failed to migrate credential", error, {
            credentialId: cred.id,
            userId,
          });
          failed++;
        }
      }
      return { migrated, failed, skipped };
    } catch (error) {
      databaseLogger.error(
        "Credential system encryption migration failed",
        error,
        {
          operation: "credential_migration_failed",
          userId,
          error: error instanceof Error ? error.message : "Unknown error",
        },
      );
      throw error;
    }
  }
}

View File

@@ -475,6 +475,52 @@ class DataCrypto {
return false;
}
}
/**
* Encrypt sensitive credential fields with system key for offline sharing
* Returns an object with systemPassword, systemKey, systemKeyPassword fields
*/
static async encryptRecordWithSystemKey<T extends Record<string, unknown>>(
tableName: string,
record: T,
systemKey: Buffer,
): Promise<Partial<T>> {
const systemEncrypted: Record<string, unknown> = {};
const recordId = record.id || "temp-" + Date.now();
if (tableName !== "ssh_credentials") {
return systemEncrypted as Partial<T>;
}
if (record.password && typeof record.password === "string") {
systemEncrypted.systemPassword = FieldCrypto.encryptField(
record.password as string,
systemKey,
recordId as string,
"password",
);
}
if (record.key && typeof record.key === "string") {
systemEncrypted.systemKey = FieldCrypto.encryptField(
record.key as string,
systemKey,
recordId as string,
"key",
);
}
if (record.key_password && typeof record.key_password === "string") {
systemEncrypted.systemKeyPassword = FieldCrypto.encryptField(
record.key_password as string,
systemKey,
recordId as string,
"key_password",
);
}
return systemEncrypted as Partial<T>;
}
}
export { DataCrypto };

View File

@@ -12,6 +12,7 @@ interface EncryptedFileMetadata {
algorithm: string;
keySource?: string;
salt?: string;
dataSize?: number;
}
class DatabaseFileEncryption {
@@ -25,11 +26,12 @@ class DatabaseFileEncryption {
buffer: Buffer,
targetPath: string,
): Promise<string> {
const tmpPath = `${targetPath}.tmp-${Date.now()}-${process.pid}`;
const metadataPath = `${targetPath}${this.METADATA_FILE_SUFFIX}`;
try {
const key = await this.systemCrypto.getDatabaseKey();
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(
this.ALGORITHM,
key,
@@ -45,14 +47,55 @@ class DatabaseFileEncryption {
fingerprint: "termix-v2-systemcrypto",
algorithm: this.ALGORITHM,
keySource: "SystemCrypto",
dataSize: encrypted.length,
};
const metadataPath = `${targetPath}${this.METADATA_FILE_SUFFIX}`;
fs.writeFileSync(targetPath, encrypted);
fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2));
const metadataJson = JSON.stringify(metadata, null, 2);
const metadataBuffer = Buffer.from(metadataJson, "utf8");
const metadataLengthBuffer = Buffer.alloc(4);
metadataLengthBuffer.writeUInt32BE(metadataBuffer.length, 0);
const finalBuffer = Buffer.concat([
metadataLengthBuffer,
metadataBuffer,
encrypted,
]);
fs.writeFileSync(tmpPath, finalBuffer);
fs.renameSync(tmpPath, targetPath);
try {
if (fs.existsSync(metadataPath)) {
fs.unlinkSync(metadataPath);
}
} catch (cleanupError) {
databaseLogger.warn("Failed to cleanup old metadata file", {
operation: "old_meta_cleanup_failed",
path: metadataPath,
error:
cleanupError instanceof Error
? cleanupError.message
: "Unknown error",
});
}
return targetPath;
} catch (error) {
try {
if (fs.existsSync(tmpPath)) {
fs.unlinkSync(tmpPath);
}
} catch (cleanupError) {
databaseLogger.warn("Failed to cleanup temporary files", {
operation: "temp_file_cleanup_failed",
tmpPath,
error:
cleanupError instanceof Error
? cleanupError.message
: "Unknown error",
});
}
databaseLogger.error("Failed to encrypt database buffer", error, {
operation: "database_buffer_encryption_failed",
targetPath,
@@ -74,6 +117,8 @@ class DatabaseFileEncryption {
const encryptedPath =
targetPath || `${sourcePath}${this.ENCRYPTED_FILE_SUFFIX}`;
const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
const tmpPath = `${encryptedPath}.tmp-${Date.now()}-${process.pid}`;
const tmpMetadataPath = `${tmpPath}${this.METADATA_FILE_SUFFIX}`;
try {
const sourceData = fs.readFileSync(sourcePath);
@@ -93,6 +138,12 @@ class DatabaseFileEncryption {
]);
const tag = cipher.getAuthTag();
const keyFingerprint = crypto
.createHash("sha256")
.update(key)
.digest("hex")
.substring(0, 16);
const metadata: EncryptedFileMetadata = {
iv: iv.toString("hex"),
tag: tag.toString("hex"),
@@ -100,10 +151,14 @@ class DatabaseFileEncryption {
fingerprint: "termix-v2-systemcrypto",
algorithm: this.ALGORITHM,
keySource: "SystemCrypto",
dataSize: encrypted.length,
};
fs.writeFileSync(encryptedPath, encrypted);
fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2));
fs.writeFileSync(tmpPath, encrypted);
fs.writeFileSync(tmpMetadataPath, JSON.stringify(metadata, null, 2));
fs.renameSync(tmpPath, encryptedPath);
fs.renameSync(tmpMetadataPath, metadataPath);
databaseLogger.info("Database file encrypted successfully", {
operation: "database_file_encryption",
@@ -111,11 +166,30 @@ class DatabaseFileEncryption {
encryptedPath,
fileSize: sourceData.length,
encryptedSize: encrypted.length,
keyFingerprint,
fingerprintPrefix: metadata.fingerprint,
});
return encryptedPath;
} catch (error) {
try {
if (fs.existsSync(tmpPath)) {
fs.unlinkSync(tmpPath);
}
if (fs.existsSync(tmpMetadataPath)) {
fs.unlinkSync(tmpMetadataPath);
}
} catch (cleanupError) {
databaseLogger.warn("Failed to cleanup temporary files", {
operation: "temp_file_cleanup_failed",
tmpPath,
error:
cleanupError instanceof Error
? cleanupError.message
: "Unknown error",
});
}
databaseLogger.error("Failed to encrypt database file", error, {
operation: "database_file_encryption_failed",
sourcePath,
@@ -134,16 +208,69 @@ class DatabaseFileEncryption {
);
}
const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
if (!fs.existsSync(metadataPath)) {
throw new Error(`Metadata file does not exist: ${metadataPath}`);
let metadata: EncryptedFileMetadata;
let encryptedData: Buffer;
const fileBuffer = fs.readFileSync(encryptedPath);
try {
const metadataLength = fileBuffer.readUInt32BE(0);
const metadataEnd = 4 + metadataLength;
if (
metadataLength <= 0 ||
metadataEnd > fileBuffer.length ||
metadataEnd <= 4
) {
throw new Error("Invalid metadata length in single-file format");
}
const metadataJson = fileBuffer.slice(4, metadataEnd).toString("utf8");
metadata = JSON.parse(metadataJson);
encryptedData = fileBuffer.slice(metadataEnd);
if (!metadata.iv || !metadata.tag || !metadata.version) {
throw new Error("Invalid metadata structure in single-file format");
}
} catch (singleFileError) {
const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
if (!fs.existsSync(metadataPath)) {
throw new Error(
`Could not read database: Not a valid single-file format and metadata file is missing: ${metadataPath}. Error: ${singleFileError.message}`,
);
}
try {
const metadataContent = fs.readFileSync(metadataPath, "utf8");
metadata = JSON.parse(metadataContent);
encryptedData = fileBuffer;
} catch (twoFileError) {
throw new Error(
`Failed to read database using both single-file and two-file formats. Error: ${twoFileError.message}`,
);
}
}
try {
const metadataContent = fs.readFileSync(metadataPath, "utf8");
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
const encryptedData = fs.readFileSync(encryptedPath);
if (
metadata.dataSize !== undefined &&
encryptedData.length !== metadata.dataSize
) {
databaseLogger.error(
"Encrypted file size mismatch - possible corrupted write or mismatched metadata",
null,
{
operation: "database_file_size_mismatch",
encryptedPath,
actualSize: encryptedData.length,
expectedSize: metadata.dataSize,
},
);
throw new Error(
`Encrypted file size mismatch: expected ${metadata.dataSize} bytes but got ${encryptedData.length} bytes. ` +
`This indicates corrupted files or interrupted write operation.`,
);
}
let key: Buffer;
if (metadata.version === "v2") {
@@ -181,13 +308,63 @@ class DatabaseFileEncryption {
return decryptedBuffer;
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : "Unknown error";
const isAuthError =
errorMessage.includes("Unsupported state") ||
errorMessage.includes("authenticate data") ||
errorMessage.includes("auth");
if (isAuthError) {
const dataDir = process.env.DATA_DIR || "./db/data";
const envPath = path.join(dataDir, ".env");
let envFileExists = false;
let envFileReadable = false;
try {
envFileExists = fs.existsSync(envPath);
if (envFileExists) {
fs.accessSync(envPath, fs.constants.R_OK);
envFileReadable = true;
}
} catch (error) {}
databaseLogger.error(
"Database decryption authentication failed - possible causes: wrong DATABASE_KEY, corrupted files, or interrupted write",
error,
{
operation: "database_buffer_decryption_auth_failed",
encryptedPath,
dataDir,
envPath,
envFileExists,
envFileReadable,
hasEnvKey: !!process.env.DATABASE_KEY,
envKeyLength: process.env.DATABASE_KEY?.length || 0,
suggestion:
"Check if DATABASE_KEY in .env matches the key used for encryption",
},
);
throw new Error(
`Database decryption authentication failed. This usually means:\n` +
`1. DATABASE_KEY has changed or is missing from ${dataDir}/.env\n` +
`2. Encrypted file was corrupted during write (system crash/restart)\n` +
`3. Metadata file does not match encrypted data\n` +
`\nDebug info:\n` +
`- DATA_DIR: ${dataDir}\n` +
`- .env file exists: ${envFileExists}\n` +
`- .env file readable: ${envFileReadable}\n` +
`- DATABASE_KEY in environment: ${!!process.env.DATABASE_KEY}\n` +
`Original error: ${errorMessage}`,
);
}
databaseLogger.error("Failed to decrypt database to buffer", error, {
operation: "database_buffer_decryption_failed",
encryptedPath,
errorMessage,
});
throw new Error(
`Database buffer decryption failed: ${error instanceof Error ? error.message : "Unknown error"}`,
);
throw new Error(`Database buffer decryption failed: ${errorMessage}`);
}
}
@@ -215,6 +392,26 @@ class DatabaseFileEncryption {
const encryptedData = fs.readFileSync(encryptedPath);
if (
metadata.dataSize !== undefined &&
encryptedData.length !== metadata.dataSize
) {
databaseLogger.error(
"Encrypted file size mismatch - possible corrupted write or mismatched metadata",
null,
{
operation: "database_file_size_mismatch",
encryptedPath,
actualSize: encryptedData.length,
expectedSize: metadata.dataSize,
},
);
throw new Error(
`Encrypted file size mismatch: expected ${metadata.dataSize} bytes but got ${encryptedData.length} bytes. ` +
`This indicates corrupted files or interrupted write operation.`,
);
}
let key: Buffer;
if (metadata.version === "v2") {
key = await this.systemCrypto.getDatabaseKey();
@@ -274,18 +471,43 @@ class DatabaseFileEncryption {
}
static isEncryptedDatabaseFile(filePath: string): boolean {
const metadataPath = `${filePath}${this.METADATA_FILE_SUFFIX}`;
if (!fs.existsSync(filePath) || !fs.existsSync(metadataPath)) {
if (!fs.existsSync(filePath)) {
return false;
}
const metadataPath = `${filePath}${this.METADATA_FILE_SUFFIX}`;
if (fs.existsSync(metadataPath)) {
try {
const metadataContent = fs.readFileSync(metadataPath, "utf8");
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
return (
metadata.version === this.VERSION &&
metadata.algorithm === this.ALGORITHM
);
} catch {
return false;
}
}
try {
const metadataContent = fs.readFileSync(metadataPath, "utf8");
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
const fileBuffer = fs.readFileSync(filePath);
if (fileBuffer.length < 4) return false;
const metadataLength = fileBuffer.readUInt32BE(0);
const metadataEnd = 4 + metadataLength;
if (metadataLength <= 0 || metadataEnd > fileBuffer.length) {
return false;
}
const metadataJson = fileBuffer.slice(4, metadataEnd).toString("utf8");
const metadata: EncryptedFileMetadata = JSON.parse(metadataJson);
return (
metadata.version === this.VERSION &&
metadata.algorithm === this.ALGORITHM
metadata.algorithm === this.ALGORITHM &&
!!metadata.iv &&
!!metadata.tag
);
} catch {
return false;
@@ -322,6 +544,125 @@ class DatabaseFileEncryption {
}
}
static getDiagnosticInfo(encryptedPath: string): {
dataFile: {
exists: boolean;
size?: number;
mtime?: string;
readable?: boolean;
};
metadataFile: {
exists: boolean;
size?: number;
mtime?: string;
readable?: boolean;
content?: EncryptedFileMetadata;
};
environment: {
dataDir: string;
envPath: string;
envFileExists: boolean;
envFileReadable: boolean;
hasEnvKey: boolean;
envKeyLength: number;
};
validation: {
filesConsistent: boolean;
sizeMismatch?: boolean;
expectedSize?: number;
actualSize?: number;
};
} {
const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
const dataDir = process.env.DATA_DIR || "./db/data";
const envPath = path.join(dataDir, ".env");
const result: ReturnType<typeof this.getDiagnosticInfo> = {
dataFile: { exists: false },
metadataFile: { exists: false },
environment: {
dataDir,
envPath,
envFileExists: false,
envFileReadable: false,
hasEnvKey: !!process.env.DATABASE_KEY,
envKeyLength: process.env.DATABASE_KEY?.length || 0,
},
validation: {
filesConsistent: false,
},
};
try {
result.dataFile.exists = fs.existsSync(encryptedPath);
if (result.dataFile.exists) {
try {
fs.accessSync(encryptedPath, fs.constants.R_OK);
result.dataFile.readable = true;
const stats = fs.statSync(encryptedPath);
result.dataFile.size = stats.size;
result.dataFile.mtime = stats.mtime.toISOString();
} catch {
result.dataFile.readable = false;
}
}
result.metadataFile.exists = fs.existsSync(metadataPath);
if (result.metadataFile.exists) {
try {
fs.accessSync(metadataPath, fs.constants.R_OK);
result.metadataFile.readable = true;
const stats = fs.statSync(metadataPath);
result.metadataFile.size = stats.size;
result.metadataFile.mtime = stats.mtime.toISOString();
const content = fs.readFileSync(metadataPath, "utf8");
result.metadataFile.content = JSON.parse(content);
} catch {
result.metadataFile.readable = false;
}
}
result.environment.envFileExists = fs.existsSync(envPath);
if (result.environment.envFileExists) {
try {
fs.accessSync(envPath, fs.constants.R_OK);
result.environment.envFileReadable = true;
} catch (error) {}
}
if (
result.dataFile.exists &&
result.metadataFile.exists &&
result.metadataFile.content
) {
result.validation.filesConsistent = true;
if (result.metadataFile.content.dataSize !== undefined) {
result.validation.expectedSize = result.metadataFile.content.dataSize;
result.validation.actualSize = result.dataFile.size;
result.validation.sizeMismatch =
result.metadataFile.content.dataSize !== result.dataFile.size;
if (result.validation.sizeMismatch) {
result.validation.filesConsistent = false;
}
}
}
} catch (error) {
databaseLogger.error("Failed to generate diagnostic info", error, {
operation: "diagnostic_info_failed",
encryptedPath,
});
}
databaseLogger.info("Database encryption diagnostic info", {
operation: "diagnostic_info_generated",
...result,
});
return result;
}
static async createEncryptedBackup(
databasePath: string,
backupDir: string,

View File

@@ -32,7 +32,6 @@ class FieldCrypto {
"key",
"key_password",
"keyPassword",
"keyType",
"autostartPassword",
"autostartKey",
"autostartKeyPassword",
@@ -46,7 +45,6 @@ class FieldCrypto {
"key",
"public_key",
"publicKey",
"keyType",
]),
};

View File

@@ -82,7 +82,7 @@ export class LazyFieldEncryption {
legacyFieldName,
);
return decrypted;
} catch {}
} catch (error) {}
}
const sensitiveFields = [
@@ -174,7 +174,7 @@ export class LazyFieldEncryption {
wasPlaintext: false,
wasLegacyEncryption: true,
};
} catch {}
} catch (error) {}
}
return {
encrypted: fieldValue,

View File

@@ -36,7 +36,7 @@ const SENSITIVE_FIELDS = [
const TRUNCATE_FIELDS = ["data", "content", "body", "response", "request"];
class Logger {
export class Logger {
private serviceName: string;
private serviceIcon: string;
private serviceColor: string;

View File

@@ -0,0 +1,286 @@
/** Sliding fixed-window failure record for one key (IP, username, user id). */
interface LoginAttempt {
  count: number; // failures recorded inside the current window
  firstAttempt: number; // epoch ms of the first failure in the window
  lockedUntil?: number; // epoch ms until which the key is locked out
}

/**
 * In-memory rate limiter for authentication flows.
 *
 * Tracks failed attempts per IP and per username (login), per user id
 * (TOTP), and per username (password-reset codes). Each category uses a
 * fixed window: once `max` failures land inside the window the key is
 * locked for the category's lockout duration. A periodic sweep evicts
 * expired entries so the maps cannot grow without bound.
 *
 * State is process-local: restarts clear it, and it is not shared across
 * multiple server instances.
 */
class LoginRateLimiter {
  private ipAttempts = new Map<string, LoginAttempt>();
  private usernameAttempts = new Map<string, LoginAttempt>();
  private totpAttempts = new Map<string, LoginAttempt>();
  private resetCodeAttempts = new Map<string, LoginAttempt>();

  // Login (per IP and per username): 5 failures / 10 min -> 10 min lockout.
  private readonly MAX_ATTEMPTS = 5;
  private readonly WINDOW_MS = 10 * 60 * 1000;
  private readonly LOCKOUT_MS = 10 * 60 * 1000;

  // TOTP (per user id): 5 failures / 1 min -> 5 min lockout.
  private readonly TOTP_MAX_ATTEMPTS = 5;
  private readonly TOTP_WINDOW_MS = 1 * 60 * 1000;
  private readonly TOTP_LOCKOUT_MS = 5 * 60 * 1000;

  // Password-reset codes (per username): 5 failures / 1 min -> 5 min lockout.
  private readonly RESET_CODE_MAX_ATTEMPTS = 5;
  private readonly RESET_CODE_WINDOW_MS = 1 * 60 * 1000;
  private readonly RESET_CODE_LOCKOUT_MS = 5 * 60 * 1000;

  constructor() {
    const timer = setInterval(() => this.cleanup(), 5 * 60 * 1000);
    // Don't let the cleanup interval keep the Node.js process alive; the
    // guard makes this a no-op where timers lack unref (e.g. browsers).
    const t = timer as unknown as { unref?: () => void };
    if (typeof t.unref === "function") {
      t.unref();
    }
  }

  /**
   * Record one failure for `key`: starts a new window on the first failure
   * (or after the previous window expired), otherwise increments the count
   * and applies the lockout once the threshold is reached.
   */
  private recordFailure(
    map: Map<string, LoginAttempt>,
    key: string,
    windowMs: number,
    maxAttempts: number,
    lockoutMs: number,
  ): void {
    const now = Date.now();
    const attempt = map.get(key);
    if (!attempt || now - attempt.firstAttempt > windowMs) {
      map.set(key, { count: 1, firstAttempt: now });
      return;
    }
    attempt.count++;
    if (attempt.count >= maxAttempts) {
      attempt.lockedUntil = now + lockoutMs;
    }
  }

  /** Lock status for `key`; remainingTime is whole seconds, rounded up. */
  private lockStatus(
    map: Map<string, LoginAttempt>,
    key: string,
  ): { locked: boolean; remainingTime?: number } {
    const now = Date.now();
    const attempt = map.get(key);
    if (attempt?.lockedUntil && attempt.lockedUntil > now) {
      return {
        locked: true,
        remainingTime: Math.ceil((attempt.lockedUntil - now) / 1000),
      };
    }
    return { locked: false };
  }

  /** Attempts left for `key` inside the current window (max when none). */
  private remainingFor(
    map: Map<string, LoginAttempt>,
    key: string,
    windowMs: number,
    maxAttempts: number,
  ): number {
    const attempt = map.get(key);
    if (attempt && Date.now() - attempt.firstAttempt <= windowMs) {
      return Math.max(0, maxAttempts - attempt.count);
    }
    return maxAttempts;
  }

  /** Evict entries whose lockout or window has expired. */
  private sweep(map: Map<string, LoginAttempt>, windowMs: number): void {
    const now = Date.now();
    for (const [key, attempt] of map.entries()) {
      if (attempt.lockedUntil && attempt.lockedUntil < now) {
        map.delete(key); // lockout elapsed
      } else if (
        !attempt.lockedUntil &&
        now - attempt.firstAttempt > windowMs
      ) {
        map.delete(key); // window elapsed without triggering a lockout
      }
    }
  }

  /** Periodic maintenance: sweep all four categories. */
  private cleanup(): void {
    this.sweep(this.ipAttempts, this.WINDOW_MS);
    this.sweep(this.usernameAttempts, this.WINDOW_MS);
    this.sweep(this.totpAttempts, this.TOTP_WINDOW_MS);
    this.sweep(this.resetCodeAttempts, this.RESET_CODE_WINDOW_MS);
  }

  /** Record a failed login for the IP and, when known, the username. */
  recordFailedAttempt(ip: string, username?: string): void {
    this.recordFailure(
      this.ipAttempts,
      ip,
      this.WINDOW_MS,
      this.MAX_ATTEMPTS,
      this.LOCKOUT_MS,
    );
    if (username) {
      this.recordFailure(
        this.usernameAttempts,
        username,
        this.WINDOW_MS,
        this.MAX_ATTEMPTS,
        this.LOCKOUT_MS,
      );
    }
  }

  /** Clear login counters after a successful authentication. */
  resetAttempts(ip: string, username?: string): void {
    this.ipAttempts.delete(ip);
    if (username) {
      this.usernameAttempts.delete(username);
    }
  }

  /** Locked if either the IP or (when given) the username is locked. */
  isLocked(
    ip: string,
    username?: string,
  ): { locked: boolean; remainingTime?: number } {
    const byIp = this.lockStatus(this.ipAttempts, ip);
    if (byIp.locked) {
      return byIp;
    }
    if (username) {
      const byUser = this.lockStatus(this.usernameAttempts, username);
      if (byUser.locked) {
        return byUser;
      }
    }
    return { locked: false };
  }

  /** Minimum of the IP's and (when given) the username's remaining attempts. */
  getRemainingAttempts(ip: string, username?: string): number {
    let minRemaining = this.remainingFor(
      this.ipAttempts,
      ip,
      this.WINDOW_MS,
      this.MAX_ATTEMPTS,
    );
    if (username) {
      minRemaining = Math.min(
        minRemaining,
        this.remainingFor(
          this.usernameAttempts,
          username,
          this.WINDOW_MS,
          this.MAX_ATTEMPTS,
        ),
      );
    }
    return minRemaining;
  }

  /** Record one failed TOTP verification for the user. */
  recordFailedTOTPAttempt(userId: string): void {
    this.recordFailure(
      this.totpAttempts,
      userId,
      this.TOTP_WINDOW_MS,
      this.TOTP_MAX_ATTEMPTS,
      this.TOTP_LOCKOUT_MS,
    );
  }

  /** Clear TOTP counters after a successful verification. */
  resetTOTPAttempts(userId: string): void {
    this.totpAttempts.delete(userId);
  }

  /** TOTP lock status for the user. */
  isTOTPLocked(userId: string): { locked: boolean; remainingTime?: number } {
    return this.lockStatus(this.totpAttempts, userId);
  }

  /** Remaining TOTP attempts inside the current window. */
  getRemainingTOTPAttempts(userId: string): number {
    return this.remainingFor(
      this.totpAttempts,
      userId,
      this.TOTP_WINDOW_MS,
      this.TOTP_MAX_ATTEMPTS,
    );
  }

  /** Record one failed password-reset-code attempt for the username. */
  recordResetCodeAttempt(username: string): void {
    this.recordFailure(
      this.resetCodeAttempts,
      username,
      this.RESET_CODE_WINDOW_MS,
      this.RESET_CODE_MAX_ATTEMPTS,
      this.RESET_CODE_LOCKOUT_MS,
    );
  }

  /** Clear reset-code counters after a successful reset. */
  resetResetCodeAttempts(username: string): void {
    this.resetCodeAttempts.delete(username);
  }

  /** Reset-code lock status for the username. */
  isResetCodeLocked(username: string): {
    locked: boolean;
    remainingTime?: number;
  } {
    return this.lockStatus(this.resetCodeAttempts, username);
  }

  /** Remaining reset-code attempts inside the current window. */
  getRemainingResetCodeAttempts(username: string): number {
    return this.remainingFor(
      this.resetCodeAttempts,
      username,
      this.RESET_CODE_WINDOW_MS,
      this.RESET_CODE_MAX_ATTEMPTS,
    );
  }
}

/** Process-wide singleton shared by the auth routes. */
export const loginRateLimiter = new LoginRateLimiter();

View File

@@ -0,0 +1,436 @@
import type { Request, Response, NextFunction } from "express";
import { db } from "../database/db/index.js";
import {
hostAccess,
roles,
userRoles,
sshData,
users,
} from "../database/db/schema.js";
import { eq, and, or, isNull, gte, sql } from "drizzle-orm";
import { databaseLogger } from "./logger.js";
/** Express request augmented by the auth middleware with the caller's identity. */
interface AuthenticatedRequest extends Request {
  userId?: string;
  dataKey?: Buffer;
}
/** Result of a host-access lookup: ownership, sharing, and (for shares) level/expiry. */
interface HostAccessInfo {
  hasAccess: boolean;
  isOwner: boolean;
  isShared: boolean;
  permissionLevel?: "view";
  expiresAt?: string | null;
}
/** Generic allow/deny result with an optional human-readable reason. */
interface PermissionCheckResult {
  allowed: boolean;
  reason?: string;
}
/**
 * Singleton RBAC service: resolves role-based permissions (with wildcard
 * matching), checks per-host access (ownership or time-limited shares),
 * and provides Express middleware for permission/host/admin gating.
 *
 * Permissions are cached per user for CACHE_TTL; two background intervals
 * purge expired host shares (every minute) and drop the whole permission
 * cache (every CACHE_TTL).
 * NOTE(review): the intervals are never unref'd or cleared — acceptable for
 * a long-lived server singleton, but they keep the process alive.
 */
class PermissionManager {
  private static instance: PermissionManager;
  // userId -> flattened permission strings from all of the user's roles.
  private permissionCache: Map<
    string,
    { permissions: string[]; timestamp: number }
  >;
  private readonly CACHE_TTL = 5 * 60 * 1000;
  private constructor() {
    this.permissionCache = new Map();
    // Periodically delete expired host-access shares from the database.
    setInterval(() => {
      this.cleanupExpiredAccess().catch((error) => {
        databaseLogger.error(
          "Failed to run periodic host access cleanup",
          error,
          {
            operation: "host_access_cleanup_periodic",
          },
        );
      });
    }, 60 * 1000);
    // Coarse cache invalidation: wipe everything once per TTL period.
    setInterval(() => {
      this.clearPermissionCache();
    }, this.CACHE_TTL);
  }
  /** Lazily create and return the process-wide instance. */
  static getInstance(): PermissionManager {
    if (!this.instance) {
      this.instance = new PermissionManager();
    }
    return this.instance;
  }
  /**
   * Clean up expired host access entries
   */
  private async cleanupExpiredAccess(): Promise<void> {
    try {
      const now = new Date().toISOString();
      // Delete shares whose expiresAt is set and already in the past.
      // NOTE(review): `result` (deleted ids) is captured but never used.
      const result = await db
        .delete(hostAccess)
        .where(
          and(
            sql`${hostAccess.expiresAt} IS NOT NULL`,
            sql`${hostAccess.expiresAt} <= ${now}`,
          ),
        )
        .returning({ id: hostAccess.id });
    } catch (error) {
      databaseLogger.error("Failed to cleanup expired host access", error, {
        operation: "host_access_cleanup_failed",
      });
    }
  }
  /**
   * Clear permission cache
   */
  private clearPermissionCache(): void {
    this.permissionCache.clear();
  }
  /**
   * Invalidate permission cache for a specific user
   */
  invalidateUserPermissionCache(userId: string): void {
    this.permissionCache.delete(userId);
  }
  /**
   * Get user permissions from roles
   *
   * Returns the union of permission strings across all of the user's roles,
   * served from the per-user cache when fresh. Role permissions are stored
   * as JSON arrays; unparseable ones are logged and ignored. On query
   * failure an empty list is returned (fail-closed for permission checks).
   */
  async getUserPermissions(userId: string): Promise<string[]> {
    const cached = this.permissionCache.get(userId);
    if (cached && Date.now() - cached.timestamp < this.CACHE_TTL) {
      return cached.permissions;
    }
    try {
      const userRoleRecords = await db
        .select({
          permissions: roles.permissions,
        })
        .from(userRoles)
        .innerJoin(roles, eq(userRoles.roleId, roles.id))
        .where(eq(userRoles.userId, userId));
      // De-duplicate permissions across roles.
      const allPermissions = new Set<string>();
      for (const record of userRoleRecords) {
        try {
          const permissions = JSON.parse(record.permissions) as string[];
          for (const perm of permissions) {
            allPermissions.add(perm);
          }
        } catch (parseError) {
          databaseLogger.warn("Failed to parse role permissions", {
            operation: "get_user_permissions",
            userId,
            error: parseError,
          });
        }
      }
      const permissionsArray = Array.from(allPermissions);
      this.permissionCache.set(userId, {
        permissions: permissionsArray,
        timestamp: Date.now(),
      });
      return permissionsArray;
    } catch (error) {
      databaseLogger.error("Failed to get user permissions", error, {
        operation: "get_user_permissions",
        userId,
      });
      return [];
    }
  }
  /**
   * Check if user has a specific permission
   * Supports wildcards: "hosts.*", "*"
   *
   * A dotted permission like "hosts.ssh.read" is granted by the exact
   * string, by "*", or by any prefix wildcard ("hosts.ssh.*", "hosts.*").
   */
  async hasPermission(userId: string, permission: string): Promise<boolean> {
    const userPermissions = await this.getUserPermissions(userId);
    if (userPermissions.includes("*")) {
      return true;
    }
    if (userPermissions.includes(permission)) {
      return true;
    }
    // Walk prefixes from longest to shortest, testing each as "prefix.*".
    const parts = permission.split(".");
    for (let i = parts.length; i > 0; i--) {
      const wildcardPermission = parts.slice(0, i).join(".") + ".*";
      if (userPermissions.includes(wildcardPermission)) {
        return true;
      }
    }
    return false;
  }
  /**
   * Check if user can access a specific host
   *
   * Owners always have full access. Otherwise a non-expired share (direct
   * to the user or via one of their roles) grants read-style access only:
   * "write" and "delete" are refused for shared hosts. Successful shared
   * reads update lastAccessedAt (best effort). Any query failure resolves
   * to no access (fail-closed).
   */
  async canAccessHost(
    userId: string,
    hostId: number,
    action: "read" | "write" | "execute" | "delete" | "share" = "read",
  ): Promise<HostAccessInfo> {
    try {
      // Ownership check: host row belonging to this user.
      const host = await db
        .select()
        .from(sshData)
        .where(and(eq(sshData.id, hostId), eq(sshData.userId, userId)))
        .limit(1);
      if (host.length > 0) {
        return {
          hasAccess: true,
          isOwner: true,
          isShared: false,
        };
      }
      // Collect the user's role ids so role-level shares can match too.
      const userRoleIds = await db
        .select({ roleId: userRoles.roleId })
        .from(userRoles)
        .where(eq(userRoles.userId, userId));
      const roleIds = userRoleIds.map((r) => r.roleId);
      const now = new Date().toISOString();
      // A share matches if it targets the user directly or one of their
      // roles, and has either no expiry or an expiry still in the future.
      const sharedAccess = await db
        .select()
        .from(hostAccess)
        .where(
          and(
            eq(hostAccess.hostId, hostId),
            or(
              eq(hostAccess.userId, userId),
              roleIds.length > 0
                ? sql`${hostAccess.roleId} IN (${sql.join(
                    roleIds.map((id) => sql`${id}`),
                    sql`, `,
                  )})`
                : sql`false`,
            ),
            or(isNull(hostAccess.expiresAt), gte(hostAccess.expiresAt, now)),
          ),
        )
        .limit(1);
      if (sharedAccess.length > 0) {
        const access = sharedAccess[0];
        // Shares are view-only: mutating actions are denied outright.
        if (action === "write" || action === "delete") {
          return {
            hasAccess: false,
            isOwner: false,
            isShared: true,
            permissionLevel: access.permissionLevel as "view",
            expiresAt: access.expiresAt,
          };
        }
        // Best-effort audit timestamp; failure must not block access.
        try {
          await db
            .update(hostAccess)
            .set({
              lastAccessedAt: now,
            })
            .where(eq(hostAccess.id, access.id));
        } catch (error) {
          databaseLogger.warn("Failed to update host access timestamp", {
            operation: "update_host_access_timestamp",
            error,
          });
        }
        return {
          hasAccess: true,
          isOwner: false,
          isShared: true,
          permissionLevel: access.permissionLevel as "view",
          expiresAt: access.expiresAt,
        };
      }
      return {
        hasAccess: false,
        isOwner: false,
        isShared: false,
      };
    } catch (error) {
      databaseLogger.error("Failed to check host access", error, {
        operation: "can_access_host",
        userId,
        hostId,
        action,
      });
      // Fail closed: treat errors as "no access".
      return {
        hasAccess: false,
        isOwner: false,
        isShared: false,
      };
    }
  }
  /**
   * Check if user is admin (backward compatibility)
   *
   * True if the legacy users.is_admin flag is set, or the user holds a role
   * named "admin" or "super_admin". Fails closed on query errors.
   */
  async isAdmin(userId: string): Promise<boolean> {
    try {
      const user = await db
        .select({ isAdmin: users.is_admin })
        .from(users)
        .where(eq(users.id, userId))
        .limit(1);
      if (user.length > 0 && user[0].isAdmin) {
        return true;
      }
      const adminRoles = await db
        .select({ roleName: roles.name })
        .from(userRoles)
        .innerJoin(roles, eq(userRoles.roleId, roles.id))
        .where(
          and(
            eq(userRoles.userId, userId),
            or(eq(roles.name, "admin"), eq(roles.name, "super_admin")),
          ),
        );
      return adminRoles.length > 0;
    } catch (error) {
      databaseLogger.error("Failed to check admin status", error, {
        operation: "is_admin",
        userId,
      });
      return false;
    }
  }
  /**
   * Middleware: Require specific permission
   *
   * 401 when unauthenticated, 403 (with the required permission echoed)
   * when the permission check fails, otherwise next().
   */
  requirePermission(permission: string) {
    return async (
      req: AuthenticatedRequest,
      res: Response,
      next: NextFunction,
    ) => {
      const userId = req.userId;
      if (!userId) {
        return res.status(401).json({ error: "Not authenticated" });
      }
      const hasPermission = await this.hasPermission(userId, permission);
      if (!hasPermission) {
        databaseLogger.warn("Permission denied", {
          operation: "permission_check",
          userId,
          permission,
          path: req.path,
        });
        return res.status(403).json({
          error: "Insufficient permissions",
          required: permission,
        });
      }
      next();
    };
  }
  /**
   * Middleware: Require host access
   *
   * Reads the host id from req.params[hostIdParam]; 400 on a non-numeric
   * id, 401 unauthenticated, 403 when canAccessHost denies. On success the
   * resolved HostAccessInfo is attached to the request as hostAccessInfo.
   */
  requireHostAccess(
    hostIdParam: string = "id",
    action: "read" | "write" | "execute" | "delete" | "share" = "read",
  ) {
    return async (
      req: AuthenticatedRequest,
      res: Response,
      next: NextFunction,
    ) => {
      const userId = req.userId;
      if (!userId) {
        return res.status(401).json({ error: "Not authenticated" });
      }
      const hostId = parseInt(req.params[hostIdParam], 10);
      if (isNaN(hostId)) {
        return res.status(400).json({ error: "Invalid host ID" });
      }
      const accessInfo = await this.canAccessHost(userId, hostId, action);
      if (!accessInfo.hasAccess) {
        databaseLogger.warn("Host access denied", {
          operation: "host_access_check",
          userId,
          hostId,
          action,
        });
        return res.status(403).json({
          error: "Access denied to host",
          hostId,
          action,
        });
      }
      (req as any).hostAccessInfo = accessInfo;
      next();
    };
  }
  /**
   * Middleware: Require admin role (backward compatible)
   *
   * 401 when unauthenticated, 403 when isAdmin() is false, otherwise next().
   */
  requireAdmin() {
    return async (
      req: AuthenticatedRequest,
      res: Response,
      next: NextFunction,
    ) => {
      const userId = req.userId;
      if (!userId) {
        return res.status(401).json({ error: "Not authenticated" });
      }
      const isAdmin = await this.isAdmin(userId);
      if (!isAdmin) {
        databaseLogger.warn("Admin access denied", {
          operation: "admin_check",
          userId,
          path: req.path,
        });
        return res.status(403).json({ error: "Admin access required" });
      }
      next();
    };
  }
}
export { PermissionManager };
export type { AuthenticatedRequest, HostAccessInfo, PermissionCheckResult };

View File

@@ -0,0 +1,700 @@
import { db } from "../database/db/index.js";
import {
sharedCredentials,
sshCredentials,
hostAccess,
users,
userRoles,
sshData,
} from "../database/db/schema.js";
import { eq, and } from "drizzle-orm";
import { DataCrypto } from "./data-crypto.js";
import { FieldCrypto } from "./field-crypto.js";
import { databaseLogger } from "./logger.js";
interface CredentialData {
username: string;
authType: string;
password?: string;
key?: string;
keyPassword?: string;
keyType?: string;
}
/**
 * Manages shared credentials for RBAC host sharing.
 * Creates per-user encrypted credential copies to enable credential sharing
 * without requiring the credential owner to be online.
 *
 * Storage model: each share gets its own row in `sharedCredentials`, with the
 * credential fields re-encrypted under the TARGET user's DEK. When the target
 * (or owner) key is unavailable at share time, a placeholder row is written
 * with `needsReEncryption = true` and is filled in at the target's next login.
 */
class SharedCredentialManager {
  private static instance: SharedCredentialManager;

  private constructor() {}

  /** Lazily-created process-wide singleton accessor. */
  static getInstance(): SharedCredentialManager {
    if (!this.instance) {
      this.instance = new SharedCredentialManager();
    }
    return this.instance;
  }

  /**
   * Create shared credential for a specific user.
   * Called when sharing a host with a user.
   *
   * Decrypts the original credential — with the owner's DEK when the owner is
   * online, otherwise via the system credential-sharing key (which throws if
   * the credential was never migrated for offline sharing) — then stores a
   * copy encrypted for the target user. If the target user's DEK is not
   * unlocked, a pending placeholder is created instead.
   *
   * @throws rethrows any decryption/insert failure after logging it
   */
  async createSharedCredentialForUser(
    hostAccessId: number,
    originalCredentialId: number,
    targetUserId: string,
    ownerId: string,
  ): Promise<void> {
    try {
      const targetDEK = DataCrypto.getUserDataKey(targetUserId);
      if (!targetDEK) {
        // Target is offline: defer encryption until their next login.
        await this.createPendingSharedCredential(
          hostAccessId,
          originalCredentialId,
          targetUserId,
        );
        return;
      }
      const ownerDEK = DataCrypto.getUserDataKey(ownerId);
      const credentialData = ownerDEK
        ? await this.getDecryptedCredential(
            originalCredentialId,
            ownerId,
            ownerDEK,
          )
        : await this.getDecryptedCredentialViaSystemKey(originalCredentialId);
      const encryptedForTarget = this.encryptCredentialForUser(
        credentialData,
        targetUserId,
        targetDEK,
        hostAccessId,
      );
      await db.insert(sharedCredentials).values({
        hostAccessId,
        originalCredentialId,
        targetUserId,
        ...encryptedForTarget,
        needsReEncryption: false,
      });
    } catch (error) {
      databaseLogger.error("Failed to create shared credential", error, {
        operation: "create_shared_credential",
        hostAccessId,
        targetUserId,
      });
      throw error;
    }
  }

  /**
   * Create shared credentials for all users in a role.
   * Called when sharing a host with a role.
   *
   * Per-member failures are logged and skipped so one member's missing key
   * does not abort the whole role share; only the initial member query can
   * reject the returned promise.
   */
  async createSharedCredentialsForRole(
    hostAccessId: number,
    originalCredentialId: number,
    roleId: number,
    ownerId: string,
  ): Promise<void> {
    try {
      const roleUsers = await db
        .select({ userId: userRoles.userId })
        .from(userRoles)
        .where(eq(userRoles.roleId, roleId));
      for (const { userId } of roleUsers) {
        try {
          await this.createSharedCredentialForUser(
            hostAccessId,
            originalCredentialId,
            userId,
            ownerId,
          );
        } catch (error) {
          // Best-effort per member: log and continue with the rest.
          databaseLogger.error(
            "Failed to create shared credential for role member",
            error,
            {
              operation: "create_shared_credentials_role",
              hostAccessId,
              roleId,
              userId,
            },
          );
        }
      }
    } catch (error) {
      databaseLogger.error(
        "Failed to create shared credentials for role",
        error,
        {
          operation: "create_shared_credentials_role",
          hostAccessId,
          roleId,
        },
      );
      throw error;
    }
  }

  /**
   * Get credential data for a shared user.
   * Called when a shared user connects to a host.
   *
   * @returns the decrypted credential, or null when no share exists for this
   *          host/user pair or the share is still pending re-encryption
   * @throws when the user's DEK is not unlocked or decryption fails
   */
  async getSharedCredentialForUser(
    hostId: number,
    userId: string,
  ): Promise<CredentialData | null> {
    try {
      const userDEK = DataCrypto.getUserDataKey(userId);
      if (!userDEK) {
        throw new Error(`User ${userId} data not unlocked`);
      }
      const sharedCred = await db
        .select()
        .from(sharedCredentials)
        .innerJoin(
          hostAccess,
          eq(sharedCredentials.hostAccessId, hostAccess.id),
        )
        .where(
          and(
            eq(hostAccess.hostId, hostId),
            eq(sharedCredentials.targetUserId, userId),
          ),
        )
        .limit(1);
      if (sharedCred.length === 0) {
        return null;
      }
      const cred = sharedCred[0].shared_credentials;
      if (cred.needsReEncryption) {
        // Placeholder row: the real ciphertext hasn't been written yet.
        databaseLogger.warn(
          "Shared credential needs re-encryption but cannot be accessed yet",
          {
            operation: "get_shared_credential_pending",
            hostId,
            userId,
          },
        );
        return null;
      }
      return this.decryptSharedCredential(cred, userDEK);
    } catch (error) {
      databaseLogger.error("Failed to get shared credential", error, {
        operation: "get_shared_credential",
        hostId,
        userId,
      });
      throw error;
    }
  }

  /**
   * Update all shared credentials when original credential is updated.
   * Called when credential owner updates credential.
   *
   * Shares whose target DEK is unavailable (or all shares, when the owner is
   * offline and the credential is not migrated) are flagged
   * `needsReEncryption` and repaired at the affected user's next login.
   * Never throws: failures are logged only.
   */
  async updateSharedCredentialsForOriginal(
    credentialId: number,
    ownerId: string,
  ): Promise<void> {
    try {
      const sharedCreds = await db
        .select()
        .from(sharedCredentials)
        .where(eq(sharedCredentials.originalCredentialId, credentialId));
      const ownerDEK = DataCrypto.getUserDataKey(ownerId);
      let credentialData: CredentialData;
      if (ownerDEK) {
        credentialData = await this.getDecryptedCredential(
          credentialId,
          ownerId,
          ownerDEK,
        );
      } else {
        try {
          credentialData =
            await this.getDecryptedCredentialViaSystemKey(credentialId);
        } catch (error) {
          databaseLogger.warn(
            "Cannot update shared credentials: owner offline and credential not migrated",
            {
              operation: "update_shared_credentials_failed",
              credentialId,
              ownerId,
              error: error instanceof Error ? error.message : "Unknown error",
            },
          );
          // No plaintext available: mark every share stale and bail out.
          await db
            .update(sharedCredentials)
            .set({ needsReEncryption: true })
            .where(eq(sharedCredentials.originalCredentialId, credentialId));
          return;
        }
      }
      for (const sharedCred of sharedCreds) {
        const targetDEK = DataCrypto.getUserDataKey(sharedCred.targetUserId);
        if (!targetDEK) {
          // Target offline: flag this one row for repair at next login.
          await db
            .update(sharedCredentials)
            .set({ needsReEncryption: true })
            .where(eq(sharedCredentials.id, sharedCred.id));
          continue;
        }
        const encryptedForTarget = this.encryptCredentialForUser(
          credentialData,
          sharedCred.targetUserId,
          targetDEK,
          sharedCred.hostAccessId,
        );
        await db
          .update(sharedCredentials)
          .set({
            ...encryptedForTarget,
            needsReEncryption: false,
            updatedAt: new Date().toISOString(),
          })
          .where(eq(sharedCredentials.id, sharedCred.id));
      }
    } catch (error) {
      databaseLogger.error("Failed to update shared credentials", error, {
        operation: "update_shared_credentials",
        credentialId,
      });
    }
  }

  /**
   * Delete shared credentials when original credential is deleted.
   * Called from credential deletion route.
   * Never throws: failures are logged only.
   */
  async deleteSharedCredentialsForOriginal(
    credentialId: number,
  ): Promise<void> {
    try {
      const deleted = await db
        .delete(sharedCredentials)
        .where(eq(sharedCredentials.originalCredentialId, credentialId))
        .returning({ id: sharedCredentials.id });
      // Record how many shares were removed (previously fetched but unused).
      databaseLogger.info("Deleted shared credentials for credential", {
        operation: "delete_shared_credentials",
        credentialId,
        deletedCount: deleted.length,
      });
    } catch (error) {
      databaseLogger.error("Failed to delete shared credentials", error, {
        operation: "delete_shared_credentials",
        credentialId,
      });
    }
  }

  /**
   * Re-encrypt pending shared credentials for a user when they log in.
   * Called during user login.
   *
   * No-op when the user's DEK is still unavailable. Never throws.
   */
  async reEncryptPendingCredentialsForUser(userId: string): Promise<void> {
    try {
      const userDEK = DataCrypto.getUserDataKey(userId);
      if (!userDEK) {
        return;
      }
      const pendingCreds = await db
        .select()
        .from(sharedCredentials)
        .where(
          and(
            eq(sharedCredentials.targetUserId, userId),
            eq(sharedCredentials.needsReEncryption, true),
          ),
        );
      for (const cred of pendingCreds) {
        await this.reEncryptSharedCredential(cred.id, userId);
      }
    } catch (error) {
      databaseLogger.error("Failed to re-encrypt pending credentials", error, {
        operation: "reencrypt_pending_credentials",
        userId,
      });
    }
  }

  /**
   * Load and decrypt the owner's original credential with the owner's DEK.
   * @throws when the credential does not exist for this owner
   */
  private async getDecryptedCredential(
    credentialId: number,
    ownerId: string,
    ownerDEK: Buffer,
  ): Promise<CredentialData> {
    const creds = await db
      .select()
      .from(sshCredentials)
      .where(
        and(
          eq(sshCredentials.id, credentialId),
          eq(sshCredentials.userId, ownerId),
        ),
      )
      .limit(1);
    if (creds.length === 0) {
      throw new Error(`Credential ${credentialId} not found`);
    }
    const cred = creds[0];
    return {
      username: cred.username,
      authType: cred.authType,
      password: cred.password
        ? this.decryptField(cred.password, ownerDEK, credentialId, "password")
        : undefined,
      key: cred.key
        ? this.decryptField(cred.key, ownerDEK, credentialId, "key")
        : undefined,
      keyPassword: cred.key_password
        ? this.decryptField(
            cred.key_password,
            ownerDEK,
            credentialId,
            "key_password",
          )
        : undefined,
      keyType: cred.keyType,
    };
  }

  /**
   * Decrypt credential using system key (for offline sharing when owner is offline)
   * @throws when the credential is missing or has not been migrated
   *         (no system* ciphertext columns populated)
   */
  private async getDecryptedCredentialViaSystemKey(
    credentialId: number,
  ): Promise<CredentialData> {
    const creds = await db
      .select()
      .from(sshCredentials)
      .where(eq(sshCredentials.id, credentialId))
      .limit(1);
    if (creds.length === 0) {
      throw new Error(`Credential ${credentialId} not found`);
    }
    const cred = creds[0];
    if (!cred.systemPassword && !cred.systemKey && !cred.systemKeyPassword) {
      throw new Error(
        "Credential not yet migrated for offline sharing. " +
          "Please ask credential owner to log in to enable sharing.",
      );
    }
    // Lazy import avoids a module cycle with system-crypto at load time.
    const { SystemCrypto } = await import("./system-crypto.js");
    const systemCrypto = SystemCrypto.getInstance();
    const CSKEK = await systemCrypto.getCredentialSharingKey();
    return {
      username: cred.username,
      authType: cred.authType,
      password: cred.systemPassword
        ? this.decryptField(
            cred.systemPassword,
            CSKEK,
            credentialId,
            "password",
          )
        : undefined,
      key: cred.systemKey
        ? this.decryptField(cred.systemKey, CSKEK, credentialId, "key")
        : undefined,
      keyPassword: cred.systemKeyPassword
        ? this.decryptField(
            cred.systemKeyPassword,
            CSKEK,
            credentialId,
            "key_password",
          )
        : undefined,
      keyType: cred.keyType,
    };
  }

  /**
   * Encrypt plaintext credential fields under the target user's DEK.
   * The record id mixed into the field AAD is derived from the host-access
   * row and target user so ciphertext cannot be replayed across shares.
   * authType and keyType are stored as-is (not secret).
   */
  private encryptCredentialForUser(
    credentialData: CredentialData,
    targetUserId: string,
    targetDEK: Buffer,
    hostAccessId: number,
  ): {
    encryptedUsername: string;
    encryptedAuthType: string;
    encryptedPassword: string | null;
    encryptedKey: string | null;
    encryptedKeyPassword: string | null;
    encryptedKeyType: string | null;
  } {
    const recordId = `shared-${hostAccessId}-${targetUserId}`;
    return {
      encryptedUsername: FieldCrypto.encryptField(
        credentialData.username,
        targetDEK,
        recordId,
        "username",
      ),
      encryptedAuthType: credentialData.authType,
      encryptedPassword: credentialData.password
        ? FieldCrypto.encryptField(
            credentialData.password,
            targetDEK,
            recordId,
            "password",
          )
        : null,
      encryptedKey: credentialData.key
        ? FieldCrypto.encryptField(
            credentialData.key,
            targetDEK,
            recordId,
            "key",
          )
        : null,
      encryptedKeyPassword: credentialData.keyPassword
        ? FieldCrypto.encryptField(
            credentialData.keyPassword,
            targetDEK,
            recordId,
            "key_password",
          )
        : null,
      encryptedKeyType: credentialData.keyType || null,
    };
  }

  /**
   * Inverse of encryptCredentialForUser: rebuild plaintext CredentialData
   * from a shared-credential row using the target user's DEK.
   */
  private decryptSharedCredential(
    sharedCred: typeof sharedCredentials.$inferSelect,
    userDEK: Buffer,
  ): CredentialData {
    // Must match the recordId used at encryption time exactly.
    const recordId = `shared-${sharedCred.hostAccessId}-${sharedCred.targetUserId}`;
    return {
      username: FieldCrypto.decryptField(
        sharedCred.encryptedUsername,
        userDEK,
        recordId,
        "username",
      ),
      authType: sharedCred.encryptedAuthType,
      password: sharedCred.encryptedPassword
        ? FieldCrypto.decryptField(
            sharedCred.encryptedPassword,
            userDEK,
            recordId,
            "password",
          )
        : undefined,
      key: sharedCred.encryptedKey
        ? FieldCrypto.decryptField(
            sharedCred.encryptedKey,
            userDEK,
            recordId,
            "key",
          )
        : undefined,
      keyPassword: sharedCred.encryptedKeyPassword
        ? FieldCrypto.decryptField(
            sharedCred.encryptedKeyPassword,
            userDEK,
            recordId,
            "key_password",
          )
        : undefined,
      keyType: sharedCred.encryptedKeyType || undefined,
    };
  }

  /**
   * Best-effort field decryption: on failure the stored value is returned
   * unchanged. NOTE(review): this presumably tolerates legacy/plaintext
   * rows — confirm; it also means corrupt ciphertext flows through silently
   * (a warning is logged).
   */
  private decryptField(
    encryptedValue: string,
    dek: Buffer,
    recordId: number | string,
    fieldName: string,
  ): string {
    try {
      return FieldCrypto.decryptField(
        encryptedValue,
        dek,
        recordId.toString(),
        fieldName,
      );
    } catch (error) {
      databaseLogger.warn("Field decryption failed, returning as-is", {
        operation: "decrypt_field",
        fieldName,
        recordId,
      });
      return encryptedValue;
    }
  }

  /**
   * Write a placeholder share row with empty ciphertext and
   * needsReEncryption=true; filled in by reEncryptSharedCredential at the
   * target's next login.
   */
  private async createPendingSharedCredential(
    hostAccessId: number,
    originalCredentialId: number,
    targetUserId: string,
  ): Promise<void> {
    await db.insert(sharedCredentials).values({
      hostAccessId,
      originalCredentialId,
      targetUserId,
      encryptedUsername: "",
      encryptedAuthType: "",
      needsReEncryption: true,
    });
    databaseLogger.info("Created pending shared credential", {
      operation: "create_pending_shared_credential",
      hostAccessId,
      targetUserId,
    });
  }

  /**
   * Repair one pending/stale share row: resolve the owning host's user,
   * decrypt the original credential (owner DEK or system key), and rewrite
   * the row encrypted for `userId`. Silently returns (with a warning) when
   * any prerequisite is missing; never throws.
   */
  private async reEncryptSharedCredential(
    sharedCredId: number,
    userId: string,
  ): Promise<void> {
    try {
      const sharedCred = await db
        .select()
        .from(sharedCredentials)
        .where(eq(sharedCredentials.id, sharedCredId))
        .limit(1);
      if (sharedCred.length === 0) {
        databaseLogger.warn("Re-encrypt: shared credential not found", {
          operation: "reencrypt_not_found",
          sharedCredId,
        });
        return;
      }
      const cred = sharedCred[0];
      // Join through hostAccess to find the host row and thus the owner.
      const access = await db
        .select()
        .from(hostAccess)
        .innerJoin(sshData, eq(hostAccess.hostId, sshData.id))
        .where(eq(hostAccess.id, cred.hostAccessId))
        .limit(1);
      if (access.length === 0) {
        databaseLogger.warn("Re-encrypt: host access not found", {
          operation: "reencrypt_access_not_found",
          sharedCredId,
        });
        return;
      }
      const ownerId = access[0].ssh_data.userId;
      const userDEK = DataCrypto.getUserDataKey(userId);
      if (!userDEK) {
        databaseLogger.warn("Re-encrypt: user DEK not available", {
          operation: "reencrypt_user_offline",
          sharedCredId,
          userId,
        });
        return;
      }
      const ownerDEK = DataCrypto.getUserDataKey(ownerId);
      let credentialData: CredentialData;
      if (ownerDEK) {
        credentialData = await this.getDecryptedCredential(
          cred.originalCredentialId,
          ownerId,
          ownerDEK,
        );
      } else {
        try {
          credentialData = await this.getDecryptedCredentialViaSystemKey(
            cred.originalCredentialId,
          );
        } catch (error) {
          databaseLogger.warn(
            "Re-encrypt: system key decryption failed, credential may not be migrated yet",
            {
              operation: "reencrypt_system_key_failed",
              sharedCredId,
              error: error instanceof Error ? error.message : "Unknown error",
            },
          );
          return;
        }
      }
      const encryptedForTarget = this.encryptCredentialForUser(
        credentialData,
        userId,
        userDEK,
        cred.hostAccessId,
      );
      await db
        .update(sharedCredentials)
        .set({
          ...encryptedForTarget,
          needsReEncryption: false,
          updatedAt: new Date().toISOString(),
        })
        .where(eq(sharedCredentials.id, sharedCredId));
    } catch (error) {
      databaseLogger.error("Failed to re-encrypt shared credential", error, {
        operation: "reencrypt_shared_credential",
        sharedCredId,
        userId,
      });
    }
  }
}
export { SharedCredentialManager };

View File

@@ -2,7 +2,12 @@ import { getDb, DatabaseSaveTrigger } from "../database/db/index.js";
import { DataCrypto } from "./data-crypto.js";
import type { SQLiteTable } from "drizzle-orm/sqlite-core";
type TableName = "users" | "ssh_data" | "ssh_credentials" | "recent_activity";
type TableName =
| "users"
| "ssh_data"
| "ssh_credentials"
| "recent_activity"
| "socks5_proxy_presets";
class SimpleDBOps {
static async insert<T extends Record<string, unknown>>(
@@ -23,6 +28,20 @@ class SimpleDBOps {
userDataKey,
);
if (tableName === "ssh_credentials") {
const { SystemCrypto } = await import("./system-crypto.js");
const systemCrypto = SystemCrypto.getInstance();
const systemKey = await systemCrypto.getCredentialSharingKey();
const systemEncrypted = await DataCrypto.encryptRecordWithSystemKey(
tableName,
dataWithTempId,
systemKey,
);
Object.assign(encryptedData, systemEncrypted);
}
if (!data.id) {
delete encryptedData.id;
}
@@ -105,6 +124,20 @@ class SimpleDBOps {
userDataKey,
);
if (tableName === "ssh_credentials") {
const { SystemCrypto } = await import("./system-crypto.js");
const systemCrypto = SystemCrypto.getInstance();
const systemKey = await systemCrypto.getCredentialSharingKey();
const systemEncrypted = await DataCrypto.encryptRecordWithSystemKey(
tableName,
data,
systemKey,
);
Object.assign(encryptedData, systemEncrypted);
}
const result = await getDb()
.update(table)
.set(encryptedData)

View File

@@ -0,0 +1,131 @@
import { SocksClient } from "socks";
import type { SocksClientOptions } from "socks";
import net from "net";
import { sshLogger } from "./logger.js";
import type { ProxyNode } from "../../types/index.js";
// Per-host SOCKS5 proxy settings used by createSocks5Connection.
export interface SOCKS5Config {
  // Master switch: when falsy, a direct (non-proxied) connection is used.
  useSocks5?: boolean;
  // Single-proxy mode: proxy server address (ignored when a chain is set).
  socks5Host?: string;
  // Proxy port; defaults to 1080 when omitted.
  socks5Port?: number;
  // Optional SOCKS5 username/password authentication.
  socks5Username?: string;
  socks5Password?: string;
  // Multi-hop mode: ordered list of proxies; takes precedence over
  // socks5Host when non-empty.
  socks5ProxyChain?: ProxyNode[];
}
/**
 * Creates a SOCKS5 connection through a single proxy or a chain of proxies.
 *
 * Resolution order: disabled → null; non-empty proxy chain → chained
 * connection; single proxy host → single-hop connection; otherwise null
 * (caller falls back to a direct connection).
 *
 * @param targetHost - Target SSH server hostname/IP
 * @param targetPort - Target SSH server port
 * @param socks5Config - SOCKS5 proxy configuration
 * @returns connected socket, or null if SOCKS5 is not enabled/configured
 */
export async function createSocks5Connection(
  targetHost: string,
  targetPort: number,
  socks5Config: SOCKS5Config,
): Promise<net.Socket | null> {
  const { useSocks5, socks5ProxyChain, socks5Host } = socks5Config;

  if (!useSocks5) {
    return null;
  }

  if (socks5ProxyChain?.length) {
    // A configured chain wins over a single-proxy setup.
    return createProxyChainConnection(targetHost, targetPort, socks5ProxyChain);
  }

  return socks5Host
    ? createSingleProxyConnection(targetHost, targetPort, socks5Config)
    : null;
}
/**
 * Creates a connection through a single SOCKS proxy.
 *
 * Builds the socks client options from the config (port defaults to 1080)
 * and returns the established socket; failures are logged with full context
 * and rethrown to the caller.
 */
async function createSingleProxyConnection(
  targetHost: string,
  targetPort: number,
  socks5Config: SOCKS5Config,
): Promise<net.Socket> {
  const proxyPort = socks5Config.socks5Port || 1080;
  const options: SocksClientOptions = {
    proxy: {
      host: socks5Config.socks5Host!,
      port: proxyPort,
      type: 5,
      userId: socks5Config.socks5Username,
      password: socks5Config.socks5Password,
    },
    command: "connect",
    destination: { host: targetHost, port: targetPort },
  };
  try {
    const { socket } = await SocksClient.createConnection(options);
    return socket;
  } catch (error) {
    sshLogger.error("SOCKS5 connection failed", error, {
      operation: "socks5_connect_failed",
      proxyHost: socks5Config.socks5Host,
      proxyPort,
      targetHost,
      targetPort,
      errorMessage: error instanceof Error ? error.message : "Unknown error",
    });
    throw error;
  }
}
/**
 * Creates a connection through a chain of SOCKS proxies.
 * Each proxy in the chain connects through the previous one.
 *
 * @throws Error("Proxy chain is empty") for an empty chain; rethrows any
 *         connection failure after logging it with the hop list
 */
async function createProxyChainConnection(
  targetHost: string,
  targetPort: number,
  proxyChain: ProxyNode[],
): Promise<net.Socket> {
  if (proxyChain.length === 0) {
    throw new Error("Proxy chain is empty");
  }
  // Human-readable hop list (e.g. "a:1080 → b:1080") for error reporting.
  const chainPath = proxyChain.map((p) => `${p.host}:${p.port}`).join(" → ");
  try {
    const info = await SocksClient.createConnectionChain({
      proxies: proxyChain.map((p) => ({
        host: p.host,
        port: p.port,
        type: p.type,
        userId: p.username,
        password: p.password,
        timeout: 10000, // per-hop connect timeout (ms)
      })),
      command: "connect",
      destination: {
        host: targetHost,
        port: targetPort,
      },
    });
    return info.socket;
  } catch (error) {
    sshLogger.error("SOCKS proxy chain connection failed", error, {
      operation: "socks5_chain_connect_failed",
      // Include the hop path (was computed but unused before this fix).
      chain: chainPath,
      chainLength: proxyChain.length,
      targetHost,
      targetPort,
      errorMessage: error instanceof Error ? error.message : "Unknown error",
    });
    throw error;
  }
}

View File

@@ -1,4 +1,5 @@
import ssh2Pkg from "ssh2";
import { sshLogger } from "./logger.js";
const ssh2Utils = ssh2Pkg.utils;
function detectKeyTypeFromContent(keyContent: string): string {
@@ -84,7 +85,7 @@ function detectKeyTypeFromContent(keyContent: string): string {
} else if (decodedString.includes("1.3.101.112")) {
return "ssh-ed25519";
}
} catch {}
} catch (error) {}
if (content.length < 800) {
return "ssh-ed25519";
@@ -140,7 +141,7 @@ function detectPublicKeyTypeFromContent(publicKeyContent: string): string {
} else if (decodedString.includes("1.3.101.112")) {
return "ssh-ed25519";
}
} catch {}
} catch (error) {}
if (content.length < 400) {
return "ssh-ed25519";
@@ -242,7 +243,7 @@ export function parseSSHKey(
useSSH2 = true;
}
} catch {}
} catch (error) {}
}
if (!useSSH2) {
@@ -268,7 +269,7 @@ export function parseSSHKey(
success: true,
};
}
} catch {}
} catch (error) {}
return {
privateKey: privateKeyData,

View File

@@ -8,6 +8,7 @@ class SystemCrypto {
private jwtSecret: string | null = null;
private databaseKey: Buffer | null = null;
private internalAuthToken: string | null = null;
private credentialSharingKey: Buffer | null = null;
private constructor() {}
@@ -51,17 +52,8 @@ class SystemCrypto {
},
);
}
} catch (fileError) {
databaseLogger.warn("Failed to read .env file for JWT secret", {
operation: "jwt_init_file_read_failed",
error:
fileError instanceof Error ? fileError.message : "Unknown error",
});
}
} catch (fileError) {}
databaseLogger.warn("Generating new JWT secret", {
operation: "jwt_generating_new_secret",
});
await this.generateAndGuideUser();
} catch (error) {
databaseLogger.error("Failed to initialize JWT secret", error, {
@@ -80,29 +72,44 @@ class SystemCrypto {
async initializeDatabaseKey(): Promise<void> {
try {
const dataDir = process.env.DATA_DIR || "./db/data";
const envPath = path.join(dataDir, ".env");
const envKey = process.env.DATABASE_KEY;
if (envKey && envKey.length >= 64) {
this.databaseKey = Buffer.from(envKey, "hex");
const keyFingerprint = crypto
.createHash("sha256")
.update(this.databaseKey)
.digest("hex")
.substring(0, 16);
return;
}
const dataDir = process.env.DATA_DIR || "./db/data";
const envPath = path.join(dataDir, ".env");
try {
const envContent = await fs.readFile(envPath, "utf8");
const dbKeyMatch = envContent.match(/^DATABASE_KEY=(.+)$/m);
if (dbKeyMatch && dbKeyMatch[1] && dbKeyMatch[1].length >= 64) {
this.databaseKey = Buffer.from(dbKeyMatch[1], "hex");
process.env.DATABASE_KEY = dbKeyMatch[1];
const keyFingerprint = crypto
.createHash("sha256")
.update(this.databaseKey)
.digest("hex")
.substring(0, 16);
return;
} else {
}
} catch {}
} catch (fileError) {}
await this.generateAndGuideDatabaseKey();
} catch (error) {
databaseLogger.error("Failed to initialize database key", error, {
operation: "db_key_init_failed",
dataDir: process.env.DATA_DIR || "./db/data",
});
throw new Error("Database key initialization failed");
}
@@ -134,7 +141,7 @@ class SystemCrypto {
process.env.INTERNAL_AUTH_TOKEN = tokenMatch[1];
return;
}
} catch {}
} catch (error) {}
await this.generateAndGuideInternalAuthToken();
} catch (error) {
@@ -152,6 +159,48 @@ class SystemCrypto {
return this.internalAuthToken!;
}
async initializeCredentialSharingKey(): Promise<void> {
try {
const dataDir = process.env.DATA_DIR || "./db/data";
const envPath = path.join(dataDir, ".env");
const envKey = process.env.CREDENTIAL_SHARING_KEY;
if (envKey && envKey.length >= 64) {
this.credentialSharingKey = Buffer.from(envKey, "hex");
return;
}
try {
const envContent = await fs.readFile(envPath, "utf8");
const csKeyMatch = envContent.match(/^CREDENTIAL_SHARING_KEY=(.+)$/m);
if (csKeyMatch && csKeyMatch[1] && csKeyMatch[1].length >= 64) {
this.credentialSharingKey = Buffer.from(csKeyMatch[1], "hex");
process.env.CREDENTIAL_SHARING_KEY = csKeyMatch[1];
return;
}
} catch (fileError) {}
await this.generateAndGuideCredentialSharingKey();
} catch (error) {
databaseLogger.error(
"Failed to initialize credential sharing key",
error,
{
operation: "cred_sharing_key_init_failed",
dataDir: process.env.DATA_DIR || "./db/data",
},
);
throw new Error("Credential sharing key initialization failed");
}
}
async getCredentialSharingKey(): Promise<Buffer> {
if (!this.credentialSharingKey) {
await this.initializeCredentialSharingKey();
}
return this.credentialSharingKey!;
}
private async generateAndGuideUser(): Promise<void> {
const newSecret = crypto.randomBytes(32).toString("hex");
const instanceId = crypto.randomBytes(8).toString("hex");
@@ -204,6 +253,26 @@ class SystemCrypto {
);
}
private async generateAndGuideCredentialSharingKey(): Promise<void> {
const newKey = crypto.randomBytes(32);
const newKeyHex = newKey.toString("hex");
const instanceId = crypto.randomBytes(8).toString("hex");
this.credentialSharingKey = newKey;
await this.updateEnvFile("CREDENTIAL_SHARING_KEY", newKeyHex);
databaseLogger.success(
"Credential sharing key auto-generated and saved to .env",
{
operation: "cred_sharing_key_auto_generated",
instanceId,
envVarName: "CREDENTIAL_SHARING_KEY",
note: "Used for offline credential sharing - no restart required",
},
);
}
async validateJWTSecret(): Promise<boolean> {
try {
const secret = await this.getJWTSecret();

View File

@@ -21,8 +21,8 @@ interface EncryptedDEK {
interface UserSession {
dataKey: Buffer;
lastActivity: number;
expiresAt: number;
lastActivity?: number;
}
class UserCrypto {
@@ -33,8 +33,6 @@ class UserCrypto {
private static readonly PBKDF2_ITERATIONS = 100000;
private static readonly KEK_LENGTH = 32;
private static readonly DEK_LENGTH = 32;
private static readonly SESSION_DURATION = 24 * 60 * 60 * 1000;
private static readonly MAX_INACTIVITY = 6 * 60 * 60 * 1000;
private constructor() {
setInterval(
@@ -69,7 +67,10 @@ class UserCrypto {
DEK.fill(0);
}
async setupOIDCUserEncryption(userId: string): Promise<void> {
async setupOIDCUserEncryption(
userId: string,
sessionDurationMs: number,
): Promise<void> {
const existingEncryptedDEK = await this.getEncryptedDEK(userId);
let DEK: Buffer;
@@ -104,14 +105,17 @@ class UserCrypto {
const now = Date.now();
this.userSessions.set(userId, {
dataKey: Buffer.from(DEK),
lastActivity: now,
expiresAt: now + UserCrypto.SESSION_DURATION,
expiresAt: now + sessionDurationMs,
});
DEK.fill(0);
}
async authenticateUser(userId: string, password: string): Promise<boolean> {
async authenticateUser(
userId: string,
password: string,
sessionDurationMs: number,
): Promise<boolean> {
try {
const kekSalt = await this.getKEKSalt(userId);
if (!kekSalt) return false;
@@ -144,8 +148,7 @@ class UserCrypto {
this.userSessions.set(userId, {
dataKey: Buffer.from(DEK),
lastActivity: now,
expiresAt: now + UserCrypto.SESSION_DURATION,
expiresAt: now + sessionDurationMs,
});
DEK.fill(0);
@@ -161,13 +164,49 @@ class UserCrypto {
}
}
async authenticateOIDCUser(userId: string): Promise<boolean> {
async authenticateOIDCUser(
userId: string,
sessionDurationMs: number,
): Promise<boolean> {
try {
const oidcEncryptedDEK = await this.getOIDCEncryptedDEK(userId);
if (oidcEncryptedDEK) {
const systemKey = this.deriveOIDCSystemKey(userId);
const DEK = this.decryptDEK(oidcEncryptedDEK, systemKey);
systemKey.fill(0);
if (!DEK || DEK.length === 0) {
databaseLogger.error(
"Failed to decrypt OIDC DEK for dual-auth user",
{
operation: "oidc_auth_dual_decrypt_failed",
userId,
},
);
return false;
}
const now = Date.now();
const oldSession = this.userSessions.get(userId);
if (oldSession) {
oldSession.dataKey.fill(0);
}
this.userSessions.set(userId, {
dataKey: Buffer.from(DEK),
expiresAt: now + sessionDurationMs,
});
DEK.fill(0);
return true;
}
const kekSalt = await this.getKEKSalt(userId);
const encryptedDEK = await this.getEncryptedDEK(userId);
if (!kekSalt || !encryptedDEK) {
await this.setupOIDCUserEncryption(userId);
await this.setupOIDCUserEncryption(userId, sessionDurationMs);
return true;
}
@@ -176,7 +215,7 @@ class UserCrypto {
systemKey.fill(0);
if (!DEK || DEK.length === 0) {
await this.setupOIDCUserEncryption(userId);
await this.setupOIDCUserEncryption(userId, sessionDurationMs);
return true;
}
@@ -189,15 +228,19 @@ class UserCrypto {
this.userSessions.set(userId, {
dataKey: Buffer.from(DEK),
lastActivity: now,
expiresAt: now + UserCrypto.SESSION_DURATION,
expiresAt: now + sessionDurationMs,
});
DEK.fill(0);
return true;
} catch {
await this.setupOIDCUserEncryption(userId);
} catch (error) {
databaseLogger.error("OIDC authentication failed", error, {
operation: "oidc_auth_error",
userId,
error: error instanceof Error ? error.message : "Unknown",
});
await this.setupOIDCUserEncryption(userId, sessionDurationMs);
return true;
}
}
@@ -219,16 +262,6 @@ class UserCrypto {
return null;
}
if (now - session.lastActivity > UserCrypto.MAX_INACTIVITY) {
this.userSessions.delete(userId);
session.dataKey.fill(0);
if (this.sessionExpiredCallback) {
this.sessionExpiredCallback(userId);
}
return null;
}
session.lastActivity = now;
return session.dataKey;
}
@@ -331,6 +364,83 @@ class UserCrypto {
}
}
/**
* Convert a password-based user's encryption to DUAL-AUTH encryption.
* This is used when linking an OIDC account to a password account for dual-auth.
*
* IMPORTANT: This does NOT delete the password-based KEK salt!
* The user needs to maintain BOTH password and OIDC authentication methods.
* We keep the password KEK salt so password login still works.
* We also store the DEK encrypted with OIDC system key for OIDC login.
*/
async convertToOIDCEncryption(userId: string): Promise<void> {
try {
const existingEncryptedDEK = await this.getEncryptedDEK(userId);
const existingKEKSalt = await this.getKEKSalt(userId);
if (!existingEncryptedDEK && !existingKEKSalt) {
databaseLogger.warn("No existing encryption to convert for user", {
operation: "convert_to_oidc_encryption_skip",
userId,
});
return;
}
const existingDEK = this.getUserDataKey(userId);
if (!existingDEK) {
throw new Error(
"Cannot convert to OIDC encryption - user session not active. Please log in with password first.",
);
}
const systemKey = this.deriveOIDCSystemKey(userId);
const oidcEncryptedDEK = this.encryptDEK(existingDEK, systemKey);
systemKey.fill(0);
const key = `user_encrypted_dek_oidc_${userId}`;
const value = JSON.stringify(oidcEncryptedDEK);
const { getDb } = await import("../database/db/index.js");
const { settings } = await import("../database/db/schema.js");
const { eq } = await import("drizzle-orm");
const existing = await getDb()
.select()
.from(settings)
.where(eq(settings.key, key));
if (existing.length > 0) {
await getDb()
.update(settings)
.set({ value })
.where(eq(settings.key, key));
} else {
await getDb().insert(settings).values({ key, value });
}
databaseLogger.info(
"Converted user encryption to dual-auth (password + OIDC)",
{
operation: "convert_to_oidc_encryption_preserved",
userId,
},
);
const { saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
await saveMemoryDatabaseToFile();
} catch (error) {
databaseLogger.error("Failed to convert to OIDC encryption", error, {
operation: "convert_to_oidc_encryption_error",
userId,
error: error instanceof Error ? error.message : "Unknown error",
});
throw error;
}
}
private async validatePassword(
userId: string,
password: string,
@@ -359,10 +469,7 @@ class UserCrypto {
const expiredUsers: string[] = [];
for (const [userId, session] of this.userSessions.entries()) {
if (
now > session.expiresAt ||
now - session.lastActivity > UserCrypto.MAX_INACTIVITY
) {
if (now > session.expiresAt) {
session.dataKey.fill(0);
expiredUsers.push(userId);
}
@@ -512,6 +619,26 @@ class UserCrypto {
return null;
}
}
private async getOIDCEncryptedDEK(
userId: string,
): Promise<EncryptedDEK | null> {
try {
const key = `user_encrypted_dek_oidc_${userId}`;
const result = await getDb()
.select()
.from(settings)
.where(eq(settings.key, key));
if (result.length === 0) {
return null;
}
return JSON.parse(result[0].value);
} catch {
return null;
}
}
}
export { UserCrypto, type KEKSalt, type EncryptedDEK };

View File

@@ -177,30 +177,57 @@ class UserDataImport {
continue;
}
const tempId = `import-ssh-${targetUserId}-${Date.now()}-${imported}`;
const newHostData = {
const existing = await getDb()
.select()
.from(sshData)
.where(
and(
eq(sshData.userId, targetUserId),
eq(sshData.ip, host.ip as string),
eq(sshData.port, host.port as number),
eq(sshData.username, host.username as string),
),
);
if (existing.length > 0 && !options.replaceExisting) {
skipped++;
continue;
}
const newHostData: any = {
...host,
id: tempId,
userId: targetUserId,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
};
let processedHostData = newHostData;
if (existing.length === 0) {
newHostData.createdAt = new Date().toISOString();
}
let processedHostData: any = newHostData;
if (options.userDataKey) {
processedHostData = DataCrypto.encryptRecord(
"ssh_data",
newHostData,
targetUserId,
options.userDataKey,
);
) as Record<string, unknown>;
}
delete processedHostData.id;
await getDb()
.insert(sshData)
.values(processedHostData as unknown as typeof sshData.$inferInsert);
if (existing.length > 0 && options.replaceExisting) {
await getDb()
.update(sshData)
.set(processedHostData as unknown as typeof sshData.$inferInsert)
.where(eq(sshData.id, existing[0].id));
} else {
await getDb()
.insert(sshData)
.values(
processedHostData as unknown as typeof sshData.$inferInsert,
);
}
imported++;
} catch (error) {
errors.push(
@@ -233,34 +260,59 @@ class UserDataImport {
continue;
}
const tempCredId = `import-cred-${targetUserId}-${Date.now()}-${imported}`;
const newCredentialData = {
const existing = await getDb()
.select()
.from(sshCredentials)
.where(
and(
eq(sshCredentials.userId, targetUserId),
eq(sshCredentials.name, credential.name as string),
),
);
if (existing.length > 0 && !options.replaceExisting) {
skipped++;
continue;
}
const newCredentialData: any = {
...credential,
id: tempCredId,
userId: targetUserId,
usageCount: 0,
lastUsed: null,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
};
let processedCredentialData = newCredentialData;
if (existing.length === 0) {
newCredentialData.usageCount = 0;
newCredentialData.lastUsed = null;
newCredentialData.createdAt = new Date().toISOString();
}
let processedCredentialData: any = newCredentialData;
if (options.userDataKey) {
processedCredentialData = DataCrypto.encryptRecord(
"ssh_credentials",
newCredentialData,
targetUserId,
options.userDataKey,
);
) as Record<string, unknown>;
}
delete processedCredentialData.id;
await getDb()
.insert(sshCredentials)
.values(
processedCredentialData as unknown as typeof sshCredentials.$inferInsert,
);
if (existing.length > 0 && options.replaceExisting) {
await getDb()
.update(sshCredentials)
.set(
processedCredentialData as unknown as typeof sshCredentials.$inferInsert,
)
.where(eq(sshCredentials.id, existing[0].id));
} else {
await getDb()
.insert(sshCredentials)
.values(
processedCredentialData as unknown as typeof sshCredentials.$inferInsert,
);
}
imported++;
} catch (error) {
errors.push(

View File

@@ -0,0 +1,155 @@
import * as React from "react";
import * as AlertDialogPrimitive from "@radix-ui/react-alert-dialog";
import { cn } from "@/lib/utils";
import { buttonVariants } from "@/components/ui/button";
// Root, trigger and portal are thin pass-throughs around the Radix
// primitives; each one only adds a `data-slot` attribute so styling and
// tests can target the part.
function AlertDialog(
  props: React.ComponentProps<typeof AlertDialogPrimitive.Root>,
) {
  return <AlertDialogPrimitive.Root data-slot="alert-dialog" {...props} />;
}

function AlertDialogTrigger(
  props: React.ComponentProps<typeof AlertDialogPrimitive.Trigger>,
) {
  return (
    <AlertDialogPrimitive.Trigger data-slot="alert-dialog-trigger" {...props} />
  );
}

function AlertDialogPortal(
  props: React.ComponentProps<typeof AlertDialogPrimitive.Portal>,
) {
  return (
    <AlertDialogPrimitive.Portal data-slot="alert-dialog-portal" {...props} />
  );
}
// Full-screen backdrop behind the alert dialog; fades with open/close state.
function AlertDialogOverlay({
  className,
  ...rest
}: React.ComponentProps<typeof AlertDialogPrimitive.Overlay>) {
  const overlayClasses = cn(
    "data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-overlay",
    className,
  );
  return (
    <AlertDialogPrimitive.Overlay
      data-slot="alert-dialog-overlay"
      className={overlayClasses}
      {...rest}
    />
  );
}
// Dialog panel: portals itself, renders the overlay, then the centered
// content box (zoom/fade animations keyed off the Radix open state).
function AlertDialogContent({
  className,
  ...rest
}: React.ComponentProps<typeof AlertDialogPrimitive.Content>) {
  const contentClasses = cn(
    "bg-background data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 fixed top-[50%] left-[50%] z-50 grid w-full max-w-[calc(100%-2rem)] translate-x-[-50%] translate-y-[-50%] gap-4 rounded-lg border p-6 shadow-lg duration-200 sm:max-w-lg",
    className,
  );
  return (
    <AlertDialogPortal>
      <AlertDialogOverlay />
      <AlertDialogPrimitive.Content
        data-slot="alert-dialog-content"
        className={contentClasses}
        {...rest}
      />
    </AlertDialogPortal>
  );
}
function AlertDialogHeader({
className,
...props
}: React.ComponentProps<"div">) {
return (
<div
data-slot="alert-dialog-header"
className={cn("flex flex-col gap-2 text-center sm:text-left", className)}
{...props}
/>
);
}
function AlertDialogFooter({
className,
...props
}: React.ComponentProps<"div">) {
return (
<div
data-slot="alert-dialog-footer"
className={cn(
"flex flex-col-reverse gap-2 sm:flex-row sm:justify-end",
className,
)}
{...props}
/>
);
}
// Accessible title/description; Radix wires these to the content via
// aria-labelledby / aria-describedby.
function AlertDialogTitle({
  className,
  ...rest
}: React.ComponentProps<typeof AlertDialogPrimitive.Title>) {
  return (
    <AlertDialogPrimitive.Title
      data-slot="alert-dialog-title"
      className={cn("text-lg font-semibold", className)}
      {...rest}
    />
  );
}

function AlertDialogDescription({
  className,
  ...rest
}: React.ComponentProps<typeof AlertDialogPrimitive.Description>) {
  return (
    <AlertDialogPrimitive.Description
      data-slot="alert-dialog-description"
      className={cn("text-muted-foreground text-sm", className)}
      {...rest}
    />
  );
}
/**
 * Confirm button of the alert dialog, styled as the default button variant.
 * Tagged with `data-slot` for parity with every other part in this file
 * (the originals omitted it on Action/Cancel only).
 */
function AlertDialogAction({
  className,
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Action>) {
  return (
    <AlertDialogPrimitive.Action
      data-slot="alert-dialog-action"
      className={cn(buttonVariants(), className)}
      {...props}
    />
  );
}

/**
 * Dismiss button of the alert dialog, rendered with the outline variant.
 */
function AlertDialogCancel({
  className,
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Cancel>) {
  return (
    <AlertDialogPrimitive.Cancel
      data-slot="alert-dialog-cancel"
      className={cn(buttonVariants({ variant: "outline" }), className)}
      {...props}
    />
  );
}
// Public API: the full set of alert-dialog building blocks, composed by
// callers as Root -> Trigger -> Portal/Overlay/Content -> parts.
export {
  AlertDialog,
  AlertDialogPortal,
  AlertDialogOverlay,
  AlertDialogTrigger,
  AlertDialogContent,
  AlertDialogHeader,
  AlertDialogFooter,
  AlertDialogTitle,
  AlertDialogDescription,
  AlertDialogAction,
  AlertDialogCancel,
};

View File

@@ -15,7 +15,7 @@ const badgeVariants = cva(
secondary:
"border-transparent bg-secondary text-secondary-foreground [a&]:hover:bg-secondary/90",
destructive:
"border-transparent bg-destructive text-white [a&]:hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
"border-transparent bg-destructive text-foreground [a&]:hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
outline:
"text-foreground [a&]:hover:bg-accent [a&]:hover:text-accent-foreground",
},

View File

@@ -6,14 +6,14 @@ import { cva, type VariantProps } from "class-variance-authority";
import { cn } from "@/lib/utils";
const buttonVariants = cva(
"inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-md text-sm font-medium transition-all disabled:pointer-events-none disabled:opacity-50 [&_svg]:pointer-events-none [&_svg:not([class*='size-'])]:size-4 shrink-0 [&_svg]:shrink-0 outline-none focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px] aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive",
"inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-md text-sm font-medium transition-all duration-100 disabled:pointer-events-none disabled:opacity-50 [&_svg]:pointer-events-none [&_svg:not([class*='size-'])]:size-4 shrink-0 [&_svg]:shrink-0 outline-none focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px] aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive active:scale-95",
{
variants: {
variant: {
default:
"bg-primary text-primary-foreground shadow-xs hover:bg-primary/90",
destructive:
"bg-destructive text-white shadow-xs hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
"bg-destructive text-foreground shadow-xs hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
outline:
"border bg-background shadow-xs hover:bg-accent hover:text-accent-foreground dark:bg-input/30 dark:border-input dark:hover:bg-input/50",
secondary:

View File

@@ -7,7 +7,7 @@ function Card({ className, ...props }: React.ComponentProps<"div">) {
<div
data-slot="card"
className={cn(
"bg-card text-card-foreground flex flex-col gap-6 rounded-xl border py-6 shadow-sm",
"bg-elevated text-foreground flex flex-col gap-6 rounded-lg border-2 border-edge py-6 shadow-sm",
className,
)}
{...props}

View File

@@ -0,0 +1,184 @@
"use client";
import * as React from "react";
import { Command as CommandPrimitive } from "cmdk";
import { SearchIcon } from "lucide-react";
import { cn } from "@/lib/utils";
import {
Dialog,
DialogContent,
DialogDescription,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
function Command({
className,
...props
}: React.ComponentProps<typeof CommandPrimitive>) {
return (
<CommandPrimitive
data-slot="command"
className={cn(
"bg-popover text-popover-foreground flex h-full w-full flex-col overflow-hidden rounded-md",
className,
)}
{...props}
/>
);
}
/**
 * Command palette presented inside a modal dialog. Title/description are
 * visually hidden (`sr-only`) but kept for assistive technology.
 *
 * NOTE(review): the sr-only DialogHeader sits outside DialogContent; this
 * mirrors the upstream shadcn component, but confirm screen readers still
 * announce the title when the dialog opens.
 */
function CommandDialog({
  title = "Command Palette",
  description = "Search for a command to run...",
  children,
  className,
  showCloseButton = true,
  ...dialogProps
}: React.ComponentProps<typeof Dialog> & {
  title?: string;
  description?: string;
  className?: string;
  showCloseButton?: boolean;
}) {
  return (
    <Dialog {...dialogProps}>
      <DialogHeader className="sr-only">
        <DialogTitle>{title}</DialogTitle>
        <DialogDescription>{description}</DialogDescription>
      </DialogHeader>
      <DialogContent
        className={cn("overflow-hidden p-0", className)}
        showCloseButton={showCloseButton}
      >
        {/* Selector soup below restyles cmdk's internal parts wholesale. */}
        <Command className="[&_[cmdk-group-heading]]:text-muted-foreground **:data-[slot=command-input-wrapper]:h-12 [&_[cmdk-group-heading]]:px-2 [&_[cmdk-group-heading]]:font-medium [&_[cmdk-group]]:px-2 [&_[cmdk-group]:not([hidden])_~[cmdk-group]]:pt-0 [&_[cmdk-input-wrapper]_svg]:h-5 [&_[cmdk-input-wrapper]_svg]:w-5 [&_[cmdk-input]]:h-12 [&_[cmdk-item]]:px-2 [&_[cmdk-item]]:py-3 [&_[cmdk-item]_svg]:h-5 [&_[cmdk-item]_svg]:w-5">
          {children}
        </Command>
      </DialogContent>
    </Dialog>
  );
}
// Search field with a leading magnifier icon, rendered above the list.
function CommandInput({
  className,
  ...rest
}: React.ComponentProps<typeof CommandPrimitive.Input>) {
  const inputClasses = cn(
    "placeholder:text-muted-foreground flex h-10 w-full rounded-md bg-transparent py-3 text-sm outline-hidden disabled:cursor-not-allowed disabled:opacity-50",
    className,
  );
  return (
    <div
      data-slot="command-input-wrapper"
      className="flex h-9 items-center gap-2 border-b px-3"
    >
      <SearchIcon className="size-4 shrink-0 opacity-50" />
      <CommandPrimitive.Input
        data-slot="command-input"
        className={inputClasses}
        {...rest}
      />
    </div>
  );
}

// Scrollable results area, capped in height with the project's thin scrollbar.
function CommandList({
  className,
  ...rest
}: React.ComponentProps<typeof CommandPrimitive.List>) {
  const listClasses = cn(
    "max-h-[300px] scroll-py-1 overflow-x-hidden overflow-y-auto thin-scrollbar",
    className,
  );
  return (
    <CommandPrimitive.List
      data-slot="command-list"
      className={listClasses}
      {...rest}
    />
  );
}
// "No results" placeholder shown by cmdk when the query matches nothing.
function CommandEmpty(
  props: React.ComponentProps<typeof CommandPrimitive.Empty>,
) {
  return (
    <CommandPrimitive.Empty
      data-slot="command-empty"
      className="py-6 text-center text-sm"
      {...props}
    />
  );
}

// Labeled group of items; heading styling targets cmdk's internal markup.
function CommandGroup({
  className,
  ...rest
}: React.ComponentProps<typeof CommandPrimitive.Group>) {
  const groupClasses = cn(
    "text-foreground [&_[cmdk-group-heading]]:text-muted-foreground overflow-hidden p-1 [&_[cmdk-group-heading]]:px-2 [&_[cmdk-group-heading]]:py-1.5 [&_[cmdk-group-heading]]:text-xs [&_[cmdk-group-heading]]:font-medium",
    className,
  );
  return (
    <CommandPrimitive.Group
      data-slot="command-group"
      className={groupClasses}
      {...rest}
    />
  );
}

// 1px horizontal rule between groups.
function CommandSeparator({
  className,
  ...rest
}: React.ComponentProps<typeof CommandPrimitive.Separator>) {
  return (
    <CommandPrimitive.Separator
      data-slot="command-separator"
      className={cn("bg-border -mx-1 h-px", className)}
      {...rest}
    />
  );
}
function CommandItem({
className,
...props
}: React.ComponentProps<typeof CommandPrimitive.Item>) {
return (
<CommandPrimitive.Item
data-slot="command-item"
className={cn(
"data-[selected=true]:bg-accent data-[selected=true]:text-accent-foreground [&_svg:not([class*='text-'])]:text-muted-foreground relative flex cursor-default items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-hidden select-none data-[disabled=true]:pointer-events-none data-[disabled=true]:opacity-50 [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4",
className,
)}
{...props}
/>
);
}
function CommandShortcut({
className,
...props
}: React.ComponentProps<"span">) {
return (
<span
data-slot="command-shortcut"
className={cn(
"text-muted-foreground ml-auto text-xs tracking-widest",
className,
)}
{...props}
/>
);
}
export {
Command,
CommandDialog,
CommandInput,
CommandList,
CommandEmpty,
CommandGroup,
CommandItem,
CommandShortcut,
CommandSeparator,
};

View File

@@ -0,0 +1,141 @@
import * as React from "react";
import * as DialogPrimitive from "@radix-ui/react-dialog";
import { XIcon } from "lucide-react";
import { cn } from "@/lib/utils";
// Root, trigger, portal and close are plain pass-throughs around the Radix
// dialog primitives, each tagged with a `data-slot` attribute.
function Dialog(props: React.ComponentProps<typeof DialogPrimitive.Root>) {
  return <DialogPrimitive.Root data-slot="dialog" {...props} />;
}

function DialogTrigger(
  props: React.ComponentProps<typeof DialogPrimitive.Trigger>,
) {
  return <DialogPrimitive.Trigger data-slot="dialog-trigger" {...props} />;
}

function DialogPortal(
  props: React.ComponentProps<typeof DialogPrimitive.Portal>,
) {
  return <DialogPrimitive.Portal data-slot="dialog-portal" {...props} />;
}

function DialogClose(
  props: React.ComponentProps<typeof DialogPrimitive.Close>,
) {
  return <DialogPrimitive.Close data-slot="dialog-close" {...props} />;
}
// Full-screen backdrop behind the dialog; fades with open/close state.
function DialogOverlay({
  className,
  ...rest
}: React.ComponentProps<typeof DialogPrimitive.Overlay>) {
  const overlayClasses = cn(
    "data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-overlay",
    className,
  );
  return (
    <DialogPrimitive.Overlay
      data-slot="dialog-overlay"
      className={overlayClasses}
      {...rest}
    />
  );
}
function DialogContent({
className,
children,
showCloseButton = true,
...props
}: React.ComponentProps<typeof DialogPrimitive.Content> & {
showCloseButton?: boolean;
}) {
return (
<DialogPortal data-slot="dialog-portal">
<DialogOverlay />
<DialogPrimitive.Content
data-slot="dialog-content"
className={cn(
"bg-background data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 fixed top-[50%] left-[50%] z-50 grid w-full max-w-[calc(100%-2rem)] translate-x-[-50%] translate-y-[-50%] gap-4 rounded-lg border p-6 shadow-lg duration-200 sm:max-w-lg",
className,
)}
{...props}
>
{children}
{showCloseButton && (
<DialogPrimitive.Close
data-slot="dialog-close"
className="ring-offset-background focus:ring-ring data-[state=open]:bg-accent data-[state=open]:text-muted-foreground absolute top-4 right-4 rounded-xs opacity-70 transition-opacity hover:opacity-100 focus:ring-2 focus:ring-offset-2 focus:outline-hidden disabled:pointer-events-none [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4"
>
<XIcon />
<span className="sr-only">Close</span>
</DialogPrimitive.Close>
)}
</DialogPrimitive.Content>
</DialogPortal>
);
}
function DialogHeader({ className, ...props }: React.ComponentProps<"div">) {
return (
<div
data-slot="dialog-header"
className={cn("flex flex-col gap-2 text-center sm:text-left", className)}
{...props}
/>
);
}
function DialogFooter({ className, ...props }: React.ComponentProps<"div">) {
return (
<div
data-slot="dialog-footer"
className={cn(
"flex flex-col-reverse gap-2 sm:flex-row sm:justify-end",
className,
)}
{...props}
/>
);
}
// Accessible title/description; Radix wires these to the content via
// aria-labelledby / aria-describedby.
function DialogTitle({
  className,
  ...rest
}: React.ComponentProps<typeof DialogPrimitive.Title>) {
  return (
    <DialogPrimitive.Title
      data-slot="dialog-title"
      className={cn("text-lg leading-none font-semibold", className)}
      {...rest}
    />
  );
}

function DialogDescription({
  className,
  ...rest
}: React.ComponentProps<typeof DialogPrimitive.Description>) {
  return (
    <DialogPrimitive.Description
      data-slot="dialog-description"
      className={cn("text-muted-foreground text-sm", className)}
      {...rest}
    />
  );
}
// Public API: the dialog building blocks, composed by callers as
// Root -> Trigger -> Portal/Overlay/Content -> parts.
export {
  Dialog,
  DialogClose,
  DialogContent,
  DialogDescription,
  DialogFooter,
  DialogHeader,
  DialogOverlay,
  DialogPortal,
  DialogTitle,
  DialogTrigger,
};

View File

@@ -8,7 +8,7 @@ function Input({ className, type, ...props }: React.ComponentProps<"input">) {
type={type}
data-slot="input"
className={cn(
"file:text-foreground placeholder:text-muted-foreground selection:bg-primary selection:text-primary-foreground dark:bg-input/30 border-input flex h-9 w-full min-w-0 rounded-md border bg-transparent px-3 py-1 text-base shadow-xs transition-[color,box-shadow] outline-none file:inline-flex file:h-7 file:border-0 file:bg-transparent file:text-sm file:font-medium disabled:pointer-events-none disabled:cursor-not-allowed disabled:opacity-50 md:text-sm",
"file:text-foreground placeholder:text-muted-foreground selection:bg-primary selection:text-primary-foreground bg-elevated dark:bg-input/30 border-input flex h-9 w-full min-w-0 rounded-md border px-3 py-1 text-base shadow-xs transition-[color,box-shadow] duration-200 outline-none file:inline-flex file:h-7 file:border-0 file:bg-transparent file:text-sm file:font-medium disabled:pointer-events-none disabled:cursor-not-allowed disabled:opacity-50 md:text-sm",
"focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px]",
"aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive",
className,

28
src/components/ui/kbd.tsx Normal file
View File

@@ -0,0 +1,28 @@
import * as React from "react";

import { cn } from "@/lib/utils";

/**
 * Kbd renders a single keyboard-key badge (e.g. "Ctrl", "K").
 * The trailing variant lines restyle it when nested inside a tooltip.
 *
 * Fixes vs. original: the file used `React.ComponentProps` without ever
 * importing React, and `KbdGroup` was typed as a "div" while rendering a
 * <kbd> element — the type now matches the rendered element.
 */
function Kbd({ className, ...props }: React.ComponentProps<"kbd">) {
  return (
    <kbd
      data-slot="kbd"
      className={cn(
        "bg-muted text-muted-foreground pointer-events-none inline-flex h-5 w-fit min-w-5 items-center justify-center gap-1 rounded-sm px-1 font-sans text-xs font-medium select-none",
        "[&_svg:not([class*='size-'])]:size-3",
        "[[data-slot=tooltip-content]_&]:bg-background/20 [[data-slot=tooltip-content]_&]:text-background dark:[[data-slot=tooltip-content]_&]:bg-background/10",
        className,
      )}
      {...props}
    />
  );
}

/**
 * KbdGroup lays out several Kbd badges in a row (e.g. "Ctrl" + "K").
 * Nested <kbd> elements are valid HTML for composed shortcuts.
 */
function KbdGroup({ className, ...props }: React.ComponentProps<"kbd">) {
  return (
    <kbd
      data-slot="kbd-group"
      className={cn("inline-flex items-center gap-1", className)}
      {...props}
    />
  );
}

export { Kbd, KbdGroup };

View File

@@ -37,13 +37,13 @@ function ResizableHandle({
<ResizablePrimitive.PanelResizeHandle
data-slot="resizable-handle"
className={cn(
"relative flex w-1 items-center justify-center after:absolute after:inset-y-0 after:left-1/2 after:w-1 after:-translate-x-1/2 focus-visible:ring-1 focus-visible:ring-offset-1 focus-visible:outline-hidden data-[panel-group-direction=vertical]:h-1 data-[panel-group-direction=vertical]:w-full data-[panel-group-direction=vertical]:after:left-0 data-[panel-group-direction=vertical]:after:h-1 data-[panel-group-direction=vertical]:after:w-full data-[panel-group-direction=vertical]:after:translate-x-0 data-[panel-group-direction=vertical]:after:-translate-y-1/2 [&[data-panel-group-direction=vertical]>div]:rotate-90 bg-dark-border-hover hover:bg-dark-active active:bg-dark-pressed transition-colors duration-150",
"relative flex w-1 items-center justify-center after:absolute after:inset-y-0 after:left-1/2 after:w-1 after:-translate-x-1/2 focus-visible:ring-1 focus-visible:ring-offset-1 focus-visible:outline-hidden data-[panel-group-direction=vertical]:h-1 data-[panel-group-direction=vertical]:w-full data-[panel-group-direction=vertical]:after:left-0 data-[panel-group-direction=vertical]:after:h-1 data-[panel-group-direction=vertical]:after:w-full data-[panel-group-direction=vertical]:after:translate-x-0 data-[panel-group-direction=vertical]:after:-translate-y-1/2 [&[data-panel-group-direction=vertical]>div]:rotate-90 bg-edge-hover hover:bg-interact active:bg-pressed transition-colors duration-150",
className,
)}
{...props}
>
{withHandle && (
<div className="bg-dark-border-hover hover:bg-dark-active active:bg-dark-pressed z-10 flex h-4 w-3 items-center justify-center rounded-xs border transition-colors duration-150">
<div className="bg-edge-hover hover:bg-interact active:bg-pressed z-10 flex h-4 w-3 items-center justify-center rounded-xs border transition-colors duration-150">
<GripVerticalIcon className="size-2.5" />
</div>
)}

View File

@@ -59,7 +59,7 @@ function SelectContent({
<SelectPrimitive.Content
data-slot="select-content"
className={cn(
"bg-popover text-popover-foreground data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 relative z-50 max-h-(--radix-select-content-available-height) min-w-[8rem] origin-(--radix-select-content-transform-origin) overflow-x-hidden overflow-y-auto rounded-md border shadow-md",
"bg-popover text-popover-foreground data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 relative z-50 max-h-(--radix-select-content-available-height) min-w-[8rem] origin-(--radix-select-content-transform-origin) overflow-x-hidden overflow-y-auto thin-scrollbar rounded-md border shadow-md",
position === "popper" &&
"data-[side=bottom]:translate-y-1 data-[side=left]:-translate-x-1 data-[side=right]:translate-x-1 data-[side=top]:-translate-y-1",
className,

View File

@@ -34,7 +34,7 @@ function SheetOverlay({
<SheetPrimitive.Overlay
data-slot="sheet-overlay"
className={cn(
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-black/50 data-[state=closed]:pointer-events-none",
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-overlay data-[state=closed]:pointer-events-none",
className,
)}
{...props}

View File

@@ -365,7 +365,7 @@ function SidebarContent({ className, ...props }: React.ComponentProps<"div">) {
data-slot="sidebar-content"
data-sidebar="content"
className={cn(
"flex min-h-0 flex-1 flex-col gap-2 overflow-auto group-data-[collapsible=icon]:overflow-hidden",
"flex min-h-0 flex-1 flex-col gap-2 overflow-auto thin-scrollbar group-data-[collapsible=icon]:overflow-hidden",
className,
)}
{...props}

View File

@@ -51,7 +51,7 @@ function Slider({
<SliderPrimitive.Thumb
data-slot="slider-thumb"
key={index}
className="border-primary ring-ring/50 block size-4 shrink-0 rounded-full border bg-white shadow-sm transition-[color,box-shadow] hover:ring-4 focus-visible:ring-4 focus-visible:outline-hidden disabled:pointer-events-none disabled:opacity-50"
className="border-primary ring-ring/50 block size-4 shrink-0 rounded-full border bg-elevated shadow-sm transition-[color,box-shadow] hover:ring-4 focus-visible:ring-4 focus-visible:outline-hidden disabled:pointer-events-none disabled:opacity-50"
/>
))}
</SliderPrimitive.Root>

View File

@@ -1,4 +1,4 @@
import { useTheme } from "next-themes";
import { useTheme } from "@/components/theme-provider";
import { Toaster as Sonner, type ToasterProps, toast } from "sonner";
import { useRef } from "react";

View File

@@ -6,7 +6,7 @@ function Table({ className, ...props }: React.ComponentProps<"table">) {
return (
<div
data-slot="table-container"
className="relative w-full overflow-x-auto"
className="relative w-full overflow-x-auto thin-scrollbar"
>
<table
data-slot="table"

View File

@@ -40,7 +40,7 @@ function TabsTrigger({
<TabsPrimitive.Trigger
data-slot="tabs-trigger"
className={cn(
"data-[state=active]:bg-background dark:data-[state=active]:text-foreground focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:outline-ring dark:data-[state=active]:border-input dark:data-[state=active]:bg-input/30 text-foreground dark:text-muted-foreground inline-flex h-[calc(100%-1px)] flex-1 items-center justify-center gap-1.5 rounded-md border border-transparent px-2 py-1 text-sm font-medium whitespace-nowrap transition-[color,box-shadow] focus-visible:ring-[3px] focus-visible:outline-1 disabled:pointer-events-none disabled:opacity-50 data-[state=active]:shadow-sm [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4",
"data-[state=active]:bg-background dark:data-[state=active]:text-foreground focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:outline-ring dark:data-[state=active]:border-input dark:data-[state=active]:bg-input/30 text-foreground dark:text-muted-foreground inline-flex h-[calc(100%-1px)] flex-1 items-center justify-center gap-1.5 rounded-md border border-transparent px-2 py-1 text-sm font-medium whitespace-nowrap transition-[color,box-shadow] duration-200 focus-visible:ring-[3px] focus-visible:outline-1 disabled:pointer-events-none disabled:opacity-50 data-[state=active]:shadow-sm [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4",
className,
)}
{...props}
@@ -55,7 +55,13 @@ function TabsContent({
return (
<TabsPrimitive.Content
data-slot="tabs-content"
className={cn("flex-1 outline-none", className)}
className={cn(
"flex-1 outline-none",
"data-[state=active]:animate-in data-[state=inactive]:animate-out",
"data-[state=inactive]:fade-out-0 data-[state=active]:fade-in-0",
"duration-150",
className,
)}
{...props}
/>
);

View File

@@ -9,7 +9,7 @@ const Textarea = React.forwardRef<HTMLTextAreaElement, TextareaProps>(
return (
<textarea
className={cn(
"flex min-h-[80px] w-full rounded-md border border-input bg-background px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50",
"flex min-h-[80px] w-full rounded-md border border-input bg-transparent dark:bg-input/30 px-3 py-2 text-sm shadow-xs transition-[color,box-shadow] duration-200 outline-none placeholder:text-muted-foreground focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px] disabled:cursor-not-allowed disabled:opacity-50 disabled:pointer-events-none",
className,
)}
ref={ref}

View File

@@ -36,7 +36,7 @@ function TooltipTrigger({
function TooltipContent({
className,
sideOffset = 0,
sideOffset = 4,
children,
...props
}: React.ComponentProps<typeof TooltipPrimitive.Content>) {
@@ -46,7 +46,7 @@ function TooltipContent({
data-slot="tooltip-content"
sideOffset={sideOffset}
className={cn(
"bg-primary text-primary-foreground animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-50 w-fit origin-(--radix-tooltip-content-transform-origin) rounded-md px-3 py-1.5 text-xs text-balance",
"bg-elevated text-foreground border border-edge-medium shadow-lg animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-50 w-fit origin-(--radix-tooltip-content-transform-origin) rounded-md px-3 py-1.5 text-xs text-balance",
className,
)}
{...props}

View File

@@ -56,6 +56,62 @@ export const TERMINAL_THEMES: Record<string, TerminalTheme> = {
},
},
termixDark: {
name: "Termix Dark",
category: "dark",
colors: {
background: "#18181b",
foreground: "#f7f7f7",
cursor: "#f7f7f7",
cursorAccent: "#18181b",
selectionBackground: "#3a3a3d",
black: "#2e3436",
red: "#cc0000",
green: "#4e9a06",
yellow: "#c4a000",
blue: "#3465a4",
magenta: "#75507b",
cyan: "#06989a",
white: "#d3d7cf",
brightBlack: "#555753",
brightRed: "#ef2929",
brightGreen: "#8ae234",
brightYellow: "#fce94f",
brightBlue: "#729fcf",
brightMagenta: "#ad7fa8",
brightCyan: "#34e2e2",
brightWhite: "#eeeeec",
},
},
termixLight: {
name: "Termix Light",
category: "light",
colors: {
background: "#ffffff",
foreground: "#18181b",
cursor: "#18181b",
cursorAccent: "#ffffff",
selectionBackground: "#d1d5db",
black: "#18181b",
red: "#dc2626",
green: "#16a34a",
yellow: "#ca8a04",
blue: "#2563eb",
magenta: "#9333ea",
cyan: "#0891b2",
white: "#f4f4f5",
brightBlack: "#71717a",
brightRed: "#ef4444",
brightGreen: "#22c55e",
brightYellow: "#eab308",
brightBlue: "#3b82f6",
brightMagenta: "#a855f7",
brightCyan: "#06b6d4",
brightWhite: "#ffffff",
},
},
dracula: {
name: "Dracula",
category: "dark",
@@ -617,7 +673,6 @@ export const TERMINAL_THEMES: Record<string, TerminalTheme> = {
},
};
// Font families available for terminal
export const TERMINAL_FONTS = [
{
value: "Caskaydia Cove Nerd Font Mono",
@@ -690,7 +745,7 @@ export const DEFAULT_TERMINAL_CONFIG = {
fontSize: 14,
fontFamily: "Caskaydia Cove Nerd Font Mono",
letterSpacing: 0,
lineHeight: 1.2,
lineHeight: 1.0,
theme: "termix",
scrollback: 10000,
@@ -706,6 +761,7 @@ export const DEFAULT_TERMINAL_CONFIG = {
startupSnippetId: null as number | null,
autoMosh: false,
moshCommand: "mosh-server new -s -l LANG=en_US.UTF-8",
sudoPasswordAutoFill: false,
};
export type TerminalConfigType = typeof DEFAULT_TERMINAL_CONFIG;

Some files were not shown because too many files have changed in this diff Show More