Merge branch 'dev-1.10.1' into feature/add-network-graph

This commit is contained in:
Luke Gustafson
2026-01-12 03:05:50 -05:00
committed by GitHub
268 changed files with 156304 additions and 17798 deletions

View File

@@ -2,8 +2,8 @@ import express from "express";
import cors from "cors";
import cookieParser from "cookie-parser";
import { getDb } from "./database/db/index.js";
import { recentActivity, sshData } from "./database/db/schema.js";
import { eq, and, desc } from "drizzle-orm";
import { recentActivity, sshData, hostAccess } from "./database/db/schema.js";
import { eq, and, desc, or } from "drizzle-orm";
import { dashboardLogger } from "./utils/logger.js";
import { SimpleDBOps } from "./utils/simple-db-ops.js";
import { AuthManager } from "./utils/auth-manager.js";
@@ -15,7 +15,7 @@ const authManager = AuthManager.getInstance();
const serverStartTime = Date.now();
const activityRateLimiter = new Map<string, number>();
const RATE_LIMIT_MS = 1000; // 1 second window
const RATE_LIMIT_MS = 1000;
app.use(
cors({
@@ -127,9 +127,18 @@ app.post("/activity/log", async (req, res) => {
});
}
if (type !== "terminal" && type !== "file_manager") {
if (
![
"terminal",
"file_manager",
"server_stats",
"tunnel",
"docker",
].includes(type)
) {
return res.status(400).json({
error: "Invalid activity type. Must be 'terminal' or 'file_manager'",
error:
"Invalid activity type. Must be 'terminal', 'file_manager', 'server_stats', 'tunnel', or 'docker'",
});
}
@@ -155,7 +164,7 @@ app.post("/activity/log", async (req, res) => {
entriesToDelete.forEach((key) => activityRateLimiter.delete(key));
}
const hosts = await SimpleDBOps.select(
const ownedHosts = await SimpleDBOps.select(
getDb()
.select()
.from(sshData)
@@ -164,8 +173,19 @@ app.post("/activity/log", async (req, res) => {
userId,
);
if (hosts.length === 0) {
return res.status(404).json({ error: "Host not found" });
if (ownedHosts.length === 0) {
const sharedHosts = await getDb()
.select()
.from(hostAccess)
.where(
and(eq(hostAccess.hostId, hostId), eq(hostAccess.userId, userId)),
);
if (sharedHosts.length === 0) {
return res
.status(404)
.json({ error: "Host not found or access denied" });
}
}
const result = (await SimpleDBOps.insert(

View File

@@ -9,6 +9,7 @@ import credentialsRoutes from "./routes/credentials.js";
import snippetsRoutes from "./routes/snippets.js";
import terminalRoutes from "./routes/terminal.js";
import networkTopologyRoutes from "./routes/network-topology.js";
import rbacRoutes from "./routes/rbac.js";
import cors from "cors";
import fetch from "node-fetch";
import fs from "fs";
@@ -1438,6 +1439,7 @@ app.use("/credentials", credentialsRoutes);
app.use("/snippets", snippetsRoutes);
app.use("/terminal", terminalRoutes);
app.use("/network-topology", networkTopologyRoutes);
app.use("/rbac", rbacRoutes);
app.use(
(

View File

@@ -201,13 +201,21 @@ async function initializeCompleteDatabase(): Promise<void> {
enable_tunnel INTEGER NOT NULL DEFAULT 1,
tunnel_connections TEXT,
enable_file_manager INTEGER NOT NULL DEFAULT 1,
enable_docker INTEGER NOT NULL DEFAULT 0,
default_path TEXT,
autostart_password TEXT,
autostart_key TEXT,
autostart_key_password TEXT,
force_keyboard_interactive TEXT,
stats_config TEXT,
docker_config TEXT,
terminal_config TEXT,
notes TEXT,
use_socks5 INTEGER,
socks5_host TEXT,
socks5_port INTEGER,
socks5_username TEXT,
socks5_password TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
@@ -328,6 +336,81 @@ async function initializeCompleteDatabase(): Promise<void> {
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS host_access (
id INTEGER PRIMARY KEY AUTOINCREMENT,
host_id INTEGER NOT NULL,
user_id TEXT,
role_id INTEGER,
granted_by TEXT NOT NULL,
permission_level TEXT NOT NULL DEFAULT 'use',
expires_at TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_accessed_at TEXT,
access_count INTEGER NOT NULL DEFAULT 0,
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS roles (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL UNIQUE,
display_name TEXT NOT NULL,
description TEXT,
is_system INTEGER NOT NULL DEFAULT 0,
permissions TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS user_roles (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL,
role_id INTEGER NOT NULL,
granted_by TEXT,
granted_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE(user_id, role_id),
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE SET NULL
);
CREATE TABLE IF NOT EXISTS audit_logs (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL,
username TEXT NOT NULL,
action TEXT NOT NULL,
resource_type TEXT NOT NULL,
resource_id TEXT,
resource_name TEXT,
details TEXT,
ip_address TEXT,
user_agent TEXT,
success INTEGER NOT NULL,
error_message TEXT,
timestamp TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS session_recordings (
id INTEGER PRIMARY KEY AUTOINCREMENT,
host_id INTEGER NOT NULL,
user_id TEXT NOT NULL,
access_id INTEGER,
started_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
ended_at TEXT,
duration INTEGER,
commands TEXT,
dangerous_actions TEXT,
recording_path TEXT,
terminated_by_owner INTEGER DEFAULT 0,
termination_reason TEXT,
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (access_id) REFERENCES host_access (id) ON DELETE SET NULL
);
`);
try {
@@ -486,11 +569,30 @@ const migrateSchema = () => {
addColumnIfNotExists("ssh_data", "stats_config", "TEXT");
addColumnIfNotExists("ssh_data", "terminal_config", "TEXT");
addColumnIfNotExists("ssh_data", "quick_actions", "TEXT");
addColumnIfNotExists(
"ssh_data",
"enable_docker",
"INTEGER NOT NULL DEFAULT 0",
);
addColumnIfNotExists("ssh_data", "docker_config", "TEXT");
addColumnIfNotExists("ssh_data", "notes", "TEXT");
addColumnIfNotExists("ssh_data", "use_socks5", "INTEGER");
addColumnIfNotExists("ssh_data", "socks5_host", "TEXT");
addColumnIfNotExists("ssh_data", "socks5_port", "INTEGER");
addColumnIfNotExists("ssh_data", "socks5_username", "TEXT");
addColumnIfNotExists("ssh_data", "socks5_password", "TEXT");
addColumnIfNotExists("ssh_data", "socks5_proxy_chain", "TEXT");
addColumnIfNotExists("ssh_credentials", "private_key", "TEXT");
addColumnIfNotExists("ssh_credentials", "public_key", "TEXT");
addColumnIfNotExists("ssh_credentials", "detected_key_type", "TEXT");
addColumnIfNotExists("ssh_credentials", "system_password", "TEXT");
addColumnIfNotExists("ssh_credentials", "system_key", "TEXT");
addColumnIfNotExists("ssh_credentials", "system_key_password", "TEXT");
addColumnIfNotExists("file_manager_recent", "host_id", "INTEGER NOT NULL");
addColumnIfNotExists("file_manager_pinned", "host_id", "INTEGER NOT NULL");
addColumnIfNotExists("file_manager_shortcuts", "host_id", "INTEGER NOT NULL");
@@ -569,12 +671,316 @@ const migrateSchema = () => {
`);
} catch (createError) {
databaseLogger.warn("Failed to create network_topology table", {
sqlite.prepare("SELECT id FROM host_access LIMIT 1").get();
} catch {
try {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS host_access (
id INTEGER PRIMARY KEY AUTOINCREMENT,
host_id INTEGER NOT NULL,
user_id TEXT,
role_id INTEGER,
granted_by TEXT NOT NULL,
permission_level TEXT NOT NULL DEFAULT 'use',
expires_at TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_accessed_at TEXT,
access_count INTEGER NOT NULL DEFAULT 0,
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE CASCADE
);
`);
} catch (createError) {
databaseLogger.warn("Failed to create host_access table", {
operation: "schema_migration",
error: createError,
});
}
}
try {
sqlite.prepare("SELECT role_id FROM host_access LIMIT 1").get();
} catch {
try {
sqlite.exec("ALTER TABLE host_access ADD COLUMN role_id INTEGER REFERENCES roles(id) ON DELETE CASCADE");
} catch (alterError) {
databaseLogger.warn("Failed to add role_id column", {
operation: "schema_migration",
error: alterError,
});
}
}
try {
sqlite.prepare("SELECT sudo_password FROM ssh_data LIMIT 1").get();
} catch {
try {
sqlite.exec("ALTER TABLE ssh_data ADD COLUMN sudo_password TEXT");
} catch (alterError) {
databaseLogger.warn("Failed to add sudo_password column", {
operation: "schema_migration",
error: alterError,
});
}
}
try {
sqlite.prepare("SELECT id FROM roles LIMIT 1").get();
} catch {
try {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS roles (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL UNIQUE,
display_name TEXT NOT NULL,
description TEXT,
is_system INTEGER NOT NULL DEFAULT 0,
permissions TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
);
`);
} catch (createError) {
databaseLogger.warn("Failed to create roles table", {
operation: "schema_migration",
error: createError,
});
}
}
try {
sqlite.prepare("SELECT id FROM user_roles LIMIT 1").get();
} catch {
try {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS user_roles (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL,
role_id INTEGER NOT NULL,
granted_by TEXT,
granted_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE(user_id, role_id),
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE SET NULL
);
`);
} catch (createError) {
databaseLogger.warn("Failed to create user_roles table", {
operation: "schema_migration",
error: createError,
});
}
}
try {
sqlite.prepare("SELECT id FROM audit_logs LIMIT 1").get();
} catch {
try {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS audit_logs (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL,
username TEXT NOT NULL,
action TEXT NOT NULL,
resource_type TEXT NOT NULL,
resource_id TEXT,
resource_name TEXT,
details TEXT,
ip_address TEXT,
user_agent TEXT,
success INTEGER NOT NULL,
error_message TEXT,
timestamp TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
);
`);
} catch (createError) {
databaseLogger.warn("Failed to create audit_logs table", {
operation: "schema_migration",
error: createError,
});
}
}
try {
sqlite.prepare("SELECT id FROM session_recordings LIMIT 1").get();
} catch {
try {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS session_recordings (
id INTEGER PRIMARY KEY AUTOINCREMENT,
host_id INTEGER NOT NULL,
user_id TEXT NOT NULL,
access_id INTEGER,
started_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
ended_at TEXT,
duration INTEGER,
commands TEXT,
dangerous_actions TEXT,
recording_path TEXT,
terminated_by_owner INTEGER DEFAULT 0,
termination_reason TEXT,
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (access_id) REFERENCES host_access (id) ON DELETE SET NULL
);
`);
} catch (createError) {
databaseLogger.warn("Failed to create session_recordings table", {
operation: "schema_migration",
error: createError,
});
}
}
try {
sqlite.prepare("SELECT id FROM shared_credentials LIMIT 1").get();
} catch {
try {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS shared_credentials (
id INTEGER PRIMARY KEY AUTOINCREMENT,
host_access_id INTEGER NOT NULL,
original_credential_id INTEGER NOT NULL,
target_user_id TEXT NOT NULL,
encrypted_username TEXT NOT NULL,
encrypted_auth_type TEXT NOT NULL,
encrypted_password TEXT,
encrypted_key TEXT,
encrypted_key_password TEXT,
encrypted_key_type TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
needs_re_encryption INTEGER NOT NULL DEFAULT 0,
FOREIGN KEY (host_access_id) REFERENCES host_access (id) ON DELETE CASCADE,
FOREIGN KEY (original_credential_id) REFERENCES ssh_credentials (id) ON DELETE CASCADE,
FOREIGN KEY (target_user_id) REFERENCES users (id) ON DELETE CASCADE
);
`);
} catch (createError) {
databaseLogger.warn("Failed to create shared_credentials table", {
operation: "schema_migration",
error: createError,
});
}
}
try {
const existingRoles = sqlite.prepare("SELECT name, is_system FROM roles").all() as Array<{ name: string; is_system: number }>;
try {
const validSystemRoles = ['admin', 'user'];
const unwantedRoleNames = ['superAdmin', 'powerUser', 'readonly', 'member'];
let deletedCount = 0;
const deleteByName = sqlite.prepare("DELETE FROM roles WHERE name = ?");
for (const roleName of unwantedRoleNames) {
const result = deleteByName.run(roleName);
if (result.changes > 0) {
deletedCount += result.changes;
}
}
const deleteOldSystemRole = sqlite.prepare("DELETE FROM roles WHERE name = ? AND is_system = 1");
for (const role of existingRoles) {
if (role.is_system === 1 && !validSystemRoles.includes(role.name) && !unwantedRoleNames.includes(role.name)) {
const result = deleteOldSystemRole.run(role.name);
if (result.changes > 0) {
deletedCount += result.changes;
}
}
}
} catch (cleanupError) {
databaseLogger.warn("Failed to clean up old system roles", {
operation: "schema_migration",
error: cleanupError,
});
}
const systemRoles = [
{
name: "admin",
displayName: "rbac.roles.admin",
description: "Administrator with full access",
permissions: null,
},
{
name: "user",
displayName: "rbac.roles.user",
description: "Regular user",
permissions: null,
},
];
for (const role of systemRoles) {
const existingRole = sqlite.prepare("SELECT id FROM roles WHERE name = ?").get(role.name);
if (!existingRole) {
try {
sqlite.prepare(`
INSERT INTO roles (name, display_name, description, is_system, permissions)
VALUES (?, ?, ?, 1, ?)
`).run(role.name, role.displayName, role.description, role.permissions);
} catch (insertError) {
databaseLogger.warn(`Failed to create system role: ${role.name}`, {
operation: "schema_migration",
error: insertError,
});
}
}
}
try {
const adminUsers = sqlite.prepare("SELECT id FROM users WHERE is_admin = 1").all() as { id: string }[];
const normalUsers = sqlite.prepare("SELECT id FROM users WHERE is_admin = 0").all() as { id: string }[];
const adminRole = sqlite.prepare("SELECT id FROM roles WHERE name = 'admin'").get() as { id: number } | undefined;
const userRole = sqlite.prepare("SELECT id FROM roles WHERE name = 'user'").get() as { id: number } | undefined;
if (adminRole) {
const insertUserRole = sqlite.prepare(`
INSERT OR IGNORE INTO user_roles (user_id, role_id, granted_at)
VALUES (?, ?, CURRENT_TIMESTAMP)
`);
for (const admin of adminUsers) {
try {
insertUserRole.run(admin.id, adminRole.id);
} catch (error) {
// Ignore duplicate errors
}
}
}
if (userRole) {
const insertUserRole = sqlite.prepare(`
INSERT OR IGNORE INTO user_roles (user_id, role_id, granted_at)
VALUES (?, ?, CURRENT_TIMESTAMP)
`);
for (const user of normalUsers) {
try {
insertUserRole.run(user.id, userRole.id);
} catch (error) {
// Ignore duplicate errors
}
}
}
} catch (migrationError) {
databaseLogger.warn("Failed to migrate existing users to roles", {
operation: "schema_migration",
error: migrationError,
});
}
} catch (seedError) {
databaseLogger.warn("Failed to seed system roles", {
operation: "schema_migration",
error: seedError,
});
}
databaseLogger.success("Schema migration completed", {
operation: "schema_migration",
});

View File

@@ -66,6 +66,7 @@ export const sshData = sqliteTable("ssh_data", {
key: text("key", { length: 8192 }),
key_password: text("key_password"),
keyType: text("key_type"),
sudoPassword: text("sudo_password"),
autostartPassword: text("autostart_password"),
autostartKey: text("autostart_key", { length: 8192 }),
@@ -86,10 +87,22 @@ export const sshData = sqliteTable("ssh_data", {
enableFileManager: integer("enable_file_manager", { mode: "boolean" })
.notNull()
.default(true),
enableDocker: integer("enable_docker", { mode: "boolean" })
.notNull()
.default(false),
defaultPath: text("default_path"),
statsConfig: text("stats_config"),
terminalConfig: text("terminal_config"),
quickActions: text("quick_actions"),
notes: text("notes"),
useSocks5: integer("use_socks5", { mode: "boolean" }),
socks5Host: text("socks5_host"),
socks5Port: integer("socks5_port"),
socks5Username: text("socks5_username"),
socks5Password: text("socks5_password"),
socks5ProxyChain: text("socks5_proxy_chain"),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
@@ -172,6 +185,11 @@ export const sshCredentials = sqliteTable("ssh_credentials", {
key_password: text("key_password"),
keyType: text("key_type"),
detectedKeyType: text("detected_key_type"),
systemPassword: text("system_password"),
systemKey: text("system_key", { length: 16384 }),
systemKeyPassword: text("system_key_password"),
usageCount: integer("usage_count").notNull().default(0),
lastUsed: text("last_used"),
createdAt: text("created_at")
@@ -283,6 +301,80 @@ export const networkTopology = sqliteTable("network_topology", {
.notNull()
.references(() => users.id, { onDelete: "cascade" }),
topology: text("topology"),
// Grants a user OR a role access to a host owned by someone else.
// Exactly one of userId / roleId is expected to be set per row
// (the share route inserts null for whichever target type is unused).
export const hostAccess = sqliteTable("host_access", {
id: integer("id").primaryKey({ autoIncrement: true }),
// Host being shared; row disappears when the host is deleted.
hostId: integer("host_id")
.notNull()
.references(() => sshData.id, { onDelete: "cascade" }),
// Target user (nullable: null when the grant targets a role instead).
userId: text("user_id")
.references(() => users.id, { onDelete: "cascade" }),
// Target role (nullable: null when the grant targets a single user).
roleId: integer("role_id")
.references(() => roles.id, { onDelete: "cascade" }),
// The host owner who created this grant.
grantedBy: text("granted_by")
.notNull()
.references(() => users.id, { onDelete: "cascade" }),
// NOTE(review): default here is "view", but the raw SQL migration creates
// this column with DEFAULT 'use' — confirm which default is intended.
permissionLevel: text("permission_level")
.notNull()
.default("view"),
// ISO-8601 timestamp; null means the grant never expires.
expiresAt: text("expires_at"),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
lastAccessedAt: text("last_accessed_at"),
accessCount: integer("access_count").notNull().default(0),
});
// Per-recipient re-encrypted copy of an SSH credential backing a host share.
// One row per (host_access grant, target user); cascades away when either the
// grant or the original credential is removed.
export const sharedCredentials = sqliteTable("shared_credentials", {
id: integer("id").primaryKey({ autoIncrement: true }),
// The host_access grant this credential copy belongs to.
hostAccessId: integer("host_access_id")
.notNull()
.references(() => hostAccess.id, { onDelete: "cascade" }),
// The owner's credential this row was derived from.
originalCredentialId: integer("original_credential_id")
.notNull()
.references(() => sshCredentials.id, { onDelete: "cascade" }),
// The recipient whose key material this copy is encrypted for.
targetUserId: text("target_user_id")
.notNull()
.references(() => users.id, { onDelete: "cascade" }),
// All credential fields below are stored encrypted; optional ones are null
// when the original credential does not use them (e.g. password vs key auth).
encryptedUsername: text("encrypted_username").notNull(),
encryptedAuthType: text("encrypted_auth_type").notNull(),
encryptedPassword: text("encrypted_password"),
encryptedKey: text("encrypted_key", { length: 16384 }),
encryptedKeyPassword: text("encrypted_key_password"),
encryptedKeyType: text("encrypted_key_type"),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
updatedAt: text("updated_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
// Set when the source credential changed and this copy must be re-derived.
needsReEncryption: integer("needs_re_encryption", { mode: "boolean" })
.notNull()
.default(false),
});
export const roles = sqliteTable("roles", {
id: integer("id").primaryKey({ autoIncrement: true }),
name: text("name").notNull().unique(),
displayName: text("display_name").notNull(),
description: text("description"),
isSystem: integer("is_system", { mode: "boolean" })
.notNull()
.default(false),
permissions: text("permissions"),
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
@@ -290,3 +382,74 @@ export const networkTopology = sqliteTable("network_topology", {
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
// Many-to-many assignment of roles to users. The SQL DDL adds a
// UNIQUE(user_id, role_id) constraint, so each pair exists at most once.
export const userRoles = sqliteTable("user_roles", {
id: integer("id").primaryKey({ autoIncrement: true }),
userId: text("user_id")
.notNull()
.references(() => users.id, { onDelete: "cascade" }),
roleId: integer("role_id")
.notNull()
.references(() => roles.id, { onDelete: "cascade" }),
// Who granted the role; kept (as null) if the granting user is deleted.
grantedBy: text("granted_by").references(() => users.id, {
onDelete: "set null",
}),
grantedAt: text("granted_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
// Append-only audit trail of user actions (RBAC operations, etc.).
// NOTE(review): rows cascade-delete with the user, so the audit trail for a
// deleted user is lost — confirm that is acceptable for compliance needs.
export const auditLogs = sqliteTable("audit_logs", {
id: integer("id").primaryKey({ autoIncrement: true }),
userId: text("user_id")
.notNull()
.references(() => users.id, { onDelete: "cascade" }),
// Username denormalized at write time so log lines stay readable.
username: text("username").notNull(),
action: text("action").notNull(),
// What kind of entity was acted on, plus its id/name when applicable.
resourceType: text("resource_type").notNull(),
resourceId: text("resource_id"),
resourceName: text("resource_name"),
details: text("details"),
ipAddress: text("ip_address"),
userAgent: text("user_agent"),
// Whether the audited action succeeded; errorMessage explains failures.
success: integer("success", { mode: "boolean" }).notNull(),
errorMessage: text("error_message"),
timestamp: text("timestamp")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
// Recording metadata for a terminal session on a (possibly shared) host.
export const sessionRecordings = sqliteTable("session_recordings", {
id: integer("id").primaryKey({ autoIncrement: true }),
hostId: integer("host_id")
.notNull()
.references(() => sshData.id, { onDelete: "cascade" }),
// The user whose session was recorded.
userId: text("user_id")
.notNull()
.references(() => users.id, { onDelete: "cascade" }),
// Link to the host_access grant used for the session; kept (as null)
// if the grant is later revoked, so the recording survives.
accessId: integer("access_id").references(() => hostAccess.id, {
onDelete: "set null",
}),
startedAt: text("started_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
// Null while the session is still open.
endedAt: text("ended_at"),
duration: integer("duration"),
commands: text("commands"),
dangerousActions: text("dangerous_actions"),
recordingPath: text("recording_path"),
// Set when the host owner forcibly ended the session.
terminatedByOwner: integer("terminated_by_owner", { mode: "boolean" })
.default(false),
terminationReason: text("termination_reason"),
});

View File

@@ -1,7 +1,15 @@
import type { AuthenticatedRequest } from "../../../types/index.js";
import type {
AuthenticatedRequest,
CredentialBackend,
} from "../../../types/index.js";
import express from "express";
import { db } from "../db/index.js";
import { sshCredentials, sshCredentialUsage, sshData } from "../db/schema.js";
import {
sshCredentials,
sshCredentialUsage,
sshData,
hostAccess,
} from "../db/schema.js";
import { eq, and, desc, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { authLogger } from "../../utils/logger.js";
@@ -470,6 +478,14 @@ router.put(
userId,
);
const { SharedCredentialManager } =
await import("../../utils/shared-credential-manager.js");
const sharedCredManager = SharedCredentialManager.getInstance();
await sharedCredManager.updateSharedCredentialsForOriginal(
parseInt(id),
userId,
);
const credential = updated[0];
authLogger.success(
`SSH credential updated: ${credential.name} (${credential.authType}) by user ${userId}`,
@@ -524,8 +540,6 @@ router.delete(
return res.status(404).json({ error: "Credential not found" });
}
// Update hosts using this credential to set credentialId to null
// This prevents orphaned references before deletion
const hostsUsingCredential = await db
.select()
.from(sshData)
@@ -552,10 +566,32 @@ router.delete(
eq(sshData.userId, userId),
),
);
for (const host of hostsUsingCredential) {
const revokedShares = await db
.delete(hostAccess)
.where(eq(hostAccess.hostId, host.id))
.returning({ id: hostAccess.id });
if (revokedShares.length > 0) {
authLogger.info(
"Auto-revoked host shares due to credential deletion",
{
operation: "auto_revoke_shares",
hostId: host.id,
credentialId: parseInt(id),
revokedCount: revokedShares.length,
reason: "credential_deleted",
},
);
}
}
}
// sshCredentialUsage will be automatically deleted by ON DELETE CASCADE
// No need for manual deletion
const { SharedCredentialManager } =
await import("../../utils/shared-credential-manager.js");
const sharedCredManager = SharedCredentialManager.getInstance();
await sharedCredManager.deleteSharedCredentialsForOriginal(parseInt(id));
await db
.delete(sshCredentials)
@@ -1124,10 +1160,9 @@ router.post(
async function deploySSHKeyToHost(
hostConfig: Record<string, unknown>,
publicKey: string,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_credentialData: Record<string, unknown>,
credData: CredentialBackend,
): Promise<{ success: boolean; message?: string; error?: string }> {
const publicKey = credData.public_key as string;
return new Promise((resolve) => {
const conn = new Client();
@@ -1248,7 +1283,7 @@ async function deploySSHKeyToHost(
.replace(/'/g, "'\\''");
conn.exec(
`printf '%s\\n' '${escapedKey}' >> ~/.ssh/authorized_keys && chmod 600 ~/.ssh/authorized_keys`,
`printf '%s\\n' '${escapedKey} ${credData.name}@Termix' >> ~/.ssh/authorized_keys && chmod 600 ~/.ssh/authorized_keys`,
(err, stream) => {
if (err) {
clearTimeout(addTimeout);
@@ -1510,7 +1545,7 @@ router.post(
});
}
const credData = credential[0];
const credData = credential[0] as unknown as CredentialBackend;
if (credData.authType !== "key") {
return res.status(400).json({
@@ -1519,7 +1554,7 @@ router.post(
});
}
const publicKey = credData.public_key || credData.publicKey;
const publicKey = credData.public_key;
if (!publicKey) {
return res.status(400).json({
success: false,
@@ -1599,11 +1634,7 @@ router.post(
}
}
const deployResult = await deploySSHKeyToHost(
hostConfig,
publicKey as string,
credData,
);
const deployResult = await deploySSHKeyToHost(hostConfig, credData);
if (deployResult.success) {
res.json({

View File

@@ -0,0 +1,850 @@
import type { AuthenticatedRequest } from "../../../types/index.js";
import express from "express";
import { db } from "../db/index.js";
import {
hostAccess,
sshData,
users,
roles,
userRoles,
auditLogs,
sharedCredentials,
} from "../db/schema.js";
import { eq, and, desc, sql, or, isNull, gte } from "drizzle-orm";
import type { Request, Response } from "express";
import { databaseLogger } from "../../utils/logger.js";
import { AuthManager } from "../../utils/auth-manager.js";
import { PermissionManager } from "../../utils/permission-manager.js";
const router = express.Router();
const authManager = AuthManager.getInstance();
const permissionManager = PermissionManager.getInstance();
const authenticateJWT = authManager.createAuthMiddleware();
/**
 * Type guard: true only when `value` is a string that contains at least one
 * non-whitespace character.
 */
function isNonEmptyString(value: unknown): value is string {
  if (typeof value !== "string") {
    return false;
  }
  return value.trim() !== "";
}
// Share a host with a user or a role.
// POST /rbac/host/:id/share
// Body: { targetType: "user" | "role", targetUserId?, targetRoleId?,
//         durationHours?, permissionLevel? (only "view" accepted) }
// Only the host owner may share, and only hosts backed by a credential can
// be shared (the credential is re-encrypted for each recipient).
router.post(
"/host/:id/share",
authenticateJWT,
async (req: AuthenticatedRequest, res: Response) => {
const hostId = parseInt(req.params.id, 10);
const userId = req.userId!;
if (isNaN(hostId)) {
return res.status(400).json({ error: "Invalid host ID" });
}
try {
const {
targetType = "user",
targetUserId,
targetRoleId,
durationHours,
permissionLevel = "view",
} = req.body;
// --- Input validation -------------------------------------------------
if (!["user", "role"].includes(targetType)) {
return res
.status(400)
.json({ error: "Invalid target type. Must be 'user' or 'role'" });
}
if (targetType === "user" && !isNonEmptyString(targetUserId)) {
return res
.status(400)
.json({ error: "Target user ID is required when sharing with user" });
}
// NOTE(review): targetRoleId is only truthiness-checked here; a non-numeric
// value would flow into the queries below — confirm upstream guarantees.
if (targetType === "role" && !targetRoleId) {
return res
.status(400)
.json({ error: "Target role ID is required when sharing with role" });
}
// --- Ownership and shareability checks --------------------------------
// The host must exist AND belong to the caller.
const host = await db
.select()
.from(sshData)
.where(and(eq(sshData.id, hostId), eq(sshData.userId, userId)))
.limit(1);
if (host.length === 0) {
databaseLogger.warn("Attempt to share host not owned by user", {
operation: "share_host",
userId,
hostId,
});
return res.status(403).json({ error: "Not host owner" });
}
// Sharing requires a credential so it can be re-encrypted per recipient.
if (!host[0].credentialId) {
return res.status(400).json({
error:
"Only hosts using credentials can be shared. Please create a credential and assign it to this host before sharing.",
code: "CREDENTIAL_REQUIRED_FOR_SHARING",
});
}
// The share target must exist.
if (targetType === "user") {
const targetUser = await db
.select({ id: users.id, username: users.username })
.from(users)
.where(eq(users.id, targetUserId))
.limit(1);
if (targetUser.length === 0) {
return res.status(404).json({ error: "Target user not found" });
}
} else {
const targetRole = await db
.select({ id: roles.id, name: roles.name })
.from(roles)
.where(eq(roles.id, targetRoleId))
.limit(1);
if (targetRole.length === 0) {
return res.status(404).json({ error: "Target role not found" });
}
}
// Optional expiry: positive numeric durationHours -> ISO timestamp,
// otherwise the grant never expires (null).
let expiresAt: string | null = null;
if (
durationHours &&
typeof durationHours === "number" &&
durationHours > 0
) {
const expiryDate = new Date();
expiryDate.setHours(expiryDate.getHours() + durationHours);
expiresAt = expiryDate.toISOString();
}
// Only "view" is currently supported; widen validLevels to add more.
const validLevels = ["view"];
if (!validLevels.includes(permissionLevel)) {
return res.status(400).json({
error: "Invalid permission level. Only 'view' is supported.",
validLevels,
});
}
// --- Upsert the grant -------------------------------------------------
// Look for an existing grant for this (host, target) pair.
const whereConditions = [eq(hostAccess.hostId, hostId)];
if (targetType === "user") {
whereConditions.push(eq(hostAccess.userId, targetUserId));
} else {
whereConditions.push(eq(hostAccess.roleId, targetRoleId));
}
const existing = await db
.select()
.from(hostAccess)
.where(and(...whereConditions))
.limit(1);
if (existing.length > 0) {
// Update path: refresh level/expiry, then rebuild the shared
// credential copies from scratch for the existing grant.
await db
.update(hostAccess)
.set({
permissionLevel,
expiresAt,
})
.where(eq(hostAccess.id, existing[0].id));
await db
.delete(sharedCredentials)
.where(eq(sharedCredentials.hostAccessId, existing[0].id));
// Dynamic import avoids a circular module dependency at load time.
const { SharedCredentialManager } =
await import("../../utils/shared-credential-manager.js");
const sharedCredManager = SharedCredentialManager.getInstance();
if (targetType === "user") {
await sharedCredManager.createSharedCredentialForUser(
existing[0].id,
host[0].credentialId,
targetUserId!,
userId,
);
} else {
await sharedCredManager.createSharedCredentialsForRole(
existing[0].id,
host[0].credentialId,
targetRoleId!,
userId,
);
}
return res.json({
success: true,
message: "Host access updated",
expiresAt,
});
}
// Insert path: create the grant, then encrypt the credential for the
// recipient(s). lastInsertRowid is the new host_access row id.
const result = await db.insert(hostAccess).values({
hostId,
userId: targetType === "user" ? targetUserId : null,
roleId: targetType === "role" ? targetRoleId : null,
grantedBy: userId,
permissionLevel,
expiresAt,
});
const { SharedCredentialManager } =
await import("../../utils/shared-credential-manager.js");
const sharedCredManager = SharedCredentialManager.getInstance();
if (targetType === "user") {
await sharedCredManager.createSharedCredentialForUser(
result.lastInsertRowid as number,
host[0].credentialId,
targetUserId!,
userId,
);
} else {
await sharedCredManager.createSharedCredentialsForRole(
result.lastInsertRowid as number,
host[0].credentialId,
targetRoleId!,
userId,
);
}
res.json({
success: true,
message: `Host shared successfully with ${targetType}`,
expiresAt,
});
} catch (error) {
databaseLogger.error("Failed to share host", error, {
operation: "share_host",
hostId,
userId,
});
res.status(500).json({ error: "Failed to share host" });
}
},
);
// Revoke host access.
// DELETE /rbac/host/:id/access/:accessId
// Only the host owner may revoke. The delete is scoped to BOTH the access id
// AND the host id: filtering by accessId alone would let the owner of host A
// delete access rows belonging to host B they do not own.
// Shared credential copies for the grant are removed by ON DELETE CASCADE.
router.delete(
  "/host/:id/access/:accessId",
  authenticateJWT,
  async (req: AuthenticatedRequest, res: Response) => {
    const hostId = parseInt(req.params.id, 10);
    const accessId = parseInt(req.params.accessId, 10);
    const userId = req.userId!;
    if (isNaN(hostId) || isNaN(accessId)) {
      return res.status(400).json({ error: "Invalid ID" });
    }
    try {
      // Verify the caller owns the host before touching any grants.
      const host = await db
        .select()
        .from(sshData)
        .where(and(eq(sshData.id, hostId), eq(sshData.userId, userId)))
        .limit(1);
      if (host.length === 0) {
        return res.status(403).json({ error: "Not host owner" });
      }
      // Scope the delete to this host so an accessId from another host
      // cannot be revoked through this route.
      const deleted = await db
        .delete(hostAccess)
        .where(
          and(eq(hostAccess.id, accessId), eq(hostAccess.hostId, hostId)),
        )
        .returning({ id: hostAccess.id });
      if (deleted.length === 0) {
        // Nothing matched: the grant does not exist or belongs to a
        // different host. Report it instead of claiming success.
        return res.status(404).json({ error: "Access entry not found" });
      }
      res.json({ success: true, message: "Access revoked" });
    } catch (error) {
      databaseLogger.error("Failed to revoke host access", error, {
        operation: "revoke_host_access",
        hostId,
        accessId,
        userId,
      });
      res.status(500).json({ error: "Failed to revoke access" });
    }
  },
);
// Get the access list for a host (all user- and role-based grants).
// GET /rbac/host/:id/access
// Owner-only; returns { accessList: [...] } with a derived targetType field.
router.get(
"/host/:id/access",
authenticateJWT,
async (req: AuthenticatedRequest, res: Response) => {
const hostId = parseInt(req.params.id, 10);
const userId = req.userId!;
if (isNaN(hostId)) {
return res.status(400).json({ error: "Invalid host ID" });
}
try {
// Only the host owner may inspect who the host is shared with.
const host = await db
.select()
.from(sshData)
.where(and(eq(sshData.id, hostId), eq(sshData.userId, userId)))
.limit(1);
if (host.length === 0) {
return res.status(403).json({ error: "Not host owner" });
}
// Left joins: user-targeted grants have null role columns and vice versa.
const rawAccessList = await db
.select({
id: hostAccess.id,
userId: hostAccess.userId,
roleId: hostAccess.roleId,
username: users.username,
roleName: roles.name,
roleDisplayName: roles.displayName,
grantedBy: hostAccess.grantedBy,
// Correlated subquery resolves the granting user's name without a
// second join against the users table.
grantedByUsername: sql<string>`(SELECT username FROM users WHERE id = ${hostAccess.grantedBy})`,
permissionLevel: hostAccess.permissionLevel,
expiresAt: hostAccess.expiresAt,
createdAt: hostAccess.createdAt,
})
.from(hostAccess)
.leftJoin(users, eq(hostAccess.userId, users.id))
.leftJoin(roles, eq(hostAccess.roleId, roles.id))
.where(eq(hostAccess.hostId, hostId))
.orderBy(desc(hostAccess.createdAt));
// Shape each row for the client, deriving targetType from which
// target column is populated.
const accessList = rawAccessList.map((access) => ({
id: access.id,
targetType: access.userId ? "user" : "role",
userId: access.userId,
roleId: access.roleId,
username: access.username,
roleName: access.roleName,
roleDisplayName: access.roleDisplayName,
grantedBy: access.grantedBy,
grantedByUsername: access.grantedByUsername,
permissionLevel: access.permissionLevel,
expiresAt: access.expiresAt,
createdAt: access.createdAt,
}));
res.json({ accessList });
} catch (error) {
databaseLogger.error("Failed to get host access list", error, {
operation: "get_host_access_list",
hostId,
userId,
});
res.status(500).json({ error: "Failed to get access list" });
}
},
);
// Get user's shared hosts (hosts shared WITH this user)
// GET /rbac/shared-hosts
router.get(
  "/shared-hosts",
  authenticateJWT,
  async (req: AuthenticatedRequest, res: Response) => {
    const userId = req.userId!;

    try {
      // Grants with a NULL expiry are permanent; otherwise the expiry
      // timestamp must still be in the future.
      const nowIso = new Date().toISOString();
      const notExpired = or(
        isNull(hostAccess.expiresAt),
        gte(hostAccess.expiresAt, nowIso),
      );

      const sharedHosts = await db
        .select({
          id: sshData.id,
          name: sshData.name,
          ip: sshData.ip,
          port: sshData.port,
          username: sshData.username,
          folder: sshData.folder,
          tags: sshData.tags,
          permissionLevel: hostAccess.permissionLevel,
          expiresAt: hostAccess.expiresAt,
          grantedBy: hostAccess.grantedBy,
          // Username of the host's owner, not of the requesting user.
          ownerUsername: users.username,
        })
        .from(hostAccess)
        .innerJoin(sshData, eq(hostAccess.hostId, sshData.id))
        .innerJoin(users, eq(sshData.userId, users.id))
        .where(and(eq(hostAccess.userId, userId), notExpired))
        .orderBy(desc(hostAccess.createdAt));

      res.json({ sharedHosts });
    } catch (error) {
      databaseLogger.error("Failed to get shared hosts", error, {
        operation: "get_shared_hosts",
        userId,
      });
      res.status(500).json({ error: "Failed to get shared hosts" });
    }
  },
);
// Get all roles, with their permissions JSON parsed (admin only)
// GET /rbac/roles
router.get(
  "/roles",
  authenticateJWT,
  permissionManager.requireAdmin(),
  async (req: AuthenticatedRequest, res: Response) => {
    try {
      const allRoles = await db
        .select()
        .from(roles)
        .orderBy(roles.isSystem, roles.name);

      // `permissions` is stored as a JSON string but may be NULL (the
      // create-role route inserts `permissions: null`) or empty.
      // JSON.parse("") throws, so guard instead of failing the whole request.
      const rolesWithParsedPermissions = allRoles.map((role) => ({
        ...role,
        permissions: role.permissions ? JSON.parse(role.permissions) : null,
      }));

      res.json({ roles: rolesWithParsedPermissions });
    } catch (error) {
      databaseLogger.error("Failed to get roles", error, {
        operation: "get_roles",
      });
      res.status(500).json({ error: "Failed to get roles" });
    }
  },
);
// Get all roles (listing without the permissions payload)
// GET /rbac/roles
//
// NOTE(review): this route is registered AFTER an identical GET /roles above
// that carries permissionManager.requireAdmin(). Express dispatches to the
// first matching route, and requireAdmin responds directly for non-admins
// rather than falling through, so this handler appears to be unreachable
// dead code — confirm, then either remove it or give it a distinct path
// (e.g. /roles/list) if a non-admin listing was intended.
router.get(
  "/roles",
  authenticateJWT,
  async (req: AuthenticatedRequest, res: Response) => {
    try {
      // Deliberately omits `permissions` from the projection.
      const rolesList = await db
        .select({
          id: roles.id,
          name: roles.name,
          displayName: roles.displayName,
          description: roles.description,
          isSystem: roles.isSystem,
          createdAt: roles.createdAt,
          updatedAt: roles.updatedAt,
        })
        .from(roles)
        .orderBy(roles.isSystem, roles.name);
      res.json({ roles: rolesList });
    } catch (error) {
      databaseLogger.error("Failed to get roles", error, {
        operation: "get_roles",
      });
      res.status(500).json({ error: "Failed to get roles" });
    }
  },
);
// Create new role
// POST /rbac/roles
router.post(
  "/roles",
  authenticateJWT,
  permissionManager.requireAdmin(),
  async (req: AuthenticatedRequest, res: Response) => {
    const { name, displayName, description } = req.body;

    // Both the machine name and the human-readable name are mandatory.
    if (!isNonEmptyString(name) || !isNonEmptyString(displayName)) {
      return res.status(400).json({
        error: "Role name and display name are required",
      });
    }

    // Machine names are restricted to an identifier-safe alphabet.
    if (!/^[a-z0-9_-]+$/.test(name)) {
      return res.status(400).json({
        error:
          "Role name must contain only lowercase letters, numbers, underscores, and hyphens",
      });
    }

    try {
      // Enforce name uniqueness before inserting.
      const duplicates = await db
        .select({ id: roles.id })
        .from(roles)
        .where(eq(roles.name, name))
        .limit(1);

      if (duplicates.length > 0) {
        return res.status(409).json({
          error: "A role with this name already exists",
        });
      }

      // New roles start with no explicit permissions and are never system roles.
      const inserted = await db.insert(roles).values({
        name,
        displayName,
        description: description || null,
        isSystem: false,
        permissions: null,
      });

      res.status(201).json({
        success: true,
        roleId: inserted.lastInsertRowid,
        message: "Role created successfully",
      });
    } catch (error) {
      databaseLogger.error("Failed to create role", error, {
        operation: "create_role",
        roleName: name,
      });
      res.status(500).json({ error: "Failed to create role" });
    }
  },
);
// Update role
// PUT /rbac/roles/:id
router.put(
  "/roles/:id",
  authenticateJWT,
  permissionManager.requireAdmin(),
  async (req: AuthenticatedRequest, res: Response) => {
    const roleId = parseInt(req.params.id, 10);
    const { displayName, description } = req.body;

    if (isNaN(roleId)) {
      return res.status(400).json({ error: "Invalid role ID" });
    }

    // Reject requests that carry neither updatable field.
    if (!displayName && description === undefined) {
      return res.status(400).json({
        error: "At least one field (displayName or description) is required",
      });
    }

    try {
      const found = await db
        .select({
          id: roles.id,
          name: roles.name,
          isSystem: roles.isSystem,
        })
        .from(roles)
        .where(eq(roles.id, roleId))
        .limit(1);

      if (found.length === 0) {
        return res.status(404).json({ error: "Role not found" });
      }

      // Build the patch incrementally so only supplied fields are touched;
      // updatedAt is always refreshed.
      const patch: {
        displayName?: string;
        description?: string | null;
        updatedAt: string;
      } = { updatedAt: new Date().toISOString() };

      if (displayName) {
        patch.displayName = displayName;
      }
      if (description !== undefined) {
        // An empty string clears the description (stored as NULL).
        patch.description = description || null;
      }

      await db.update(roles).set(patch).where(eq(roles.id, roleId));

      res.json({
        success: true,
        message: "Role updated successfully",
      });
    } catch (error) {
      databaseLogger.error("Failed to update role", error, {
        operation: "update_role",
        roleId,
      });
      res.status(500).json({ error: "Failed to update role" });
    }
  },
);
// Delete role
// DELETE /rbac/roles/:id
router.delete(
  "/roles/:id",
  authenticateJWT,
  permissionManager.requireAdmin(),
  async (req: AuthenticatedRequest, res: Response) => {
    const roleId = parseInt(req.params.id, 10);

    if (isNaN(roleId)) {
      return res.status(400).json({ error: "Invalid role ID" });
    }

    try {
      const role = await db
        .select({
          id: roles.id,
          name: roles.name,
          isSystem: roles.isSystem,
        })
        .from(roles)
        .where(eq(roles.id, roleId))
        .limit(1);

      if (role.length === 0) {
        return res.status(404).json({ error: "Role not found" });
      }

      // Built-in roles (admin, user) must never be deleted.
      if (role[0].isSystem) {
        return res.status(403).json({
          error: "Cannot delete system roles",
        });
      }

      // Detach the role from every user holding it and drop their cached
      // permission sets so the change takes effect immediately.
      const deletedUserRoles = await db
        .delete(userRoles)
        .where(eq(userRoles.roleId, roleId))
        .returning({ userId: userRoles.userId });
      for (const { userId } of deletedUserRoles) {
        permissionManager.invalidateUserPermissionCache(userId);
      }

      // Drop host-sharing grants targeting this role. (Cleanup: the previous
      // code captured the .returning() rows into an unused local.)
      await db.delete(hostAccess).where(eq(hostAccess.roleId, roleId));

      await db.delete(roles).where(eq(roles.id, roleId));

      res.json({
        success: true,
        message: "Role deleted successfully",
      });
    } catch (error) {
      databaseLogger.error("Failed to delete role", error, {
        operation: "delete_role",
        roleId,
      });
      res.status(500).json({ error: "Failed to delete role" });
    }
  },
);
// Assign role to user
// POST /rbac/users/:userId/roles
//
// Admin-only. Validates the target user and role, rejects system roles and
// duplicate assignments, records the grant, then provisions shared
// credentials for any hosts already shared with the role so the new member
// can connect immediately. Credential-provisioning failures are logged but
// do not fail the assignment (best-effort fan-out).
router.post(
  "/users/:userId/roles",
  authenticateJWT,
  permissionManager.requireAdmin(),
  async (req: AuthenticatedRequest, res: Response) => {
    const targetUserId = req.params.userId;
    const currentUserId = req.userId!;
    try {
      const { roleId } = req.body;
      if (typeof roleId !== "number") {
        return res.status(400).json({ error: "Role ID is required" });
      }
      // Target user must exist.
      const targetUser = await db
        .select()
        .from(users)
        .where(eq(users.id, targetUserId))
        .limit(1);
      if (targetUser.length === 0) {
        return res.status(404).json({ error: "User not found" });
      }
      // Role must exist and must not be a built-in system role.
      const role = await db
        .select()
        .from(roles)
        .where(eq(roles.id, roleId))
        .limit(1);
      if (role.length === 0) {
        return res.status(404).json({ error: "Role not found" });
      }
      if (role[0].isSystem) {
        return res.status(403).json({
          error:
            "System roles (admin, user) are automatically assigned and cannot be manually assigned",
        });
      }
      // Reject duplicate (user, role) assignments.
      const existing = await db
        .select()
        .from(userRoles)
        .where(
          and(eq(userRoles.userId, targetUserId), eq(userRoles.roleId, roleId)),
        )
        .limit(1);
      if (existing.length > 0) {
        return res.status(409).json({ error: "Role already assigned" });
      }
      await db.insert(userRoles).values({
        userId: targetUserId,
        roleId,
        grantedBy: currentUserId,
      });
      // Find every host already shared with this role; the new member needs
      // shared-credential records for each one that uses a stored credential.
      const hostsSharedWithRole = await db
        .select()
        .from(hostAccess)
        .innerJoin(sshData, eq(hostAccess.hostId, sshData.id))
        .where(eq(hostAccess.roleId, roleId));
      // Lazy import avoids a module-load cycle with the credential manager.
      const { SharedCredentialManager } =
        await import("../../utils/shared-credential-manager.js");
      const sharedCredManager = SharedCredentialManager.getInstance();
      // Drizzle joins key rows by table name: host_access / ssh_data.
      for (const { host_access, ssh_data } of hostsSharedWithRole) {
        if (ssh_data.credentialId) {
          try {
            await sharedCredManager.createSharedCredentialForUser(
              host_access.id,
              ssh_data.credentialId,
              targetUserId,
              ssh_data.userId,
            );
          } catch (error) {
            // Best-effort: log and continue so one bad host does not block
            // the rest of the fan-out (or the assignment itself).
            databaseLogger.error(
              "Failed to create shared credential for new role member",
              error,
              {
                operation: "assign_role_create_credentials",
                targetUserId,
                roleId,
                hostId: ssh_data.id,
              },
            );
          }
        }
      }
      // Invalidate the cache so the new role is effective immediately.
      permissionManager.invalidateUserPermissionCache(targetUserId);
      res.json({
        success: true,
        message: "Role assigned successfully",
      });
    } catch (error) {
      databaseLogger.error("Failed to assign role", error, {
        operation: "assign_role",
        targetUserId,
      });
      res.status(500).json({ error: "Failed to assign role" });
    }
  },
);
// Remove role from user
// DELETE /rbac/users/:userId/roles/:roleId
router.delete(
"/users/:userId/roles/:roleId",
authenticateJWT,
permissionManager.requireAdmin(),
async (req: AuthenticatedRequest, res: Response) => {
const targetUserId = req.params.userId;
const roleId = parseInt(req.params.roleId, 10);
if (isNaN(roleId)) {
return res.status(400).json({ error: "Invalid role ID" });
}
try {
const role = await db
.select({
id: roles.id,
name: roles.name,
isSystem: roles.isSystem,
})
.from(roles)
.where(eq(roles.id, roleId))
.limit(1);
if (role.length === 0) {
return res.status(404).json({ error: "Role not found" });
}
if (role[0].isSystem) {
return res.status(403).json({
error:
"System roles (admin, user) are automatically assigned and cannot be removed",
});
}
await db
.delete(userRoles)
.where(
and(eq(userRoles.userId, targetUserId), eq(userRoles.roleId, roleId)),
);
permissionManager.invalidateUserPermissionCache(targetUserId);
res.json({
success: true,
message: "Role removed successfully",
});
} catch (error) {
databaseLogger.error("Failed to remove role", error, {
operation: "remove_role",
targetUserId,
roleId,
});
res.status(500).json({ error: "Failed to remove role" });
}
},
);
// Get user's roles
// GET /rbac/users/:userId/roles
router.get(
  "/users/:userId/roles",
  authenticateJWT,
  async (req: AuthenticatedRequest, res: Response) => {
    const targetUserId = req.params.userId;
    const currentUserId = req.userId!;

    // Users may read their own role list; anyone else must be an admin.
    const isSelf = targetUserId === currentUserId;
    if (!isSelf && !(await permissionManager.isAdmin(currentUserId))) {
      return res.status(403).json({ error: "Access denied" });
    }

    try {
      const assigned = await db
        .select({
          id: userRoles.id,
          roleId: roles.id,
          roleName: roles.name,
          roleDisplayName: roles.displayName,
          description: roles.description,
          isSystem: roles.isSystem,
          grantedAt: userRoles.grantedAt,
        })
        .from(userRoles)
        .innerJoin(roles, eq(userRoles.roleId, roles.id))
        .where(eq(userRoles.userId, targetUserId));

      res.json({ roles: assigned });
    } catch (error) {
      databaseLogger.error("Failed to get user roles", error, {
        operation: "get_user_roles",
        targetUserId,
      });
      res.status(500).json({ error: "Failed to get user roles" });
    }
  },
);
export default router;

View File

@@ -11,13 +11,27 @@ import {
sshFolders,
commandHistory,
recentActivity,
hostAccess,
userRoles,
sessionRecordings,
} from "../db/schema.js";
import { eq, and, desc, isNotNull, or } from "drizzle-orm";
import {
eq,
and,
desc,
isNotNull,
or,
isNull,
gte,
sql,
inArray,
} from "drizzle-orm";
import type { Request, Response } from "express";
import multer from "multer";
import { sshLogger } from "../../utils/logger.js";
import { SimpleDBOps } from "../../utils/simple-db-ops.js";
import { AuthManager } from "../../utils/auth-manager.js";
import { PermissionManager } from "../../utils/permission-manager.js";
import { DataCrypto } from "../../utils/data-crypto.js";
import { SystemCrypto } from "../../utils/system-crypto.js";
import { DatabaseSaveTrigger } from "../db/index.js";
@@ -35,6 +49,7 @@ function isValidPort(port: unknown): port is number {
}
const authManager = AuthManager.getInstance();
const permissionManager = PermissionManager.getInstance();
const authenticateJWT = authManager.createAuthMiddleware();
const requireDataAccess = authManager.createDataAccessMiddleware();
@@ -231,10 +246,12 @@ router.post(
key,
keyPassword,
keyType,
sudoPassword,
pin,
enableTerminal,
enableTunnel,
enableFileManager,
enableDocker,
defaultPath,
tunnelConnections,
jumpHosts,
@@ -242,7 +259,16 @@ router.post(
statsConfig,
terminalConfig,
forceKeyboardInteractive,
notes,
useSocks5,
socks5Host,
socks5Port,
socks5Username,
socks5Password,
socks5ProxyChain,
overrideCredentialUsername,
} = hostData;
if (
!isNonEmptyString(userId) ||
!isNonEmptyString(ip) ||
@@ -269,6 +295,7 @@ router.post(
username,
authType: effectiveAuthType,
credentialId: credentialId || null,
overrideCredentialUsername: overrideCredentialUsername ? 1 : 0,
pin: pin ? 1 : 0,
enableTerminal: enableTerminal ? 1 : 0,
enableTunnel: enableTunnel ? 1 : 0,
@@ -280,10 +307,21 @@ router.post(
? JSON.stringify(quickActions)
: null,
enableFileManager: enableFileManager ? 1 : 0,
enableDocker: enableDocker ? 1 : 0,
defaultPath: defaultPath || null,
statsConfig: statsConfig ? JSON.stringify(statsConfig) : null,
terminalConfig: terminalConfig ? JSON.stringify(terminalConfig) : null,
forceKeyboardInteractive: forceKeyboardInteractive ? "true" : "false",
notes: notes || null,
sudoPassword: sudoPassword || null,
useSocks5: useSocks5 ? 1 : 0,
socks5Host: socks5Host || null,
socks5Port: socks5Port || null,
socks5Username: socks5Username || null,
socks5Password: socks5Password || null,
socks5ProxyChain: socks5ProxyChain
? JSON.stringify(socks5ProxyChain)
: null,
};
if (effectiveAuthType === "password") {
@@ -341,12 +379,14 @@ router.post(
? JSON.parse(createdHost.jumpHosts as string)
: [],
enableFileManager: !!createdHost.enableFileManager,
enableDocker: !!createdHost.enableDocker,
statsConfig: createdHost.statsConfig
? JSON.parse(createdHost.statsConfig as string)
: undefined,
};
const resolvedHost = (await resolveHostCredentials(baseHost)) || baseHost;
const resolvedHost =
(await resolveHostCredentials(baseHost, userId)) || baseHost;
sshLogger.success(
`SSH host created: ${name} (${ip}:${port}) by user ${userId}`,
@@ -453,10 +493,12 @@ router.put(
key,
keyPassword,
keyType,
sudoPassword,
pin,
enableTerminal,
enableTunnel,
enableFileManager,
enableDocker,
defaultPath,
tunnelConnections,
jumpHosts,
@@ -464,7 +506,16 @@ router.put(
statsConfig,
terminalConfig,
forceKeyboardInteractive,
notes,
useSocks5,
socks5Host,
socks5Port,
socks5Username,
socks5Password,
socks5ProxyChain,
overrideCredentialUsername,
} = hostData;
if (
!isNonEmptyString(userId) ||
!isNonEmptyString(ip) ||
@@ -492,6 +543,7 @@ router.put(
username,
authType: effectiveAuthType,
credentialId: credentialId || null,
overrideCredentialUsername: overrideCredentialUsername ? 1 : 0,
pin: pin ? 1 : 0,
enableTerminal: enableTerminal ? 1 : 0,
enableTunnel: enableTunnel ? 1 : 0,
@@ -503,10 +555,21 @@ router.put(
? JSON.stringify(quickActions)
: null,
enableFileManager: enableFileManager ? 1 : 0,
enableDocker: enableDocker ? 1 : 0,
defaultPath: defaultPath || null,
statsConfig: statsConfig ? JSON.stringify(statsConfig) : null,
terminalConfig: terminalConfig ? JSON.stringify(terminalConfig) : null,
forceKeyboardInteractive: forceKeyboardInteractive ? "true" : "false",
notes: notes || null,
sudoPassword: sudoPassword || null,
useSocks5: useSocks5 ? 1 : 0,
socks5Host: socks5Host || null,
socks5Port: socks5Port || null,
socks5Username: socks5Username || null,
socks5Password: socks5Password || null,
socks5ProxyChain: socks5ProxyChain
? JSON.stringify(socks5ProxyChain)
: null,
};
if (effectiveAuthType === "password") {
@@ -535,23 +598,100 @@ router.put(
}
try {
const accessInfo = await permissionManager.canAccessHost(
userId,
Number(hostId),
"write",
);
if (!accessInfo.hasAccess) {
sshLogger.warn("User does not have permission to update host", {
operation: "host_update",
hostId: parseInt(hostId),
userId,
});
return res.status(403).json({ error: "Access denied" });
}
if (!accessInfo.isOwner) {
sshLogger.warn("Shared user attempted to update host (view-only)", {
operation: "host_update",
hostId: parseInt(hostId),
userId,
});
return res.status(403).json({
error: "Only the host owner can modify host configuration",
});
}
const hostRecord = await db
.select({
userId: sshData.userId,
credentialId: sshData.credentialId,
authType: sshData.authType,
})
.from(sshData)
.where(eq(sshData.id, Number(hostId)))
.limit(1);
if (hostRecord.length === 0) {
sshLogger.warn("Host not found for update", {
operation: "host_update",
hostId: parseInt(hostId),
userId,
});
return res.status(404).json({ error: "Host not found" });
}
const ownerId = hostRecord[0].userId;
if (
!accessInfo.isOwner &&
sshDataObj.credentialId !== undefined &&
sshDataObj.credentialId !== hostRecord[0].credentialId
) {
return res.status(403).json({
error: "Only the host owner can change the credential",
});
}
if (
!accessInfo.isOwner &&
sshDataObj.authType !== undefined &&
sshDataObj.authType !== hostRecord[0].authType
) {
return res.status(403).json({
error: "Only the host owner can change the authentication type",
});
}
if (sshDataObj.credentialId !== undefined) {
if (
hostRecord[0].credentialId !== null &&
sshDataObj.credentialId === null
) {
const revokedShares = await db
.delete(hostAccess)
.where(eq(hostAccess.hostId, Number(hostId)))
.returning({ id: hostAccess.id, userId: hostAccess.userId });
}
}
await SimpleDBOps.update(
sshData,
"ssh_data",
and(eq(sshData.id, Number(hostId)), eq(sshData.userId, userId)),
eq(sshData.id, Number(hostId)),
sshDataObj,
userId,
ownerId,
);
const updatedHosts = await SimpleDBOps.select(
db
.select()
.from(sshData)
.where(
and(eq(sshData.id, Number(hostId)), eq(sshData.userId, userId)),
),
.where(eq(sshData.id, Number(hostId))),
"ssh_data",
userId,
ownerId,
);
if (updatedHosts.length === 0) {
@@ -582,12 +722,17 @@ router.put(
? JSON.parse(updatedHost.jumpHosts as string)
: [],
enableFileManager: !!updatedHost.enableFileManager,
enableDocker: !!updatedHost.enableDocker,
statsConfig: updatedHost.statsConfig
? JSON.parse(updatedHost.statsConfig as string)
: undefined,
dockerConfig: updatedHost.dockerConfig
? JSON.parse(updatedHost.dockerConfig as string)
: undefined,
};
const resolvedHost = (await resolveHostCredentials(baseHost)) || baseHost;
const resolvedHost =
(await resolveHostCredentials(baseHost, userId)) || baseHost;
sshLogger.success(
`SSH host updated: ${name} (${ip}:${port}) by user ${userId}`,
@@ -656,11 +801,115 @@ router.get(
return res.status(400).json({ error: "Invalid userId" });
}
try {
const data = await SimpleDBOps.select(
db.select().from(sshData).where(eq(sshData.userId, userId)),
"ssh_data",
userId,
);
const now = new Date().toISOString();
const userRoleIds = await db
.select({ roleId: userRoles.roleId })
.from(userRoles)
.where(eq(userRoles.userId, userId));
const roleIds = userRoleIds.map((r) => r.roleId);
const rawData = await db
.select({
id: sshData.id,
userId: sshData.userId,
name: sshData.name,
ip: sshData.ip,
port: sshData.port,
username: sshData.username,
folder: sshData.folder,
tags: sshData.tags,
pin: sshData.pin,
authType: sshData.authType,
password: sshData.password,
key: sshData.key,
keyPassword: sshData.key_password,
keyType: sshData.keyType,
enableTerminal: sshData.enableTerminal,
enableTunnel: sshData.enableTunnel,
tunnelConnections: sshData.tunnelConnections,
jumpHosts: sshData.jumpHosts,
enableFileManager: sshData.enableFileManager,
defaultPath: sshData.defaultPath,
autostartPassword: sshData.autostartPassword,
autostartKey: sshData.autostartKey,
autostartKeyPassword: sshData.autostartKeyPassword,
forceKeyboardInteractive: sshData.forceKeyboardInteractive,
statsConfig: sshData.statsConfig,
terminalConfig: sshData.terminalConfig,
createdAt: sshData.createdAt,
updatedAt: sshData.updatedAt,
credentialId: sshData.credentialId,
overrideCredentialUsername: sshData.overrideCredentialUsername,
quickActions: sshData.quickActions,
notes: sshData.notes,
enableDocker: sshData.enableDocker,
useSocks5: sshData.useSocks5,
socks5Host: sshData.socks5Host,
socks5Port: sshData.socks5Port,
socks5Username: sshData.socks5Username,
socks5Password: sshData.socks5Password,
socks5ProxyChain: sshData.socks5ProxyChain,
ownerId: sshData.userId,
isShared: sql<boolean>`${hostAccess.id} IS NOT NULL AND ${sshData.userId} != ${userId}`,
permissionLevel: hostAccess.permissionLevel,
expiresAt: hostAccess.expiresAt,
})
.from(sshData)
.leftJoin(
hostAccess,
and(
eq(hostAccess.hostId, sshData.id),
or(
eq(hostAccess.userId, userId),
roleIds.length > 0
? inArray(hostAccess.roleId, roleIds)
: sql`false`,
),
or(isNull(hostAccess.expiresAt), gte(hostAccess.expiresAt, now)),
),
)
.where(
or(
eq(sshData.userId, userId),
and(
eq(hostAccess.userId, userId),
or(isNull(hostAccess.expiresAt), gte(hostAccess.expiresAt, now)),
),
roleIds.length > 0
? and(
inArray(hostAccess.roleId, roleIds),
or(
isNull(hostAccess.expiresAt),
gte(hostAccess.expiresAt, now),
),
)
: sql`false`,
),
);
const ownHosts = rawData.filter((row) => row.userId === userId);
const sharedHosts = rawData.filter((row) => row.userId !== userId);
let decryptedOwnHosts: any[] = [];
try {
decryptedOwnHosts = await SimpleDBOps.select(
Promise.resolve(ownHosts),
"ssh_data",
userId,
);
} catch (decryptError) {
sshLogger.error("Failed to decrypt own hosts", decryptError, {
operation: "host_fetch_own_decrypt_failed",
userId,
});
decryptedOwnHosts = [];
}
const sanitizedSharedHosts = sharedHosts;
const data = [...decryptedOwnHosts, ...sanitizedSharedHosts];
const result = await Promise.all(
data.map(async (row: Record<string, unknown>) => {
@@ -683,6 +932,7 @@ router.get(
? JSON.parse(row.quickActions as string)
: [],
enableFileManager: !!row.enableFileManager,
enableDocker: !!row.enableDocker,
statsConfig: row.statsConfig
? JSON.parse(row.statsConfig as string)
: undefined,
@@ -690,9 +940,18 @@ router.get(
? JSON.parse(row.terminalConfig as string)
: undefined,
forceKeyboardInteractive: row.forceKeyboardInteractive === "true",
socks5ProxyChain: row.socks5ProxyChain
? JSON.parse(row.socks5ProxyChain as string)
: [],
isShared: !!row.isShared,
permissionLevel: row.permissionLevel || undefined,
sharedExpiresAt: row.expiresAt || undefined,
};
return (await resolveHostCredentials(baseHost)) || baseHost;
const resolved =
(await resolveHostCredentials(baseHost, userId)) || baseHost;
return resolved;
}),
);
@@ -765,9 +1024,12 @@ router.get(
? JSON.parse(host.terminalConfig)
: undefined,
forceKeyboardInteractive: host.forceKeyboardInteractive === "true",
socks5ProxyChain: host.socks5ProxyChain
? JSON.parse(host.socks5ProxyChain)
: [],
};
res.json((await resolveHostCredentials(result)) || result);
res.json((await resolveHostCredentials(result, userId)) || result);
} catch (err) {
sshLogger.error("Failed to fetch SSH host by ID from database", err, {
operation: "host_fetch_by_id",
@@ -811,7 +1073,7 @@ router.get(
const host = hosts[0];
const resolvedHost = (await resolveHostCredentials(host)) || host;
const resolvedHost = (await resolveHostCredentials(host, userId)) || host;
const exportData = {
name: resolvedHost.name,
@@ -836,6 +1098,9 @@ router.get(
tunnelConnections: resolvedHost.tunnelConnections
? JSON.parse(resolvedHost.tunnelConnections as string)
: [],
socks5ProxyChain: resolvedHost.socks5ProxyChain
? JSON.parse(resolvedHost.socks5ProxyChain as string)
: [],
};
sshLogger.success("Host exported with decrypted credentials", {
@@ -893,57 +1158,33 @@ router.delete(
await db
.delete(fileManagerRecent)
.where(
and(
eq(fileManagerRecent.hostId, numericHostId),
eq(fileManagerRecent.userId, userId),
),
);
.where(eq(fileManagerRecent.hostId, numericHostId));
await db
.delete(fileManagerPinned)
.where(
and(
eq(fileManagerPinned.hostId, numericHostId),
eq(fileManagerPinned.userId, userId),
),
);
.where(eq(fileManagerPinned.hostId, numericHostId));
await db
.delete(fileManagerShortcuts)
.where(
and(
eq(fileManagerShortcuts.hostId, numericHostId),
eq(fileManagerShortcuts.userId, userId),
),
);
.where(eq(fileManagerShortcuts.hostId, numericHostId));
await db
.delete(commandHistory)
.where(
and(
eq(commandHistory.hostId, numericHostId),
eq(commandHistory.userId, userId),
),
);
.where(eq(commandHistory.hostId, numericHostId));
await db
.delete(sshCredentialUsage)
.where(
and(
eq(sshCredentialUsage.hostId, numericHostId),
eq(sshCredentialUsage.userId, userId),
),
);
.where(eq(sshCredentialUsage.hostId, numericHostId));
await db
.delete(recentActivity)
.where(
and(
eq(recentActivity.hostId, numericHostId),
eq(recentActivity.userId, userId),
),
);
.where(eq(recentActivity.hostId, numericHostId));
await db.delete(hostAccess).where(eq(hostAccess.hostId, numericHostId));
await db
.delete(sessionRecordings)
.where(eq(sessionRecordings.hostId, numericHostId));
await db
.delete(sshData)
@@ -1450,11 +1691,55 @@ router.delete(
async function resolveHostCredentials(
host: Record<string, unknown>,
requestingUserId?: string,
): Promise<Record<string, unknown>> {
try {
if (host.credentialId && host.userId) {
if (host.credentialId && (host.userId || host.ownerId)) {
const credentialId = host.credentialId as number;
const userId = host.userId as string;
const ownerId = (host.ownerId || host.userId) as string;
if (requestingUserId && requestingUserId !== ownerId) {
try {
const { SharedCredentialManager } = await import(
"../../utils/shared-credential-manager.js"
);
const sharedCredManager = SharedCredentialManager.getInstance();
const sharedCred = await sharedCredManager.getSharedCredentialForUser(
host.id as number,
requestingUserId,
);
if (sharedCred) {
const resolvedHost: Record<string, unknown> = {
...host,
authType: sharedCred.authType,
password: sharedCred.password,
key: sharedCred.key,
keyPassword: sharedCred.keyPassword,
keyType: sharedCred.keyType,
};
if (!host.overrideCredentialUsername) {
resolvedHost.username = sharedCred.username;
}
return resolvedHost;
}
} catch (sharedCredError) {
sshLogger.warn(
"Failed to get shared credential, falling back to owner credential",
{
operation: "resolve_shared_credential_fallback",
hostId: host.id as number,
requestingUserId,
error:
sharedCredError instanceof Error
? sharedCredError.message
: "Unknown error",
},
);
}
}
const credentials = await SimpleDBOps.select(
db
@@ -1463,24 +1748,29 @@ async function resolveHostCredentials(
.where(
and(
eq(sshCredentials.id, credentialId),
eq(sshCredentials.userId, userId),
eq(sshCredentials.userId, ownerId),
),
),
"ssh_credentials",
userId,
ownerId,
);
if (credentials.length > 0) {
const credential = credentials[0];
return {
const resolvedHost: Record<string, unknown> = {
...host,
username: credential.username,
authType: credential.auth_type || credential.authType,
password: credential.password,
key: credential.key,
keyPassword: credential.key_password || credential.keyPassword,
keyType: credential.key_type || credential.keyType,
};
if (!host.overrideCredentialUsername) {
resolvedHost.username = credential.username;
}
return resolvedHost;
}
}
@@ -1680,6 +1970,40 @@ router.delete(
});
}
const hostIds = hostsToDelete.map((host) => host.id);
if (hostIds.length > 0) {
await db
.delete(fileManagerRecent)
.where(inArray(fileManagerRecent.hostId, hostIds));
await db
.delete(fileManagerPinned)
.where(inArray(fileManagerPinned.hostId, hostIds));
await db
.delete(fileManagerShortcuts)
.where(inArray(fileManagerShortcuts.hostId, hostIds));
await db
.delete(commandHistory)
.where(inArray(commandHistory.hostId, hostIds));
await db
.delete(sshCredentialUsage)
.where(inArray(sshCredentialUsage.hostId, hostIds));
await db
.delete(recentActivity)
.where(inArray(recentActivity.hostId, hostIds));
await db.delete(hostAccess).where(inArray(hostAccess.hostId, hostIds));
await db
.delete(sessionRecordings)
.where(inArray(sessionRecordings.hostId, hostIds));
}
await db
.delete(sshData)
.where(and(eq(sshData.userId, userId), eq(sshData.folder, folderName)));
@@ -1782,10 +2106,12 @@ router.post(
continue;
}
if (!["password", "key", "credential"].includes(hostData.authType)) {
if (
!["password", "key", "credential", "none"].includes(hostData.authType)
) {
results.failed++;
results.errors.push(
`Host ${i + 1}: Invalid authType. Must be 'password', 'key', or 'credential'`,
`Host ${i + 1}: Invalid authType. Must be 'password', 'key', 'credential', or 'none'`,
);
continue;
}
@@ -1840,13 +2166,38 @@ router.post(
enableTerminal: hostData.enableTerminal !== false,
enableTunnel: hostData.enableTunnel !== false,
enableFileManager: hostData.enableFileManager !== false,
enableDocker: hostData.enableDocker || false,
defaultPath: hostData.defaultPath || "/",
tunnelConnections: hostData.tunnelConnections
? JSON.stringify(hostData.tunnelConnections)
: "[]",
jumpHosts: hostData.jumpHosts
? JSON.stringify(hostData.jumpHosts)
: null,
quickActions: hostData.quickActions
? JSON.stringify(hostData.quickActions)
: null,
statsConfig: hostData.statsConfig
? JSON.stringify(hostData.statsConfig)
: null,
terminalConfig: hostData.terminalConfig
? JSON.stringify(hostData.terminalConfig)
: null,
forceKeyboardInteractive: hostData.forceKeyboardInteractive
? "true"
: "false",
notes: hostData.notes || null,
useSocks5: hostData.useSocks5 ? 1 : 0,
socks5Host: hostData.socks5Host || null,
socks5Port: hostData.socks5Port || null,
socks5Username: hostData.socks5Username || null,
socks5Password: hostData.socks5Password || null,
socks5ProxyChain: hostData.socks5ProxyChain
? JSON.stringify(hostData.socks5ProxyChain)
: null,
overrideCredentialUsername: hostData.overrideCredentialUsername
? 1
: 0,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
};

View File

@@ -15,6 +15,15 @@ import {
sshCredentialUsage,
recentActivity,
snippets,
snippetFolders,
sshFolders,
commandHistory,
roles,
userRoles,
hostAccess,
sharedCredentials,
auditLogs,
sessionRecordings,
} from "../db/schema.js";
import { eq, and } from "drizzle-orm";
import bcrypt from "bcryptjs";
@@ -134,6 +143,77 @@ function isNonEmptyString(val: unknown): val is string {
const authenticateJWT = authManager.createAuthMiddleware();
const requireAdmin = authManager.createAdminMiddleware();
/**
 * Permanently delete a user and every row that references them.
 *
 * Deletion order matters: dependent tables (shared credentials, session
 * recordings, host-access grants) are cleared before the rows they point at,
 * and the user row itself goes last. Any failure is logged and rethrown so
 * the caller can surface the error.
 *
 * @param userId - id of the user to remove
 * @throws rethrows any database error after logging it
 */
async function deleteUserAndRelatedData(userId: string): Promise<void> {
  try {
    // Delete shared credentials first (depends on hostAccess)
    await db
      .delete(sharedCredentials)
      .where(eq(sharedCredentials.targetUserId, userId));

    // Delete session recordings (depends on hostAccess)
    await db
      .delete(sessionRecordings)
      .where(eq(sessionRecordings.userId, userId));

    // Delete host access records (both granted by and granted to this user)
    await db.delete(hostAccess).where(eq(hostAccess.userId, userId));
    await db.delete(hostAccess).where(eq(hostAccess.grantedBy, userId));

    // Delete sessions
    await db.delete(sessions).where(eq(sessions.userId, userId));

    // Delete user roles
    await db.delete(userRoles).where(eq(userRoles.userId, userId));

    // Delete audit logs
    await db.delete(auditLogs).where(eq(auditLogs.userId, userId));

    // Per-user usage records and file-manager state.
    await db
      .delete(sshCredentialUsage)
      .where(eq(sshCredentialUsage.userId, userId));
    await db
      .delete(fileManagerRecent)
      .where(eq(fileManagerRecent.userId, userId));
    await db
      .delete(fileManagerPinned)
      .where(eq(fileManagerPinned.userId, userId));
    await db
      .delete(fileManagerShortcuts)
      .where(eq(fileManagerShortcuts.userId, userId));

    // Activity, alerts, snippets, folders and command history.
    await db.delete(recentActivity).where(eq(recentActivity.userId, userId));
    await db.delete(dismissedAlerts).where(eq(dismissedAlerts.userId, userId));
    await db.delete(snippets).where(eq(snippets.userId, userId));
    await db.delete(snippetFolders).where(eq(snippetFolders.userId, userId));
    await db.delete(sshFolders).where(eq(sshFolders.userId, userId));
    await db.delete(commandHistory).where(eq(commandHistory.userId, userId));

    // Hosts and stored credentials owned by the user.
    await db.delete(sshData).where(eq(sshData.userId, userId));
    await db.delete(sshCredentials).where(eq(sshCredentials.userId, userId));

    // Per-user settings are keyed as `user_<name>_<userId>` in the settings
    // table, which drizzle has no schema for here — raw SQL via the
    // underlying client.
    db.$client
      .prepare("DELETE FROM settings WHERE key LIKE ?")
      .run(`user_%_${userId}`);

    // Finally remove the user row itself.
    await db.delete(users).where(eq(users.id, userId));

    authLogger.success("User and all related data deleted successfully", {
      operation: "delete_user_and_related_data_complete",
      userId,
    });
  } catch (error) {
    authLogger.error("Failed to delete user and related data", error, {
      operation: "delete_user_and_related_data_failed",
      userId,
    });
    throw error;
  }
}
// Route: Create traditional user (username/password)
// POST /users/create
router.post("/create", async (req, res) => {
@@ -210,6 +290,34 @@ router.post("/create", async (req, res) => {
totp_backup_codes: null,
});
try {
const defaultRoleName = isFirstUser ? "admin" : "user";
const defaultRole = await db
.select({ id: roles.id })
.from(roles)
.where(eq(roles.name, defaultRoleName))
.limit(1);
if (defaultRole.length > 0) {
await db.insert(userRoles).values({
userId: id,
roleId: defaultRole[0].id,
grantedBy: id,
});
} else {
authLogger.warn("Default role not found during user registration", {
operation: "assign_default_role",
userId: id,
roleName: defaultRoleName,
});
}
} catch (roleError) {
authLogger.error("Failed to assign default role", roleError, {
operation: "assign_default_role",
userId: id,
});
}
try {
await authManager.registerUser(id, password);
} catch (encryptionError) {
@@ -816,6 +924,41 @@ router.get("/oidc/callback", async (req, res) => {
scopes: String(config.scopes),
});
try {
const defaultRoleName = isFirstUser ? "admin" : "user";
const defaultRole = await db
.select({ id: roles.id })
.from(roles)
.where(eq(roles.name, defaultRoleName))
.limit(1);
if (defaultRole.length > 0) {
await db.insert(userRoles).values({
userId: id,
roleId: defaultRole[0].id,
grantedBy: id,
});
} else {
authLogger.warn(
"Default role not found during OIDC user registration",
{
operation: "assign_default_role_oidc",
userId: id,
roleName: defaultRoleName,
},
);
}
} catch (roleError) {
authLogger.error(
"Failed to assign default role to OIDC user",
roleError,
{
operation: "assign_default_role_oidc",
userId: id,
},
);
}
try {
const sessionDurationMs =
deviceInfo.type === "desktop" || deviceInfo.type === "mobile"
@@ -1055,6 +1198,19 @@ router.post("/login", async (req, res) => {
return res.status(401).json({ error: "Incorrect password" });
}
try {
const { SharedCredentialManager } =
await import("../../utils/shared-credential-manager.js");
const sharedCredManager = SharedCredentialManager.getInstance();
await sharedCredManager.reEncryptPendingCredentialsForUser(userRecord.id);
} catch (error) {
authLogger.warn("Failed to re-encrypt pending shared credentials", {
operation: "reencrypt_pending_credentials",
userId: userRecord.id,
error,
});
}
if (userRecord.totp_enabled) {
const tempToken = await authManager.generateJWTToken(userRecord.id, {
pendingTOTP: true,
@@ -1128,15 +1284,7 @@ router.post("/logout", authenticateJWT, async (req, res) => {
try {
const payload = await authManager.verifyJWTToken(token);
sessionId = payload?.sessionId;
} catch (error) {
authLogger.debug(
"Token verification failed during logout (expected if token expired)",
{
operation: "logout_token_verify_failed",
userId,
},
);
}
} catch (error) {}
}
await authManager.logoutUser(userId, sessionId);
@@ -2252,36 +2400,8 @@ router.delete("/delete-user", authenticateJWT, async (req, res) => {
const targetUserId = targetUser[0].id;
try {
await db
.delete(sshCredentialUsage)
.where(eq(sshCredentialUsage.userId, targetUserId));
await db
.delete(fileManagerRecent)
.where(eq(fileManagerRecent.userId, targetUserId));
await db
.delete(fileManagerPinned)
.where(eq(fileManagerPinned.userId, targetUserId));
await db
.delete(fileManagerShortcuts)
.where(eq(fileManagerShortcuts.userId, targetUserId));
await db
.delete(recentActivity)
.where(eq(recentActivity.userId, targetUserId));
await db
.delete(dismissedAlerts)
.where(eq(dismissedAlerts.userId, targetUserId));
await db.delete(snippets).where(eq(snippets.userId, targetUserId));
await db.delete(sshData).where(eq(sshData.userId, targetUserId));
await db
.delete(sshCredentials)
.where(eq(sshCredentials.userId, targetUserId));
} catch (cleanupError) {
authLogger.error(`Cleanup failed for user ${username}:`, cleanupError);
throw cleanupError;
}
await db.delete(users).where(eq(users.id, targetUserId));
// Use the comprehensive deletion utility
await deleteUserAndRelatedData(targetUserId);
authLogger.success(
`User ${username} deleted by admin ${adminUser[0].username}`,
@@ -2696,15 +2816,7 @@ router.post("/link-oidc-to-password", authenticateJWT, async (req, res) => {
await authManager.revokeAllUserSessions(oidcUserId);
authManager.logoutUser(oidcUserId);
await db
.delete(recentActivity)
.where(eq(recentActivity.userId, oidcUserId));
await db.delete(users).where(eq(users.id, oidcUserId));
db.$client
.prepare("DELETE FROM settings WHERE key LIKE ?")
.run(`user_%_${oidcUserId}`);
await deleteUserAndRelatedData(oidcUserId);
try {
const { saveMemoryDatabaseToFile } = await import("../db/index.js");

View File

@@ -0,0 +1,103 @@
#!/bin/bash
# Bootstraps a Termix deployment for SSL: writes a fresh .env (backing up
# any existing one), generates JWT/database secrets, then delegates
# certificate creation to setup-ssl.sh in the same directory.
set -e
# ANSI color codes for the log_* helpers below.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m'
# Resolve paths relative to this script so it works from any CWD.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
ENV_FILE="$PROJECT_ROOT/.env"
# Colored logging helpers.
log_info() {
echo -e "${BLUE}[SSL Setup]${NC} $1"
}
log_success() {
echo -e "${GREEN}[SSL Setup]${NC} $1"
}
log_warn() {
echo -e "${YELLOW}[SSL Setup]${NC} $1"
}
log_error() {
echo -e "${RED}[SSL Setup]${NC} $1"
}
log_header() {
echo -e "${CYAN}$1${NC}"
}
# Appends freshly generated JWT and database-encryption secrets to the
# .env file. Must run AFTER setup_env_file has (re)created that file.
generate_keys() {
log_info "Generating security keys..."
JWT_SECRET=$(openssl rand -hex 32)
log_success "Generated JWT secret"
DATABASE_KEY=$(openssl rand -hex 32)
log_success "Generated database encryption key"
echo "JWT_SECRET=$JWT_SECRET" >> "$ENV_FILE"
echo "DATABASE_KEY=$DATABASE_KEY" >> "$ENV_FILE"
log_success "Security keys added to .env file"
}
# Overwrites .env with SSL defaults (timestamped backup is kept), then
# appends secrets via generate_keys. $(date) is expanded at write time.
setup_env_file() {
log_info "Setting up environment configuration..."
if [[ -f "$ENV_FILE" ]]; then
log_warn ".env file already exists, creating backup..."
cp "$ENV_FILE" "$ENV_FILE.backup.$(date +%s)"
fi
cat > "$ENV_FILE" << EOF
# Termix SSL Configuration - Auto-generated $(date)
# SSL/TLS Configuration
ENABLE_SSL=true
SSL_PORT=8443
SSL_DOMAIN=localhost
PORT=8080
# Node environment
NODE_ENV=production
# CORS configuration
ALLOWED_ORIGINS=*
EOF
generate_keys
log_success "Environment configuration created at $ENV_FILE"
}
# Runs the sibling certificate-generation script; hard failure if missing.
setup_ssl_certificates() {
log_info "Setting up SSL certificates..."
if [[ -f "$SCRIPT_DIR/setup-ssl.sh" ]]; then
bash "$SCRIPT_DIR/setup-ssl.sh"
else
log_error "SSL setup script not found at $SCRIPT_DIR/setup-ssl.sh"
exit 1
fi
}
# Entry point: verify openssl is available, then write env and certs.
main() {
if ! command -v openssl &> /dev/null; then
log_error "OpenSSL is not installed. Please install OpenSSL first."
exit 1
fi
setup_env_file
setup_ssl_certificates
}
# Run main function
main "$@"

View File

@@ -0,0 +1,121 @@
#!/bin/bash
# Generates (or reuses) a self-signed SSL certificate for Termix.
# Fix: check_existing_cert was previously defined but never invoked, so a
# new key/certificate was generated on EVERY run; main now reuses a
# certificate that is still valid for more than 30 days.
set -e
# Output locations, resolved relative to this script.
SSL_DIR="$(dirname "$0")/../ssl"
CERT_FILE="$SSL_DIR/termix.crt"
KEY_FILE="$SSL_DIR/termix.key"
DAYS_VALID=365
# Primary certificate CN; extra SANs come from SSL_ALT_NAMES (consumed
# directly in generate_certificate below).
DOMAIN=${SSL_DOMAIN:-"localhost"}
# ANSI color codes for the log_* helpers.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
log_info() {
echo -e "${BLUE}[SSL Setup]${NC} $1"
}
log_success() {
echo -e "${GREEN}[SSL Setup]${NC} $1"
}
log_warn() {
echo -e "${YELLOW}[SSL Setup]${NC} $1"
}
log_error() {
echo -e "${RED}[SSL Setup]${NC} $1"
}
# Returns 0 when a certificate exists and is valid for at least another
# 30 days (2592000 s), 1 otherwise.
check_existing_cert() {
if [[ -f "$CERT_FILE" && -f "$KEY_FILE" ]]; then
if openssl x509 -in "$CERT_FILE" -checkend 2592000 -noout 2>/dev/null; then
log_success "Valid SSL certificate already exists"
local expiry=$(openssl x509 -in "$CERT_FILE" -noout -enddate 2>/dev/null | cut -d= -f2)
log_info "Expires: $expiry"
return 0
else
log_warn "Existing certificate is expired or expiring soon"
fi
fi
return 1
}
# Writes a temporary openssl config (with SANs), generates a 2048-bit RSA
# key and a self-signed cert, then locks down key permissions.
generate_certificate() {
log_info "Generating new SSL certificate for domain: $DOMAIN"
mkdir -p "$SSL_DIR"
local config_file="$SSL_DIR/openssl.conf"
cat > "$config_file" << EOF
[req]
default_bits = 2048
prompt = no
default_md = sha256
distinguished_name = dn
req_extensions = v3_req
[dn]
C=US
ST=State
L=City
O=Termix
OU=IT Department
CN=$DOMAIN
[v3_req]
basicConstraints = CA:FALSE
keyUsage = nonRepudiation, digitalSignature, keyEncipherment
subjectAltName = @alt_names
[alt_names]
DNS.1 = localhost
DNS.2 = 127.0.0.1
DNS.3 = *.localhost
IP.1 = 127.0.0.1
EOF
# Append user-supplied SANs. Entries must be "DNS:name" or "IP:addr",
# comma-separated; counters continue from the defaults above.
if [[ -n "$SSL_ALT_NAMES" ]]; then
local counter=2
IFS=',' read -ra NAMES <<< "$SSL_ALT_NAMES"
for name in "${NAMES[@]}"; do
name=$(echo "$name" | xargs)
if [[ "$name" == DNS:* ]]; then
echo "DNS.$((counter++)) = ${name#DNS:}" >> "$config_file"
elif [[ "$name" == IP:* ]]; then
echo "IP.$((counter++)) = ${name#IP:}" >> "$config_file"
fi
done
fi
log_info "Generating private key..."
openssl genrsa -out "$KEY_FILE" 2048
log_info "Generating certificate..."
openssl req -new -x509 -key "$KEY_FILE" -out "$CERT_FILE" -days $DAYS_VALID -config "$config_file" -extensions v3_req
# Private key readable only by owner; certificate is world-readable.
chmod 600 "$KEY_FILE"
chmod 644 "$CERT_FILE"
rm -f "$config_file"
log_success "SSL certificate generated successfully"
log_info "Valid for: $DAYS_VALID days"
}
# Entry point: require openssl, reuse a still-valid certificate when
# possible, otherwise (re)generate one.
main() {
if ! command -v openssl &> /dev/null; then
log_error "OpenSSL is not installed. Please install OpenSSL first."
exit 1
fi
if check_existing_cert; then
exit 0
fi
generate_certificate
}
main "$@"

View File

@@ -0,0 +1,632 @@
import { Client as SSHClient } from "ssh2";
import { WebSocketServer, WebSocket } from "ws";
import { parse as parseUrl } from "url";
import { AuthManager } from "../utils/auth-manager.js";
import { sshData, sshCredentials } from "../database/db/schema.js";
import { and, eq } from "drizzle-orm";
import { getDb } from "../database/db/index.js";
import { SimpleDBOps } from "../utils/simple-db-ops.js";
import { systemLogger } from "../utils/logger.js";
import type { SSHHost } from "../../types/index.js";
// Module-local alias so log lines from this file are easy to grep.
const dockerConsoleLogger = systemLogger;
// State for one live Docker console session: the SSH connection to the
// Docker host plus the PTY stream attached to the container's shell.
interface SSHSession {
  client: SSHClient;
  // Stream from `docker exec -it`; null until the exec is established.
  stream: any;
  isConnected: boolean;
  containerId?: string;
  // Shell binary actually in use inside the container (e.g. "bash", "sh").
  shell?: string;
}
// All live console sessions, keyed by per-connection session id.
const activeSessions = new Map<string, SSHSession>();
// WebSocket endpoint for Docker container consoles.
//
// Two fixes versus the original:
//  1. verifyClient was declared `async`, so it returned a Promise — which
//     `ws` treats as a plain truthy value, accepting EVERY client without
//     waiting for token verification. It now uses the callback form that
//     `ws` provides for asynchronous verification.
//  2. The verified userId is attached to the request so the connection
//     handler's `(req as any).userId` reads (it was previously undefined,
//     breaking all per-user DB lookups).
const wss = new WebSocketServer({
  host: "0.0.0.0",
  port: 30008,
  verifyClient: (info, done) => {
    void (async () => {
      try {
        const url = parseUrl(info.req.url || "", true);
        const token = url.query.token as string;
        if (!token) {
          done(false, 401, "Unauthorized");
          return;
        }
        const authManager = AuthManager.getInstance();
        const decoded = await authManager.verifyJWTToken(token);
        if (!decoded || !decoded.userId) {
          done(false, 401, "Unauthorized");
          return;
        }
        // Make the authenticated user available to the connection handler.
        (info.req as any).userId = decoded.userId;
        done(true);
      } catch (error) {
        done(false, 401, "Unauthorized");
      }
    })();
  },
});
// Probes the container for a usable shell, in preference order
// bash > sh > ash, by running `which <shell>` through docker exec on the
// remote host. Falls back to "sh" when none of the probes succeed.
//
// Security fix: containerId originates from the client WebSocket message
// and is interpolated into a remote shell command line; it is now
// validated against Docker's legal id/name charset to prevent command
// injection (e.g. containerId = "x; rm -rf /").
async function detectShell(
  session: SSHSession,
  containerId: string,
): Promise<string> {
  if (!/^[A-Za-z0-9][A-Za-z0-9_.-]*$/.test(containerId)) {
    throw new Error("Invalid container ID");
  }
  const shells = ["bash", "sh", "ash"];
  for (const shell of shells) {
    try {
      // `which` exits 0 with a path on stdout when the shell exists.
      await new Promise<void>((resolve, reject) => {
        session.client.exec(
          `docker exec ${containerId} which ${shell}`,
          (err, stream) => {
            if (err) return reject(err);
            let output = "";
            stream.on("data", (data: Buffer) => {
              output += data.toString();
            });
            stream.on("close", (code: number) => {
              if (code === 0 && output.trim()) {
                resolve();
              } else {
                reject(new Error(`Shell ${shell} not found`));
              }
            });
            stream.stderr.on("data", () => {
              // Ignore stderr
            });
          },
        );
      });
      return shell;
    } catch {
      continue;
    }
  }
  // Nothing probed successfully; sh is the safest default.
  return "sh";
}
// Builds a chain of SSH connections through the given jump hosts, in
// order. Each hop's credentials are loaded from the user's ssh_data row
// (optionally overridden by a stored credential), and each subsequent hop
// is tunneled through the previous client via forwardOut. Returns the
// LAST client in the chain (tunnel endpoint), or null when no jump hosts
// were requested. Throws if a jump host row is missing or a hop fails.
async function createJumpHostChain(
  jumpHosts: any[],
  userId: string,
): Promise<SSHClient | null> {
  if (!jumpHosts || jumpHosts.length === 0) {
    return null;
  }
  let currentClient: SSHClient | null = null;
  for (let i = 0; i < jumpHosts.length; i++) {
    const jumpHostId = jumpHosts[i].hostId;
    // Scoped to this user so one user cannot hop through another's hosts.
    const jumpHostData = await SimpleDBOps.select(
      getDb()
        .select()
        .from(sshData)
        .where(and(eq(sshData.id, jumpHostId), eq(sshData.userId, userId))),
      "ssh_data",
      userId,
    );
    if (jumpHostData.length === 0) {
      throw new Error(`Jump host ${jumpHostId} not found`);
    }
    const jumpHost = jumpHostData[0] as unknown as SSHHost;
    // jumpHosts may be stored as a JSON string; normalize to an array.
    if (typeof jumpHost.jumpHosts === "string" && jumpHost.jumpHosts) {
      try {
        jumpHost.jumpHosts = JSON.parse(jumpHost.jumpHosts);
      } catch (e) {
        dockerConsoleLogger.error("Failed to parse jump hosts", e, {
          hostId: jumpHost.id,
        });
        jumpHost.jumpHosts = [];
      }
    }
    // Default to credentials stored directly on the host row...
    let resolvedCredentials: any = {
      password: jumpHost.password,
      sshKey: jumpHost.key,
      keyPassword: jumpHost.keyPassword,
      authType: jumpHost.authType,
    };
    // ...but prefer a linked stored credential when one is configured.
    // Field fallbacks cover both snake_case and camelCase row shapes.
    if (jumpHost.credentialId) {
      const credentials = await SimpleDBOps.select(
        getDb()
          .select()
          .from(sshCredentials)
          .where(
            and(
              eq(sshCredentials.id, jumpHost.credentialId as number),
              eq(sshCredentials.userId, userId),
            ),
          ),
        "ssh_credentials",
        userId,
      );
      if (credentials.length > 0) {
        const credential = credentials[0];
        resolvedCredentials = {
          password: credential.password,
          sshKey:
            credential.private_key || credential.privateKey || credential.key,
          keyPassword: credential.key_password || credential.keyPassword,
          authType: credential.auth_type || credential.authType,
        };
      }
    }
    const client = new SSHClient();
    // Generous timeouts/keepalives: console sessions can sit idle long.
    const config: any = {
      host: jumpHost.ip,
      port: jumpHost.port || 22,
      username: jumpHost.username,
      tryKeyboard: true,
      readyTimeout: 60000,
      keepaliveInterval: 30000,
      keepaliveCountMax: 120,
      tcpKeepAlive: true,
      tcpKeepAliveInitialDelay: 30000,
    };
    if (
      resolvedCredentials.authType === "password" &&
      resolvedCredentials.password
    ) {
      config.password = resolvedCredentials.password;
    } else if (
      resolvedCredentials.authType === "key" &&
      resolvedCredentials.sshKey
    ) {
      // Normalize CRLF/CR line endings; ssh2 requires LF-delimited keys.
      const cleanKey = resolvedCredentials.sshKey
        .trim()
        .replace(/\r\n/g, "\n")
        .replace(/\r/g, "\n");
      config.privateKey = Buffer.from(cleanKey, "utf8");
      if (resolvedCredentials.keyPassword) {
        config.passphrase = resolvedCredentials.keyPassword;
      }
    }
    // Hops after the first connect THROUGH the previous hop's tunnel.
    if (currentClient) {
      await new Promise<void>((resolve, reject) => {
        currentClient!.forwardOut(
          "127.0.0.1",
          0,
          jumpHost.ip,
          jumpHost.port || 22,
          (err, stream) => {
            if (err) return reject(err);
            config.sock = stream;
            resolve();
          },
        );
      });
    }
    await new Promise<void>((resolve, reject) => {
      client.on("ready", () => resolve());
      client.on("error", reject);
      client.connect(config);
    });
    currentClient = client;
  }
  return currentClient;
}
// Per-connection handler for the Docker console WebSocket. Drives a small
// message protocol: "connect" (open SSH + docker exec into a container),
// "input" (keystrokes), "resize" (PTY size), "disconnect", "ping".
wss.on("connection", async (ws: WebSocket, req) => {
  // NOTE(review): verifyClient verifies the JWT but never assigns
  // req.userId, so this is undefined here and every per-user DB lookup
  // below filters on undefined — confirm and fix upstream.
  const userId = (req as any).userId;
  const sessionId = `docker-console-${Date.now()}-${Math.random()}`;
  let sshSession: SSHSession | null = null;
  ws.on("message", async (data) => {
    try {
      const message = JSON.parse(data.toString());
      switch (message.type) {
        case "connect": {
          const { hostConfig, containerId, shell, cols, rows } =
            message.data as {
              hostConfig: SSHHost;
              containerId: string;
              shell?: string;
              cols?: number;
              rows?: number;
            };
          // jumpHosts may arrive as a JSON string; normalize to an array.
          if (
            typeof hostConfig.jumpHosts === "string" &&
            hostConfig.jumpHosts
          ) {
            try {
              hostConfig.jumpHosts = JSON.parse(hostConfig.jumpHosts);
            } catch (e) {
              dockerConsoleLogger.error("Failed to parse jump hosts", e, {
                hostId: hostConfig.id,
              });
              hostConfig.jumpHosts = [];
            }
          }
          if (!hostConfig || !containerId) {
            ws.send(
              JSON.stringify({
                type: "error",
                message: "Host configuration and container ID are required",
              }),
            );
            return;
          }
          if (!hostConfig.enableDocker) {
            ws.send(
              JSON.stringify({
                type: "error",
                message:
                  "Docker is not enabled for this host. Enable it in Host Settings.",
              }),
            );
            return;
          }
          try {
            // Credentials from the host config, overridden by a stored
            // credential when hostConfig.credentialId is set. Fallbacks
            // cover both snake_case and camelCase row shapes.
            let resolvedCredentials: any = {
              password: hostConfig.password,
              sshKey: hostConfig.key,
              keyPassword: hostConfig.keyPassword,
              authType: hostConfig.authType,
            };
            if (hostConfig.credentialId) {
              const credentials = await SimpleDBOps.select(
                getDb()
                  .select()
                  .from(sshCredentials)
                  .where(
                    and(
                      eq(sshCredentials.id, hostConfig.credentialId as number),
                      eq(sshCredentials.userId, userId),
                    ),
                  ),
                "ssh_credentials",
                userId,
              );
              if (credentials.length > 0) {
                const credential = credentials[0];
                resolvedCredentials = {
                  password: credential.password,
                  sshKey:
                    credential.private_key ||
                    credential.privateKey ||
                    credential.key,
                  keyPassword:
                    credential.key_password || credential.keyPassword,
                  authType: credential.auth_type || credential.authType,
                };
              }
            }
            const client = new SSHClient();
            // Generous keepalives: console sessions can sit idle long.
            const config: any = {
              host: hostConfig.ip,
              port: hostConfig.port || 22,
              username: hostConfig.username,
              tryKeyboard: true,
              readyTimeout: 60000,
              keepaliveInterval: 30000,
              keepaliveCountMax: 120,
              tcpKeepAlive: true,
              tcpKeepAliveInitialDelay: 30000,
            };
            if (
              resolvedCredentials.authType === "password" &&
              resolvedCredentials.password
            ) {
              config.password = resolvedCredentials.password;
            } else if (
              resolvedCredentials.authType === "key" &&
              resolvedCredentials.sshKey
            ) {
              // ssh2 requires LF-delimited private keys.
              const cleanKey = resolvedCredentials.sshKey
                .trim()
                .replace(/\r\n/g, "\n")
                .replace(/\r/g, "\n");
              config.privateKey = Buffer.from(cleanKey, "utf8");
              if (resolvedCredentials.keyPassword) {
                config.passphrase = resolvedCredentials.keyPassword;
              }
            }
            // When jump hosts are configured, tunnel the final connection
            // through the chain endpoint returned by createJumpHostChain.
            if (hostConfig.jumpHosts && hostConfig.jumpHosts.length > 0) {
              const jumpClient = await createJumpHostChain(
                hostConfig.jumpHosts,
                userId,
              );
              if (jumpClient) {
                const stream = await new Promise<any>((resolve, reject) => {
                  jumpClient.forwardOut(
                    "127.0.0.1",
                    0,
                    hostConfig.ip,
                    hostConfig.port || 22,
                    (err, stream) => {
                      if (err) return reject(err);
                      resolve(stream);
                    },
                  );
                });
                config.sock = stream;
              }
            }
            await new Promise<void>((resolve, reject) => {
              client.on("ready", () => resolve());
              client.on("error", reject);
              client.connect(config);
            });
            sshSession = {
              client,
              stream: null,
              isConnected: true,
              containerId,
            };
            activeSessions.set(sessionId, sshSession);
            // Validate the requested shell against the container; fall back
            // to auto-detection when it is absent.
            let shellToUse = shell || "bash";
            if (shell) {
              try {
                await new Promise<void>((resolve, reject) => {
                  client.exec(
                    `docker exec ${containerId} which ${shell}`,
                    (err, stream) => {
                      if (err) return reject(err);
                      let output = "";
                      stream.on("data", (data: Buffer) => {
                        output += data.toString();
                      });
                      stream.on("close", (code: number) => {
                        if (code === 0 && output.trim()) {
                          resolve();
                        } else {
                          reject(new Error(`Shell ${shell} not available`));
                        }
                      });
                      stream.stderr.on("data", () => {
                        // Ignore stderr
                      });
                    },
                  );
                });
              } catch {
                dockerConsoleLogger.warn(
                  `Requested shell ${shell} not found, detecting available shell`,
                  {
                    operation: "shell_validation",
                    sessionId,
                    containerId,
                    requestedShell: shell,
                  },
                );
                shellToUse = await detectShell(sshSession, containerId);
              }
            } else {
              shellToUse = await detectShell(sshSession, containerId);
            }
            sshSession.shell = shellToUse;
            // NOTE(review): containerId and shellToUse are interpolated
            // into a remote command line; containerId is client-supplied
            // and unvalidated here — command-injection risk, confirm
            // sanitization is added.
            const execCommand = `docker exec -it ${containerId} /bin/${shellToUse}`;
            client.exec(
              execCommand,
              {
                pty: {
                  term: "xterm-256color",
                  cols: cols || 80,
                  rows: rows || 24,
                },
              },
              (err, stream) => {
                if (err) {
                  dockerConsoleLogger.error(
                    "Failed to create docker exec",
                    err,
                    {
                      operation: "docker_exec",
                      sessionId,
                      containerId,
                    },
                  );
                  ws.send(
                    JSON.stringify({
                      type: "error",
                      message: `Failed to start console: ${err.message}`,
                    }),
                  );
                  return;
                }
                sshSession!.stream = stream;
                // Relay container output to the browser as UTF-8 text.
                stream.on("data", (data: Buffer) => {
                  if (ws.readyState === WebSocket.OPEN) {
                    ws.send(
                      JSON.stringify({
                        type: "output",
                        data: data.toString("utf8"),
                      }),
                    );
                  }
                });
                // stderr is intentionally discarded (PTY merges streams).
                stream.stderr.on("data", (data: Buffer) => {});
                stream.on("close", () => {
                  if (ws.readyState === WebSocket.OPEN) {
                    ws.send(
                      JSON.stringify({
                        type: "disconnected",
                        message: "Console session ended",
                      }),
                    );
                  }
                  if (sshSession) {
                    sshSession.client.end();
                    activeSessions.delete(sessionId);
                  }
                });
                // Tell the client which shell was actually started.
                ws.send(
                  JSON.stringify({
                    type: "connected",
                    data: {
                      shell: shellToUse,
                      requestedShell: shell,
                      shellChanged: shell && shell !== shellToUse,
                    },
                  }),
                );
              },
            );
          } catch (error) {
            dockerConsoleLogger.error("Failed to connect to container", error, {
              operation: "console_connect",
              sessionId,
              containerId: message.data.containerId,
            });
            ws.send(
              JSON.stringify({
                type: "error",
                message:
                  error instanceof Error
                    ? error.message
                    : "Failed to connect to container",
              }),
            );
          }
          break;
        }
        case "input": {
          // Raw keystrokes forwarded straight into the PTY.
          if (sshSession && sshSession.stream) {
            sshSession.stream.write(message.data);
          }
          break;
        }
        case "resize": {
          // ssh2 setWindow takes (rows, cols) — note the argument order.
          if (sshSession && sshSession.stream) {
            const { cols, rows } = message.data;
            sshSession.stream.setWindow(rows, cols);
          }
          break;
        }
        case "disconnect": {
          if (sshSession) {
            if (sshSession.stream) {
              sshSession.stream.end();
            }
            sshSession.client.end();
            activeSessions.delete(sessionId);
            ws.send(
              JSON.stringify({
                type: "disconnected",
                message: "Disconnected from container",
              }),
            );
          }
          break;
        }
        case "ping": {
          // Application-level keepalive from the browser.
          if (ws.readyState === WebSocket.OPEN) {
            ws.send(JSON.stringify({ type: "pong" }));
          }
          break;
        }
        default:
          dockerConsoleLogger.warn("Unknown message type", {
            operation: "ws_message",
            type: message.type,
          });
      }
    } catch (error) {
      dockerConsoleLogger.error("WebSocket message error", error, {
        operation: "ws_message",
        sessionId,
      });
      ws.send(
        JSON.stringify({
          type: "error",
          message: error instanceof Error ? error.message : "An error occurred",
        }),
      );
    }
  });
  // Tear down the SSH session when the socket closes or errors.
  ws.on("close", () => {
    if (sshSession) {
      if (sshSession.stream) {
        sshSession.stream.end();
      }
      sshSession.client.end();
      activeSessions.delete(sessionId);
    }
  });
  ws.on("error", (error) => {
    dockerConsoleLogger.error("WebSocket error", error, {
      operation: "ws_error",
      sessionId,
    });
    if (sshSession) {
      if (sshSession.stream) {
        sshSession.stream.end();
      }
      sshSession.client.end();
      activeSessions.delete(sessionId);
    }
  });
});
// Graceful shutdown: close every live console stream and SSH client,
// drop the session registry, then shut the WebSocket server down before
// exiting the process.
process.on("SIGTERM", () => {
  for (const [, session] of activeSessions) {
    session.stream?.end();
    session.client.end();
  }
  activeSessions.clear();
  wss.close(() => {
    process.exit(0);
  });
});

1904
src/backend/ssh/docker.ts Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -10,6 +10,7 @@ import { fileLogger, sshLogger } from "../utils/logger.js";
import { SimpleDBOps } from "../utils/simple-db-ops.js";
import { AuthManager } from "../utils/auth-manager.js";
import type { AuthenticatedRequest } from "../../types/index.js";
import { createSocks5Connection } from "../utils/socks5-helper.js";
function isExecutableFile(permissions: string, fileName: string): boolean {
const hasExecutePermission =
@@ -43,6 +44,58 @@ function isExecutableFile(permissions: string, fileName: string): boolean {
);
}
// Renders a numeric st_mode as an ls-style permission string,
// e.g. 0o040755 -> "drwxr-xr-x". Type prefix: "d" directory, "l"
// symlink, "-" anything else; then rwx triads for owner/group/other.
function modeToPermissions(mode: number): string {
  const S_IFMT = 0o170000;
  const S_IFDIR = 0o040000;
  const S_IFLNK = 0o120000;
  const fileType = mode & S_IFMT;
  let result = fileType === S_IFDIR ? "d" : fileType === S_IFLNK ? "l" : "-";
  // Highest permission bit first: owner r/w/x, group r/w/x, other r/w/x.
  const bits: Array<[number, string]> = [
    [0o400, "r"],
    [0o200, "w"],
    [0o100, "x"],
    [0o040, "r"],
    [0o020, "w"],
    [0o010, "x"],
    [0o004, "r"],
    [0o002, "w"],
    [0o001, "x"],
  ];
  for (const [bit, ch] of bits) {
    result += mode & bit ? ch : "-";
  }
  return result;
}
// Formats a Unix timestamp (seconds) the way `ls -l` does: files modified
// within roughly the last 6 months show "Mon DD HH:MM", older files show
// "Mon DD YYYY". Uses the server's local timezone.
function formatMtime(mtime: number): string {
  const MONTH_NAMES = [
    "Jan",
    "Feb",
    "Mar",
    "Apr",
    "May",
    "Jun",
    "Jul",
    "Aug",
    "Sep",
    "Oct",
    "Nov",
    "Dec",
  ];
  const when = new Date(mtime * 1000);
  const month = MONTH_NAMES[when.getMonth()];
  // Day-of-month is space-padded to two characters, matching ls output.
  const day = String(when.getDate()).padStart(2, " ");
  const cutoff = Date.now() - 180 * 24 * 60 * 60 * 1000;
  if (when.getTime() > cutoff) {
    const hh = String(when.getHours()).padStart(2, "0");
    const mm = String(when.getMinutes()).padStart(2, "0");
    return `${month} ${day} ${hh}:${mm}`;
  }
  return `${month} ${day} ${when.getFullYear()}`;
}
const app = express();
app.use(
@@ -278,6 +331,7 @@ interface PendingTOTPSession {
prompts?: Array<{ prompt: string; echo: boolean }>;
totpPromptIndex?: number;
resolvedPassword?: string;
totpAttempts: number;
}
const sshSessions: Record<string, SSHSession> = {};
@@ -341,6 +395,27 @@ function getMimeType(fileName: string): string {
return mimeTypes[ext || ""] || "application/octet-stream";
}
// Heuristic check for binary content over the first 8 KiB of a buffer.
// Returns true when the sample contains a NUL byte (near-certain binary
// marker) or when more than 1% of the sampled bytes are control
// characters other than tab/LF/CR. Empty buffers are treated as text.
//
// Fix: the original reused a single `nullBytes` counter for both NUL
// bytes and other control characters (`++nullBytes` inside the
// control-char branch), so one NUL counted twice, any two control
// characters tripped an early `return true`, and the final ratio mixed
// both signals. NUL detection and the control-character ratio are now
// independent.
function detectBinary(buffer: Buffer): boolean {
  if (buffer.length === 0) return false;
  const sampleSize = Math.min(buffer.length, 8192);
  let controlBytes = 0;
  for (let i = 0; i < sampleSize; i++) {
    const byte = buffer[i];
    if (byte === 0) {
      // Text files essentially never contain NUL; classify immediately.
      return true;
    }
    if (byte < 32 && byte !== 9 && byte !== 10 && byte !== 13) {
      controlBytes++;
    }
  }
  return controlBytes / sampleSize > 0.01;
}
app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
const {
sessionId,
@@ -356,6 +431,12 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
userProvidedPassword,
forceKeyboardInteractive,
jumpHosts,
useSocks5,
socks5Host,
socks5Port,
socks5Username,
socks5Password,
socks5ProxyChain,
} = req.body;
const userId = (req as AuthenticatedRequest).userId;
@@ -382,6 +463,15 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
if (sshSessions[sessionId]?.isConnected) {
cleanupSession(sessionId);
}
// Clean up any stale pending TOTP sessions
if (pendingTOTPSessions[sessionId]) {
try {
pendingTOTPSessions[sessionId].client.end();
} catch {}
delete pendingTOTPSessions[sessionId];
}
const client = new SSHClient();
let resolvedCredentials = { password, sshKey, keyPassword, authType };
@@ -545,9 +635,7 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
.json({ error: "Password required for password authentication" });
}
if (!forceKeyboardInteractive) {
config.password = resolvedCredentials.password;
}
config.password = resolvedCredentials.password;
} else if (resolvedCredentials.authType === "none") {
} else {
fileLogger.warn(
@@ -713,6 +801,7 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
prompts,
totpPromptIndex,
resolvedPassword: resolvedCredentials.password,
totpAttempts: 0,
};
res.json({
@@ -785,6 +874,7 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
prompts,
totpPromptIndex: passwordPromptIndex,
resolvedPassword: resolvedCredentials.password,
totpAttempts: 0,
};
res.json({
@@ -808,7 +898,47 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
},
);
if (jumpHosts && jumpHosts.length > 0 && userId) {
if (
useSocks5 &&
(socks5Host || (socks5ProxyChain && (socks5ProxyChain as any).length > 0))
) {
try {
const socks5Socket = await createSocks5Connection(ip, port, {
useSocks5,
socks5Host,
socks5Port,
socks5Username,
socks5Password,
socks5ProxyChain: socks5ProxyChain as any,
});
if (socks5Socket) {
config.sock = socks5Socket;
client.connect(config);
return;
} else {
fileLogger.error("SOCKS5 socket is null for SFTP", undefined, {
operation: "sftp_socks5_socket_null",
sessionId,
});
}
} catch (socks5Error) {
fileLogger.error("SOCKS5 connection failed", socks5Error, {
operation: "socks5_connect",
sessionId,
hostId,
proxyHost: socks5Host,
proxyPort: socks5Port || 1080,
});
return res.status(500).json({
error:
"SOCKS5 proxy connection failed: " +
(socks5Error instanceof Error
? socks5Error.message
: "Unknown error"),
});
}
} else if (jumpHosts && jumpHosts.length > 0 && userId) {
try {
const jumpClient = await createJumpHostChain(jumpHosts, userId);
@@ -891,9 +1021,7 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => {
delete pendingTOTPSessions[sessionId];
try {
session.client.end();
} catch (error) {
sshLogger.debug("Operation failed, continuing", { error });
}
} catch (error) {}
fileLogger.warn("TOTP session timeout before code submission", {
operation: "file_totp_verify",
sessionId,
@@ -1076,88 +1204,187 @@ app.get("/ssh/file_manager/ssh/listFiles", (req, res) => {
sshConn.lastActive = Date.now();
sshConn.activeOperations++;
const escapedPath = sshPath.replace(/'/g, "'\"'\"'");
sshConn.client.exec(`command ls -la '${escapedPath}'`, (err, stream) => {
if (err) {
sshConn.activeOperations--;
fileLogger.error("SSH listFiles error:", err);
return res.status(500).json({ error: err.message });
}
let data = "";
let errorData = "";
stream.on("data", (chunk: Buffer) => {
data += chunk.toString();
});
stream.stderr.on("data", (chunk: Buffer) => {
errorData += chunk.toString();
});
stream.on("close", (code) => {
sshConn.activeOperations--;
if (code !== 0) {
fileLogger.error(
`SSH listFiles command failed with code ${code}: ${errorData.replace(/\n/g, " ").trim()}`,
);
return res.status(500).json({ error: `Command failed: ${errorData}` });
}
const lines = data.split("\n").filter((line) => line.trim());
const files = [];
for (let i = 1; i < lines.length; i++) {
const line = lines[i];
const parts = line.split(/\s+/);
if (parts.length >= 9) {
const permissions = parts[0];
const owner = parts[2];
const group = parts[3];
const size = parseInt(parts[4], 10);
let dateStr = "";
const nameStartIndex = 8;
if (parts[5] && parts[6] && parts[7]) {
dateStr = `${parts[5]} ${parts[6]} ${parts[7]}`;
}
const name = parts.slice(nameStartIndex).join(" ");
const isDirectory = permissions.startsWith("d");
const isLink = permissions.startsWith("l");
if (name === "." || name === "..") continue;
let actualName = name;
let linkTarget = undefined;
if (isLink && name.includes(" -> ")) {
const linkParts = name.split(" -> ");
actualName = linkParts[0];
linkTarget = linkParts[1];
}
files.push({
name: actualName,
type: isDirectory ? "directory" : isLink ? "link" : "file",
size: isDirectory ? undefined : size,
modified: dateStr,
permissions,
owner,
group,
linkTarget,
path: `${sshPath.endsWith("/") ? sshPath : sshPath + "/"}${actualName}`,
executable:
!isDirectory && !isLink
? isExecutableFile(permissions, actualName)
: false,
});
const trySFTP = () => {
try {
sshConn.client.sftp((err, sftp) => {
if (err) {
fileLogger.warn(
`SFTP failed for listFiles, trying fallback: ${err.message}`,
);
tryFallbackMethod();
return;
}
sftp.readdir(sshPath, (readdirErr, list) => {
if (readdirErr) {
fileLogger.warn(
`SFTP readdir failed, trying fallback: ${readdirErr.message}`,
);
tryFallbackMethod();
return;
}
const symlinks: Array<{ index: number; path: string }> = [];
const files: Array<{
name: string;
type: string;
size: number | undefined;
modified: string;
permissions: string;
owner: string;
group: string;
linkTarget: string | undefined;
path: string;
executable: boolean;
}> = [];
for (const entry of list) {
if (entry.filename === "." || entry.filename === "..") continue;
const attrs = entry.attrs;
const permissions = modeToPermissions(attrs.mode);
const isDirectory = attrs.isDirectory();
const isLink = attrs.isSymbolicLink();
const fileEntry = {
name: entry.filename,
type: isDirectory ? "directory" : isLink ? "link" : "file",
size: isDirectory ? undefined : attrs.size,
modified: formatMtime(attrs.mtime),
permissions,
owner: String(attrs.uid),
group: String(attrs.gid),
linkTarget: undefined as string | undefined,
path: `${sshPath.endsWith("/") ? sshPath : sshPath + "/"}${entry.filename}`,
executable:
!isDirectory && !isLink
? isExecutableFile(permissions, entry.filename)
: false,
};
if (isLink) {
symlinks.push({ index: files.length, path: fileEntry.path });
}
files.push(fileEntry);
}
if (symlinks.length === 0) {
sshConn.activeOperations--;
return res.json({ files, path: sshPath });
}
let resolved = 0;
for (const link of symlinks) {
sftp.readlink(link.path, (linkErr, target) => {
resolved++;
if (!linkErr && target) {
files[link.index].linkTarget = target;
}
if (resolved === symlinks.length) {
sshConn.activeOperations--;
res.json({ files, path: sshPath });
}
});
}
});
});
} catch (sftpErr: unknown) {
const errMsg =
sftpErr instanceof Error ? sftpErr.message : "Unknown error";
fileLogger.warn(`SFTP connection error, trying fallback: ${errMsg}`);
tryFallbackMethod();
}
};
// Fallback directory listing used when the SFTP subsystem is unavailable:
// runs `ls -la` over the existing SSH connection and parses its text output
// into the same file-entry shape the SFTP path produces.
const tryFallbackMethod = () => {
// Wrap the path in single quotes for the shell; embedded single quotes are
// escaped with the standard '"'"' sequence.
const escapedPath = sshPath.replace(/'/g, "'\"'\"'");
sshConn.client.exec(`command ls -la '${escapedPath}'`, (err, stream) => {
if (err) {
sshConn.activeOperations--;
fileLogger.error("SSH listFiles error:", err);
return res.status(500).json({ error: err.message });
}
// NOTE(review): this sends a response before any `ls` output has arrived,
// and the same response is sent again in the "close" handler below —
// this looks like a leftover/duplicate line (possibly a diff artifact);
// confirm against the full file and remove one of the two sends.
res.json({ files, path: sshPath });
let data = "";
let errorData = "";
stream.on("data", (chunk: Buffer) => {
data += chunk.toString();
});
stream.stderr.on("data", (chunk: Buffer) => {
errorData += chunk.toString();
});
stream.on("close", (code) => {
sshConn.activeOperations--;
if (code !== 0) {
fileLogger.error(
`SSH listFiles command failed with code ${code}: ${errorData.replace(/\n/g, " ").trim()}`,
);
return res
.status(500)
.json({ error: `Command failed: ${errorData}` });
}
// Skip the first line: `ls -la` emits a "total N" summary line.
const lines = data.split("\n").filter((line) => line.trim());
const files = [];
for (let i = 1; i < lines.length; i++) {
const line = lines[i];
// Long-listing columns: perms, links, owner, group, size, date(3), name...
const parts = line.split(/\s+/);
if (parts.length >= 9) {
const permissions = parts[0];
const owner = parts[2];
const group = parts[3];
const size = parseInt(parts[4], 10);
let dateStr = "";
const nameStartIndex = 8;
if (parts[5] && parts[6] && parts[7]) {
dateStr = `${parts[5]} ${parts[6]} ${parts[7]}`;
}
// Re-join the remainder so file names containing spaces survive the split.
const name = parts.slice(nameStartIndex).join(" ");
const isDirectory = permissions.startsWith("d");
const isLink = permissions.startsWith("l");
if (name === "." || name === "..") continue;
let actualName = name;
let linkTarget = undefined;
// Symlinks are rendered by ls as "name -> target"; split them apart.
if (isLink && name.includes(" -> ")) {
const linkParts = name.split(" -> ");
actualName = linkParts[0];
linkTarget = linkParts[1];
}
files.push({
name: actualName,
type: isDirectory ? "directory" : isLink ? "link" : "file",
// Directories report no size, matching the SFTP code path.
size: isDirectory ? undefined : size,
modified: dateStr,
permissions,
owner,
group,
linkTarget,
path: `${sshPath.endsWith("/") ? sshPath : sshPath + "/"}${actualName}`,
executable:
!isDirectory && !isLink
? isExecutableFile(permissions, actualName)
: false,
});
}
}
res.json({ files, path: sshPath });
});
});
// NOTE(review): the brace/paren balance around the next line looks off
// relative to the scope visible here — verify against the full file.
});
};
trySFTP();
});
app.get("/ssh/file_manager/ssh/identifySymlink", (req, res) => {
@@ -1313,11 +1540,11 @@ app.get("/ssh/file_manager/ssh/readFile", (req, res) => {
return res.status(500).json({ error: err.message });
}
let data = "";
let binaryData = Buffer.alloc(0);
let errorData = "";
stream.on("data", (chunk: Buffer) => {
data += chunk.toString();
binaryData = Buffer.concat([binaryData, chunk]);
});
stream.stderr.on("data", (chunk: Buffer) => {
@@ -1341,7 +1568,23 @@ app.get("/ssh/file_manager/ssh/readFile", (req, res) => {
});
}
res.json({ content: data, path: filePath });
const isBinary = detectBinary(binaryData);
if (isBinary) {
const base64Content = binaryData.toString("base64");
res.json({
content: base64Content,
path: filePath,
encoding: "base64",
});
} else {
const textContent = binaryData.toString("utf8");
res.json({
content: textContent,
path: filePath,
encoding: "utf8",
});
}
});
});
});
@@ -1385,7 +1628,16 @@ app.post("/ssh/file_manager/ssh/writeFile", async (req, res) => {
let fileBuffer;
try {
if (typeof content === "string") {
fileBuffer = Buffer.from(content, "utf8");
try {
const testBuffer = Buffer.from(content, "base64");
if (testBuffer.toString("base64") === content) {
fileBuffer = testBuffer;
} else {
fileBuffer = Buffer.from(content, "utf8");
}
} catch {
fileBuffer = Buffer.from(content, "utf8");
}
} else if (Buffer.isBuffer(content)) {
fileBuffer = content;
} else {
@@ -1461,7 +1713,22 @@ app.post("/ssh/file_manager/ssh/writeFile", async (req, res) => {
const tryFallbackMethod = () => {
try {
const base64Content = Buffer.from(content, "utf8").toString("base64");
let contentBuffer: Buffer;
if (typeof content === "string") {
try {
contentBuffer = Buffer.from(content, "base64");
if (contentBuffer.toString("base64") !== content) {
contentBuffer = Buffer.from(content, "utf8");
}
} catch {
contentBuffer = Buffer.from(content, "utf8");
}
} else if (Buffer.isBuffer(content)) {
contentBuffer = content;
} else {
contentBuffer = Buffer.from(content);
}
const base64Content = contentBuffer.toString("base64");
const escapedPath = filePath.replace(/'/g, "'\"'\"'");
const writeCommand = `echo '${base64Content}' | base64 -d > '${escapedPath}' && echo "SUCCESS"`;
@@ -1579,7 +1846,7 @@ app.post("/ssh/file_manager/ssh/uploadFile", async (req, res) => {
let fileBuffer;
try {
if (typeof content === "string") {
fileBuffer = Buffer.from(content, "utf8");
fileBuffer = Buffer.from(content, "base64");
} else if (Buffer.isBuffer(content)) {
fileBuffer = content;
} else {
@@ -1662,7 +1929,22 @@ app.post("/ssh/file_manager/ssh/uploadFile", async (req, res) => {
const tryFallbackMethod = () => {
try {
const base64Content = Buffer.from(content, "utf8").toString("base64");
let contentBuffer: Buffer;
if (typeof content === "string") {
try {
contentBuffer = Buffer.from(content, "base64");
if (contentBuffer.toString("base64") !== content) {
contentBuffer = Buffer.from(content, "utf8");
}
} catch {
contentBuffer = Buffer.from(content, "utf8");
}
} else if (Buffer.isBuffer(content)) {
contentBuffer = content;
} else {
contentBuffer = Buffer.from(content);
}
const base64Content = contentBuffer.toString("base64");
const chunkSize = 1000000;
const chunks = [];
@@ -2940,21 +3222,10 @@ app.post("/ssh/file_manager/ssh/extractArchive", async (req, res) => {
let errorOutput = "";
stream.on("data", (data: Buffer) => {
fileLogger.debug("Extract stdout", {
operation: "extract_archive",
sessionId,
output: data.toString(),
});
});
stream.on("data", (data: Buffer) => {});
stream.stderr.on("data", (data: Buffer) => {
errorOutput += data.toString();
fileLogger.debug("Extract stderr", {
operation: "extract_archive",
sessionId,
error: data.toString(),
});
});
stream.on("close", (code: number) => {
@@ -3132,21 +3403,10 @@ app.post("/ssh/file_manager/ssh/compressFiles", async (req, res) => {
let errorOutput = "";
stream.on("data", (data: Buffer) => {
fileLogger.debug("Compress stdout", {
operation: "compress_files",
sessionId,
output: data.toString(),
});
});
stream.on("data", (data: Buffer) => {});
stream.stderr.on("data", (data: Buffer) => {
errorOutput += data.toString();
fileLogger.debug("Compress stderr", {
operation: "compress_files",
sessionId,
error: data.toString(),
});
});
stream.on("close", (code: number) => {

File diff suppressed because it is too large Load Diff

View File

@@ -14,6 +14,7 @@ import { sshLogger } from "../utils/logger.js";
import { SimpleDBOps } from "../utils/simple-db-ops.js";
import { AuthManager } from "../utils/auth-manager.js";
import { UserCrypto } from "../utils/user-crypto.js";
import { createSocks5Connection } from "../utils/socks5-helper.js";
interface ConnectToHostData {
cols: number;
@@ -32,6 +33,12 @@ interface ConnectToHostData {
userId?: string;
forceKeyboardInteractive?: boolean;
jumpHosts?: Array<{ hostId: number }>;
useSocks5?: boolean;
socks5Host?: string;
socks5Port?: number;
socks5Username?: string;
socks5Password?: string;
socks5ProxyChain?: unknown;
};
initialPath?: string;
executeCommand?: string;
@@ -130,10 +137,12 @@ async function createJumpHostChain(
const clients: Client[] = [];
try {
for (let i = 0; i < jumpHosts.length; i++) {
const jumpHostConfig = await resolveJumpHost(jumpHosts[i].hostId, userId);
const jumpHostConfigs = await Promise.all(
jumpHosts.map((jh) => resolveJumpHost(jh.hostId, userId)),
);
if (!jumpHostConfig) {
for (let i = 0; i < jumpHostConfigs.length; i++) {
if (!jumpHostConfigs[i]) {
sshLogger.error(`Jump host ${i + 1} not found`, undefined, {
operation: "jump_host_chain",
hostId: jumpHosts[i].hostId,
@@ -141,6 +150,10 @@ async function createJumpHostChain(
clients.forEach((c) => c.end());
return null;
}
}
for (let i = 0; i < jumpHostConfigs.length; i++) {
const jumpHostConfig = jumpHostConfigs[i];
const jumpClient = new Client();
clients.push(jumpClient);
@@ -316,9 +329,10 @@ wss.on("connection", async (ws: WebSocket, req) => {
let sshConn: Client | null = null;
let sshStream: ClientChannel | null = null;
let pingInterval: NodeJS.Timeout | null = null;
let keyboardInteractiveFinish: ((responses: string[]) => void) | null = null;
let totpPromptSent = false;
let totpAttempts = 0;
let totpTimeout: NodeJS.Timeout | null = null;
let isKeyboardInteractive = false;
let keyboardInteractiveResponded = false;
let isConnecting = false;
@@ -435,9 +449,15 @@ wss.on("connection", async (ws: WebSocket, req) => {
case "totp_response": {
const totpData = data as TOTPResponseData;
if (keyboardInteractiveFinish && totpData?.code) {
if (totpTimeout) {
clearTimeout(totpTimeout);
totpTimeout = null;
}
const totpCode = totpData.code;
totpAttempts++;
keyboardInteractiveFinish([totpCode]);
keyboardInteractiveFinish = null;
totpPromptSent = false;
} else {
sshLogger.warn("TOTP response received but no callback available", {
operation: "totp_response_error",
@@ -458,6 +478,10 @@ wss.on("connection", async (ws: WebSocket, req) => {
case "password_response": {
const passwordData = data as TOTPResponseData;
if (keyboardInteractiveFinish && passwordData?.code) {
if (totpTimeout) {
clearTimeout(totpTimeout);
totpTimeout = null;
}
const password = passwordData.code;
keyboardInteractiveFinish([password]);
keyboardInteractiveFinish = null;
@@ -597,6 +621,13 @@ wss.on("connection", async (ws: WebSocket, req) => {
isConnecting,
isConnected,
});
ws.send(
JSON.stringify({
type: "error",
message: "Connection already in progress",
code: "DUPLICATE_CONNECTION",
}),
);
return;
}
@@ -730,6 +761,36 @@ wss.on("connection", async (ws: WebSocket, req) => {
return;
}
sshLogger.info("Creating shell", {
operation: "ssh_shell_start",
hostId: id,
ip,
port,
username,
});
let shellCallbackReceived = false;
const shellTimeout = setTimeout(() => {
if (!shellCallbackReceived && isShellInitializing) {
sshLogger.error("Shell creation timeout - no response from server", {
operation: "ssh_shell_timeout",
hostId: id,
ip,
port,
username,
});
isShellInitializing = false;
ws.send(
JSON.stringify({
type: "error",
message:
"Shell creation timeout. The server may not support interactive shells or the connection was interrupted.",
}),
);
cleanupSSH(connectionTimeout);
}
}, 15000);
conn.shell(
{
rows: data.rows,
@@ -737,6 +798,8 @@ wss.on("connection", async (ws: WebSocket, req) => {
term: "xterm-256color",
} as PseudoTtyOptions,
(err, stream) => {
shellCallbackReceived = true;
clearTimeout(shellTimeout);
isShellInitializing = false;
if (err) {
@@ -753,6 +816,7 @@ wss.on("connection", async (ws: WebSocket, req) => {
message: "Shell error: " + err.message,
}),
);
cleanupSSH(connectionTimeout);
return;
}
@@ -802,8 +866,6 @@ wss.on("connection", async (ws: WebSocket, req) => {
);
});
setupPingInterval();
if (initialPath && initialPath.trim() !== "") {
const cdCommand = `cd "${initialPath.replace(/"/g, '\\"')}" && pwd\n`;
stream.write(cdCommand);
@@ -940,6 +1002,31 @@ wss.on("connection", async (ws: WebSocket, req) => {
sshConn.on("close", () => {
clearTimeout(connectionTimeout);
if (isShellInitializing || (isConnected && !sshStream)) {
sshLogger.warn("SSH connection closed during shell initialization", {
operation: "ssh_close_during_init",
hostId: id,
ip,
port,
username,
isShellInitializing,
hasStream: !!sshStream,
});
ws.send(
JSON.stringify({
type: "error",
message:
"Connection closed during shell initialization. The server may have rejected the shell request.",
}),
);
} else if (!sshStream) {
ws.send(
JSON.stringify({
type: "disconnected",
message: "Connection closed",
}),
);
}
cleanupSSH(connectionTimeout);
});
@@ -987,6 +1074,25 @@ wss.on("connection", async (ws: WebSocket, req) => {
finish(responses);
};
totpTimeout = setTimeout(() => {
if (keyboardInteractiveFinish) {
keyboardInteractiveFinish = null;
totpPromptSent = false;
sshLogger.warn("TOTP prompt timeout", {
operation: "totp_timeout",
hostId: id,
});
ws.send(
JSON.stringify({
type: "error",
message: "TOTP verification timeout. Please reconnect.",
}),
);
cleanupSSH(connectionTimeout);
}
}, 180000);
ws.send(
JSON.stringify({
type: "totp_required",
@@ -1021,6 +1127,24 @@ wss.on("connection", async (ws: WebSocket, req) => {
finish(responses);
};
totpTimeout = setTimeout(() => {
if (keyboardInteractiveFinish) {
keyboardInteractiveFinish = null;
keyboardInteractiveResponded = false;
sshLogger.warn("Password prompt timeout", {
operation: "password_timeout",
hostId: id,
});
ws.send(
JSON.stringify({
type: "error",
message: "Password verification timeout. Please reconnect.",
}),
);
cleanupSSH(connectionTimeout);
}
}, 180000);
ws.send(
JSON.stringify({
type: "password_required",
@@ -1128,9 +1252,7 @@ wss.on("connection", async (ws: WebSocket, req) => {
return;
}
if (!hostConfig.forceKeyboardInteractive) {
connectConfig.password = resolvedCredentials.password;
}
connectConfig.password = resolvedCredentials.password;
} else if (
resolvedCredentials.authType === "key" &&
resolvedCredentials.key
@@ -1183,6 +1305,49 @@ wss.on("connection", async (ws: WebSocket, req) => {
return;
}
if (
hostConfig.useSocks5 &&
(hostConfig.socks5Host ||
(hostConfig.socks5ProxyChain &&
(hostConfig.socks5ProxyChain as any).length > 0))
) {
try {
const socks5Socket = await createSocks5Connection(ip, port, {
useSocks5: hostConfig.useSocks5,
socks5Host: hostConfig.socks5Host,
socks5Port: hostConfig.socks5Port,
socks5Username: hostConfig.socks5Username,
socks5Password: hostConfig.socks5Password,
socks5ProxyChain: hostConfig.socks5ProxyChain as any,
});
if (socks5Socket) {
connectConfig.sock = socks5Socket;
sshConn.connect(connectConfig);
return;
}
} catch (socks5Error) {
sshLogger.error("SOCKS5 connection failed", socks5Error, {
operation: "socks5_connect",
hostId: id,
proxyHost: hostConfig.socks5Host,
proxyPort: hostConfig.socks5Port || 1080,
});
ws.send(
JSON.stringify({
type: "error",
message:
"SOCKS5 proxy connection failed: " +
(socks5Error instanceof Error
? socks5Error.message
: "Unknown error"),
}),
);
cleanupSSH(connectionTimeout);
return;
}
}
if (
hostConfig.jumpHosts &&
hostConfig.jumpHosts.length > 0 &&
@@ -1279,9 +1444,9 @@ wss.on("connection", async (ws: WebSocket, req) => {
clearTimeout(timeoutId);
}
if (pingInterval) {
clearInterval(pingInterval);
pingInterval = null;
if (totpTimeout) {
clearTimeout(totpTimeout);
totpTimeout = null;
}
if (sshStream) {
@@ -1309,35 +1474,21 @@ wss.on("connection", async (ws: WebSocket, req) => {
}
totpPromptSent = false;
totpAttempts = 0;
isKeyboardInteractive = false;
keyboardInteractiveResponded = false;
keyboardInteractiveFinish = null;
isConnecting = false;
isConnected = false;
setTimeout(() => {
isCleaningUp = false;
}, 100);
isCleaningUp = false;
}
function setupPingInterval() {
pingInterval = setInterval(() => {
if (sshConn && sshStream) {
try {
sshStream.write("\x00");
} catch (e: unknown) {
sshLogger.error(
"SSH keepalive failed: " +
(e instanceof Error ? e.message : "Unknown error"),
);
cleanupSSH();
}
} else if (!sshConn || !sshStream) {
if (pingInterval) {
clearInterval(pingInterval);
pingInterval = null;
}
}
}, 30000);
}
// Note: PTY-level keepalive (writing \x00 to the stream) was removed.
// It was causing ^@ characters to appear in terminals with echoctl enabled.
// SSH-level keepalive is configured via connectConfig (keepaliveInterval,
// keepaliveCountMax, tcpKeepAlive), which handles connection health monitoring
// without producing visible output on the terminal.
//
// See: https://github.com/Termix-SSH/Support/issues/232
// See: https://github.com/Termix-SSH/Support/issues/309
});

View File

@@ -1,4 +1,4 @@
import express from "express";
import express, { type Response } from "express";
import cors from "cors";
import cookieParser from "cookie-parser";
import { Client } from "ssh2";
@@ -13,12 +13,16 @@ import type {
TunnelStatus,
VerificationData,
ErrorType,
AuthenticatedRequest,
} from "../../types/index.js";
import { CONNECTION_STATES } from "../../types/index.js";
import { tunnelLogger, sshLogger } from "../utils/logger.js";
import { SystemCrypto } from "../utils/system-crypto.js";
import { SimpleDBOps } from "../utils/simple-db-ops.js";
import { DataCrypto } from "../utils/data-crypto.js";
import { createSocks5Connection } from "../utils/socks5-helper.js";
import { AuthManager } from "../utils/auth-manager.js";
import { PermissionManager } from "../utils/permission-manager.js";
const app = express();
app.use(
@@ -63,6 +67,10 @@ app.use(
app.use(cookieParser());
app.use(express.json());
const authManager = AuthManager.getInstance();
const permissionManager = PermissionManager.getInstance();
const authenticateJWT = authManager.createAuthMiddleware();
const activeTunnels = new Map<string, Client>();
const retryCounters = new Map<string, number>();
const connectionStatus = new Map<string, TunnelStatus>();
@@ -77,6 +85,7 @@ const tunnelConnecting = new Set<string>();
const tunnelConfigs = new Map<string, TunnelConfig>();
const activeTunnelProcesses = new Map<string, ChildProcess>();
const pendingTunnelOperations = new Map<string, Promise<void>>();
function broadcastTunnelStatus(tunnelName: string, status: TunnelStatus): void {
if (
@@ -154,10 +163,75 @@ function getTunnelMarker(tunnelName: string) {
return `TUNNEL_MARKER_${tunnelName.replace(/[^a-zA-Z0-9]/g, "_")}`;
}
function cleanupTunnelResources(
/**
 * Builds the canonical "::"-delimited tunnel identifier from its components.
 *
 * The resulting shape is
 * `hostId::tunnelIndex::displayName::sourcePort::endpointHost::endpointPort`,
 * which `parseTunnelName` decodes back into fields.
 *
 * @param hostId - Database id of the source host.
 * @param tunnelIndex - Index of the tunnel within that host's tunnel list.
 * @param displayName - Human-readable tunnel name.
 * @param sourcePort - Local port being forwarded.
 * @param endpointHost - Remote endpoint host label.
 * @param endpointPort - Remote endpoint port.
 * @returns The joined canonical tunnel name.
 */
function normalizeTunnelName(
  hostId: number,
  tunnelIndex: number,
  displayName: string,
  sourcePort: number,
  endpointHost: string,
  endpointPort: number,
): string {
  const segments = [
    hostId,
    tunnelIndex,
    displayName,
    sourcePort,
    endpointHost,
    endpointPort,
  ];
  return segments.join("::");
}
/**
 * Decodes a tunnel name produced by `normalizeTunnelName` back into its
 * component fields.
 *
 * Modern names use the 6-part "::"-delimited format
 * `hostId::tunnelIndex::displayName::sourcePort::endpointHost::endpointPort`.
 * Anything else is treated as a legacy "_"-delimited name, for which only a
 * best-effort positional parse is possible (hostId/tunnelIndex are unknown).
 *
 * @param tunnelName - Tunnel identifier to decode.
 * @returns Parsed fields; `isLegacyFormat` is true when the name did not
 *          match the modern 6-part format.
 */
function parseTunnelName(tunnelName: string): {
  hostId?: number;
  tunnelIndex?: number;
  displayName: string;
  sourcePort: string;
  endpointHost: string;
  endpointPort: string;
  isLegacyFormat: boolean;
} {
  const parts = tunnelName.split("::");

  if (parts.length === 6) {
    return {
      // Always pass an explicit radix so numeric parsing cannot be surprised
      // by unusual prefixes (fixes parseInt-without-radix).
      hostId: Number.parseInt(parts[0], 10),
      tunnelIndex: Number.parseInt(parts[1], 10),
      displayName: parts[2],
      sourcePort: parts[3],
      endpointHost: parts[4],
      endpointPort: parts[5],
      isLegacyFormat: false,
    };
  }

  tunnelLogger.warn(`Legacy tunnel name format: ${tunnelName}`);

  // Legacy names were "_"-joined; recover the trailing fields positionally
  // from the end so underscores inside the display name do not shift them.
  const legacyParts = tunnelName.split("_");

  return {
    displayName: legacyParts[0] || "unknown",
    sourcePort: legacyParts[legacyParts.length - 3] || "0",
    endpointHost: legacyParts[legacyParts.length - 2] || "unknown",
    endpointPort: legacyParts[legacyParts.length - 1] || "0",
    isLegacyFormat: true,
  };
}
/**
 * Cross-checks a tunnel name against the tunnel configuration that claims it.
 *
 * Legacy-format names cannot be reliably verified, so they are accepted as-is.
 * Modern names must agree with the config on host id, tunnel index, source
 * port, endpoint host, and endpoint port.
 *
 * @param tunnelName - Canonical or legacy tunnel identifier.
 * @param tunnelConfig - Configuration the name should correspond to.
 * @returns True when the name is legacy or matches the configuration.
 */
function validateTunnelConfig(
  tunnelName: string,
  tunnelConfig: TunnelConfig,
): boolean {
  const parsed = parseTunnelName(tunnelName);

  // Legacy names carry too little information to cross-check; accept them.
  if (parsed.isLegacyFormat) {
    return true;
  }

  const fieldMatches = [
    parsed.hostId === tunnelConfig.sourceHostId,
    parsed.tunnelIndex === tunnelConfig.tunnelIndex,
    String(parsed.sourcePort) === String(tunnelConfig.sourcePort),
    parsed.endpointHost === tunnelConfig.endpointHost,
    String(parsed.endpointPort) === String(tunnelConfig.endpointPort),
  ];
  return fieldMatches.every(Boolean);
}
async function cleanupTunnelResources(
tunnelName: string,
forceCleanup = false,
): void {
): Promise<void> {
if (cleanupInProgress.has(tunnelName)) {
return;
}
@@ -170,13 +244,16 @@ function cleanupTunnelResources(
const tunnelConfig = tunnelConfigs.get(tunnelName);
if (tunnelConfig) {
killRemoteTunnelByMarker(tunnelConfig, tunnelName, (err) => {
cleanupInProgress.delete(tunnelName);
if (err) {
tunnelLogger.error(
`Failed to kill remote tunnel for '${tunnelName}': ${err.message}`,
);
}
await new Promise<void>((resolve) => {
killRemoteTunnelByMarker(tunnelConfig, tunnelName, (err) => {
cleanupInProgress.delete(tunnelName);
if (err) {
tunnelLogger.error(
`Failed to kill remote tunnel for '${tunnelName}': ${err.message}`,
);
}
resolve();
});
});
} else {
cleanupInProgress.delete(tunnelName);
@@ -272,11 +349,11 @@ function resetRetryState(tunnelName: string): void {
});
}
function handleDisconnect(
async function handleDisconnect(
tunnelName: string,
tunnelConfig: TunnelConfig | null,
shouldRetry = true,
): void {
): Promise<void> {
if (tunnelVerifications.has(tunnelName)) {
try {
const verification = tunnelVerifications.get(tunnelName);
@@ -286,7 +363,11 @@ function handleDisconnect(
tunnelVerifications.delete(tunnelName);
}
cleanupTunnelResources(tunnelName);
while (cleanupInProgress.has(tunnelName)) {
await new Promise((resolve) => setTimeout(resolve, 100));
}
await cleanupTunnelResources(tunnelName);
if (manualDisconnects.has(tunnelName)) {
resetRetryState(tunnelName);
@@ -490,43 +571,76 @@ async function connectSSHTunnel(
authMethod: tunnelConfig.sourceAuthMethod,
};
if (tunnelConfig.sourceCredentialId && tunnelConfig.sourceUserId) {
try {
const userDataKey = DataCrypto.getUserDataKey(tunnelConfig.sourceUserId);
if (userDataKey) {
const credentials = await SimpleDBOps.select(
getDb()
.select()
.from(sshCredentials)
.where(
and(
eq(sshCredentials.id, tunnelConfig.sourceCredentialId),
eq(sshCredentials.userId, tunnelConfig.sourceUserId),
),
),
"ssh_credentials",
tunnelConfig.sourceUserId,
);
const effectiveUserId =
tunnelConfig.requestingUserId || tunnelConfig.sourceUserId;
if (credentials.length > 0) {
const credential = credentials[0];
resolvedSourceCredentials = {
password: credential.password as string | undefined,
sshKey: (credential.private_key ||
credential.privateKey ||
credential.key) as string | undefined,
keyPassword: (credential.key_password || credential.keyPassword) as
| string
| undefined,
keyType: (credential.key_type || credential.keyType) as
| string
| undefined,
authMethod: (credential.auth_type || credential.authType) as string,
};
if (tunnelConfig.sourceCredentialId && effectiveUserId) {
try {
if (
tunnelConfig.requestingUserId &&
tunnelConfig.requestingUserId !== tunnelConfig.sourceUserId
) {
const { SharedCredentialManager } =
await import("../utils/shared-credential-manager.js");
const sharedCredManager = SharedCredentialManager.getInstance();
if (tunnelConfig.sourceHostId) {
const sharedCred = await sharedCredManager.getSharedCredentialForUser(
tunnelConfig.sourceHostId,
tunnelConfig.requestingUserId,
);
if (sharedCred) {
resolvedSourceCredentials = {
password: sharedCred.password,
sshKey: sharedCred.key,
keyPassword: sharedCred.keyPassword,
keyType: sharedCred.keyType,
authMethod: sharedCred.authType,
};
} else {
const errorMessage = `Cannot connect tunnel '${tunnelName}': shared credentials not available`;
tunnelLogger.error(errorMessage);
broadcastTunnelStatus(tunnelName, {
connected: false,
status: CONNECTION_STATES.FAILED,
reason: errorMessage,
});
return;
}
}
} else {
const userDataKey = DataCrypto.getUserDataKey(effectiveUserId);
if (userDataKey) {
const credentials = await SimpleDBOps.select(
getDb()
.select()
.from(sshCredentials)
.where(eq(sshCredentials.id, tunnelConfig.sourceCredentialId)),
"ssh_credentials",
effectiveUserId,
);
if (credentials.length > 0) {
const credential = credentials[0];
resolvedSourceCredentials = {
password: credential.password as string | undefined,
sshKey: (credential.private_key ||
credential.privateKey ||
credential.key) as string | undefined,
keyPassword: (credential.key_password ||
credential.keyPassword) as string | undefined,
keyType: (credential.key_type || credential.keyType) as
| string
| undefined,
authMethod: (credential.auth_type ||
credential.authType) as string,
};
}
}
}
} catch (error) {
tunnelLogger.warn("Failed to resolve source credentials from database", {
tunnelLogger.warn("Failed to resolve source credentials", {
operation: "tunnel_connect",
tunnelName,
credentialId: tunnelConfig.sourceCredentialId,
@@ -581,12 +695,7 @@ async function connectSSHTunnel(
getDb()
.select()
.from(sshCredentials)
.where(
and(
eq(sshCredentials.id, tunnelConfig.endpointCredentialId),
eq(sshCredentials.userId, tunnelConfig.endpointUserId),
),
),
.where(eq(sshCredentials.id, tunnelConfig.endpointCredentialId)),
"ssh_credentials",
tunnelConfig.endpointUserId,
);
@@ -1016,6 +1125,51 @@ async function connectSSHTunnel(
});
}
if (
tunnelConfig.useSocks5 &&
(tunnelConfig.socks5Host ||
(tunnelConfig.socks5ProxyChain &&
tunnelConfig.socks5ProxyChain.length > 0))
) {
try {
const socks5Socket = await createSocks5Connection(
tunnelConfig.sourceIP,
tunnelConfig.sourceSSHPort,
{
useSocks5: tunnelConfig.useSocks5,
socks5Host: tunnelConfig.socks5Host,
socks5Port: tunnelConfig.socks5Port,
socks5Username: tunnelConfig.socks5Username,
socks5Password: tunnelConfig.socks5Password,
socks5ProxyChain: tunnelConfig.socks5ProxyChain,
},
);
if (socks5Socket) {
connOptions.sock = socks5Socket;
conn.connect(connOptions);
return;
}
} catch (socks5Error) {
tunnelLogger.error("SOCKS5 connection failed for tunnel", socks5Error, {
operation: "socks5_connect",
tunnelName,
proxyHost: tunnelConfig.socks5Host,
proxyPort: tunnelConfig.socks5Port || 1080,
});
broadcastTunnelStatus(tunnelName, {
connected: false,
status: CONNECTION_STATES.FAILED,
reason:
"SOCKS5 proxy connection failed: " +
(socks5Error instanceof Error
? socks5Error.message
: "Unknown error"),
});
return;
}
}
conn.connect(connOptions);
}
@@ -1042,12 +1196,7 @@ async function killRemoteTunnelByMarker(
getDb()
.select()
.from(sshCredentials)
.where(
and(
eq(sshCredentials.id, tunnelConfig.sourceCredentialId),
eq(sshCredentials.userId, tunnelConfig.sourceUserId),
),
),
.where(eq(sshCredentials.id, tunnelConfig.sourceCredentialId)),
"ssh_credentials",
tunnelConfig.sourceUserId,
);
@@ -1248,7 +1397,57 @@ async function killRemoteTunnelByMarker(
callback(err);
});
conn.connect(connOptions);
if (
tunnelConfig.useSocks5 &&
(tunnelConfig.socks5Host ||
(tunnelConfig.socks5ProxyChain &&
tunnelConfig.socks5ProxyChain.length > 0))
) {
(async () => {
try {
const socks5Socket = await createSocks5Connection(
tunnelConfig.sourceIP,
tunnelConfig.sourceSSHPort,
{
useSocks5: tunnelConfig.useSocks5,
socks5Host: tunnelConfig.socks5Host,
socks5Port: tunnelConfig.socks5Port,
socks5Username: tunnelConfig.socks5Username,
socks5Password: tunnelConfig.socks5Password,
socks5ProxyChain: tunnelConfig.socks5ProxyChain,
},
);
if (socks5Socket) {
connOptions.sock = socks5Socket;
conn.connect(connOptions);
} else {
callback(new Error("Failed to create SOCKS5 connection"));
}
} catch (socks5Error) {
tunnelLogger.error(
"SOCKS5 connection failed for killing tunnel",
socks5Error,
{
operation: "socks5_connect_kill",
tunnelName,
proxyHost: tunnelConfig.socks5Host,
proxyPort: tunnelConfig.socks5Port || 1080,
},
);
callback(
new Error(
"SOCKS5 proxy connection failed: " +
(socks5Error instanceof Error
? socks5Error.message
: "Unknown error"),
),
);
}
})();
} else {
conn.connect(connOptions);
}
}
app.get("/ssh/tunnel/status", (req, res) => {
@@ -1266,103 +1465,291 @@ app.get("/ssh/tunnel/status/:tunnelName", (req, res) => {
res.json({ name: tunnelName, status });
});
app.post("/ssh/tunnel/connect", (req, res) => {
const tunnelConfig: TunnelConfig = req.body;
app.post(
"/ssh/tunnel/connect",
authenticateJWT,
async (req: AuthenticatedRequest, res: Response) => {
const tunnelConfig: TunnelConfig = req.body;
const userId = req.userId;
if (!tunnelConfig || !tunnelConfig.name) {
return res.status(400).json({ error: "Invalid tunnel configuration" });
}
if (!userId) {
return res.status(401).json({ error: "Authentication required" });
}
const tunnelName = tunnelConfig.name;
if (!tunnelConfig || !tunnelConfig.name) {
return res.status(400).json({ error: "Invalid tunnel configuration" });
}
cleanupTunnelResources(tunnelName);
const tunnelName = tunnelConfig.name;
manualDisconnects.delete(tunnelName);
retryCounters.delete(tunnelName);
retryExhaustedTunnels.delete(tunnelName);
try {
if (!validateTunnelConfig(tunnelName, tunnelConfig)) {
tunnelLogger.error(`Tunnel config validation failed`, {
operation: "tunnel_connect",
tunnelName,
configHostId: tunnelConfig.sourceHostId,
configTunnelIndex: tunnelConfig.tunnelIndex,
});
return res.status(400).json({
error: "Tunnel configuration does not match tunnel name",
});
}
tunnelConfigs.set(tunnelName, tunnelConfig);
if (tunnelConfig.sourceHostId) {
const accessInfo = await permissionManager.canAccessHost(
userId,
tunnelConfig.sourceHostId,
"read",
);
connectSSHTunnel(tunnelConfig, 0).catch((error) => {
tunnelLogger.error(
`Failed to connect tunnel ${tunnelConfig.name}: ${error instanceof Error ? error.message : "Unknown error"}`,
);
});
if (!accessInfo.hasAccess) {
tunnelLogger.warn("User attempted tunnel connect without access", {
operation: "tunnel_connect_unauthorized",
userId,
hostId: tunnelConfig.sourceHostId,
tunnelName,
});
return res.status(403).json({ error: "Access denied to this host" });
}
res.json({ message: "Connection request received", tunnelName });
});
if (accessInfo.isShared && !accessInfo.isOwner) {
tunnelConfig.requestingUserId = userId;
}
}
app.post("/ssh/tunnel/disconnect", (req, res) => {
const { tunnelName } = req.body;
if (pendingTunnelOperations.has(tunnelName)) {
try {
await pendingTunnelOperations.get(tunnelName);
} catch (error) {
tunnelLogger.warn(`Previous tunnel operation failed`, { tunnelName });
}
}
if (!tunnelName) {
return res.status(400).json({ error: "Tunnel name required" });
}
const operation = (async () => {
manualDisconnects.delete(tunnelName);
retryCounters.delete(tunnelName);
retryExhaustedTunnels.delete(tunnelName);
manualDisconnects.add(tunnelName);
retryCounters.delete(tunnelName);
retryExhaustedTunnels.delete(tunnelName);
await cleanupTunnelResources(tunnelName);
if (activeRetryTimers.has(tunnelName)) {
clearTimeout(activeRetryTimers.get(tunnelName)!);
activeRetryTimers.delete(tunnelName);
}
if (tunnelConfigs.has(tunnelName)) {
const existingConfig = tunnelConfigs.get(tunnelName);
if (
existingConfig &&
(existingConfig.sourceHostId !== tunnelConfig.sourceHostId ||
existingConfig.tunnelIndex !== tunnelConfig.tunnelIndex)
) {
throw new Error(`Tunnel name collision detected: ${tunnelName}`);
}
}
cleanupTunnelResources(tunnelName, true);
if (!tunnelConfig.endpointIP || !tunnelConfig.endpointUsername) {
try {
const systemCrypto = SystemCrypto.getInstance();
const internalAuthToken = await systemCrypto.getInternalAuthToken();
broadcastTunnelStatus(tunnelName, {
connected: false,
status: CONNECTION_STATES.DISCONNECTED,
manualDisconnect: true,
});
const allHostsResponse = await axios.get(
"http://localhost:30001/ssh/db/host/internal/all",
{
headers: {
"Content-Type": "application/json",
"X-Internal-Auth-Token": internalAuthToken,
},
},
);
const tunnelConfig = tunnelConfigs.get(tunnelName) || null;
handleDisconnect(tunnelName, tunnelConfig, false);
const allHosts: SSHHost[] = allHostsResponse.data || [];
const endpointHost = allHosts.find(
(h) =>
h.name === tunnelConfig.endpointHost ||
`${h.username}@${h.ip}` === tunnelConfig.endpointHost,
);
setTimeout(() => {
manualDisconnects.delete(tunnelName);
}, 5000);
if (!endpointHost) {
throw new Error(
`Endpoint host '${tunnelConfig.endpointHost}' not found in database`,
);
}
res.json({ message: "Disconnect request received", tunnelName });
});
tunnelConfig.endpointIP = endpointHost.ip;
tunnelConfig.endpointSSHPort = endpointHost.port;
tunnelConfig.endpointUsername = endpointHost.username;
tunnelConfig.endpointPassword = endpointHost.password;
tunnelConfig.endpointAuthMethod = endpointHost.authType;
tunnelConfig.endpointSSHKey = endpointHost.key;
tunnelConfig.endpointKeyPassword = endpointHost.keyPassword;
tunnelConfig.endpointKeyType = endpointHost.keyType;
tunnelConfig.endpointCredentialId = endpointHost.credentialId;
tunnelConfig.endpointUserId = endpointHost.userId;
} catch (resolveError) {
tunnelLogger.error(
"Failed to resolve endpoint host",
resolveError,
{
operation: "tunnel_connect_resolve_endpoint_failed",
tunnelName,
endpointHost: tunnelConfig.endpointHost,
},
);
throw new Error(
`Failed to resolve endpoint host: ${resolveError instanceof Error ? resolveError.message : "Unknown error"}`,
);
}
}
app.post("/ssh/tunnel/cancel", (req, res) => {
const { tunnelName } = req.body;
tunnelConfigs.set(tunnelName, tunnelConfig);
await connectSSHTunnel(tunnelConfig, 0);
})();
if (!tunnelName) {
return res.status(400).json({ error: "Tunnel name required" });
}
pendingTunnelOperations.set(tunnelName, operation);
retryCounters.delete(tunnelName);
retryExhaustedTunnels.delete(tunnelName);
res.json({ message: "Connection request received", tunnelName });
if (activeRetryTimers.has(tunnelName)) {
clearTimeout(activeRetryTimers.get(tunnelName)!);
activeRetryTimers.delete(tunnelName);
}
operation.finally(() => {
pendingTunnelOperations.delete(tunnelName);
});
} catch (error) {
tunnelLogger.error("Failed to process tunnel connect", error, {
operation: "tunnel_connect",
tunnelName,
userId,
});
res.status(500).json({ error: "Failed to connect tunnel" });
}
},
);
if (countdownIntervals.has(tunnelName)) {
clearInterval(countdownIntervals.get(tunnelName)!);
countdownIntervals.delete(tunnelName);
}
app.post(
"/ssh/tunnel/disconnect",
authenticateJWT,
async (req: AuthenticatedRequest, res: Response) => {
const { tunnelName } = req.body;
const userId = req.userId;
cleanupTunnelResources(tunnelName, true);
if (!userId) {
return res.status(401).json({ error: "Authentication required" });
}
broadcastTunnelStatus(tunnelName, {
connected: false,
status: CONNECTION_STATES.DISCONNECTED,
manualDisconnect: true,
});
if (!tunnelName) {
return res.status(400).json({ error: "Tunnel name required" });
}
const tunnelConfig = tunnelConfigs.get(tunnelName) || null;
handleDisconnect(tunnelName, tunnelConfig, false);
try {
const config = tunnelConfigs.get(tunnelName);
if (config && config.sourceHostId) {
const accessInfo = await permissionManager.canAccessHost(
userId,
config.sourceHostId,
"read",
);
if (!accessInfo.hasAccess) {
return res.status(403).json({ error: "Access denied" });
}
}
setTimeout(() => {
manualDisconnects.delete(tunnelName);
}, 5000);
manualDisconnects.add(tunnelName);
retryCounters.delete(tunnelName);
retryExhaustedTunnels.delete(tunnelName);
res.json({ message: "Cancel request received", tunnelName });
});
if (activeRetryTimers.has(tunnelName)) {
clearTimeout(activeRetryTimers.get(tunnelName)!);
activeRetryTimers.delete(tunnelName);
}
await cleanupTunnelResources(tunnelName, true);
broadcastTunnelStatus(tunnelName, {
connected: false,
status: CONNECTION_STATES.DISCONNECTED,
manualDisconnect: true,
});
const tunnelConfig = tunnelConfigs.get(tunnelName) || null;
handleDisconnect(tunnelName, tunnelConfig, false);
setTimeout(() => {
manualDisconnects.delete(tunnelName);
}, 5000);
res.json({ message: "Disconnect request received", tunnelName });
} catch (error) {
tunnelLogger.error("Failed to disconnect tunnel", error, {
operation: "tunnel_disconnect",
tunnelName,
userId,
});
res.status(500).json({ error: "Failed to disconnect tunnel" });
}
},
);
// Cancel a tunnel's retry cycle and tear the tunnel down.
// Requires a valid JWT; when the tunnel maps to a known source host,
// the caller must also have at least "read" access to that host.
app.post(
  "/ssh/tunnel/cancel",
  authenticateJWT,
  async (req: AuthenticatedRequest, res: Response) => {
    const { tunnelName } = req.body;
    const userId = req.userId;
    if (!userId) {
      return res.status(401).json({ error: "Authentication required" });
    }
    if (!tunnelName) {
      return res.status(400).json({ error: "Tunnel name required" });
    }
    try {
      // Tunnels without a stored config (or without a sourceHostId) skip
      // the per-host permission check.
      const config = tunnelConfigs.get(tunnelName);
      if (config && config.sourceHostId) {
        const accessInfo = await permissionManager.canAccessHost(
          userId,
          config.sourceHostId,
          "read",
        );
        if (!accessInfo.hasAccess) {
          return res.status(403).json({ error: "Access denied" });
        }
      }
      // Stop any in-flight retry bookkeeping before tearing down.
      retryCounters.delete(tunnelName);
      retryExhaustedTunnels.delete(tunnelName);
      if (activeRetryTimers.has(tunnelName)) {
        clearTimeout(activeRetryTimers.get(tunnelName)!);
        activeRetryTimers.delete(tunnelName);
      }
      if (countdownIntervals.has(tunnelName)) {
        clearInterval(countdownIntervals.get(tunnelName)!);
        countdownIntervals.delete(tunnelName);
      }
      await cleanupTunnelResources(tunnelName, true);
      // Tell subscribed clients the tunnel was intentionally disconnected.
      broadcastTunnelStatus(tunnelName, {
        connected: false,
        status: CONNECTION_STATES.DISCONNECTED,
        manualDisconnect: true,
      });
      const tunnelConfig = tunnelConfigs.get(tunnelName) || null;
      handleDisconnect(tunnelName, tunnelConfig, false);
      // The manual-disconnect marker is cleared after a grace period so a
      // later automatic reconnect is not suppressed forever.
      setTimeout(() => {
        manualDisconnects.delete(tunnelName);
      }, 5000);
      res.json({ message: "Cancel request received", tunnelName });
    } catch (error) {
      tunnelLogger.error("Failed to cancel tunnel retry", error, {
        operation: "tunnel_cancel",
        tunnelName,
        userId,
      });
      res.status(500).json({ error: "Failed to cancel tunnel retry" });
    }
  },
);
async function initializeAutoStartTunnels(): Promise<void> {
try {
@@ -1408,12 +1795,19 @@ async function initializeAutoStartTunnels(): Promise<void> {
);
if (endpointHost) {
const tunnelIndex =
host.tunnelConnections.indexOf(tunnelConnection);
const tunnelConfig: TunnelConfig = {
name: `${host.name || `${host.username}@${host.ip}`}_${
tunnelConnection.sourcePort
}_${tunnelConnection.endpointHost}_${
tunnelConnection.endpointPort
}`,
name: normalizeTunnelName(
host.id,
tunnelIndex,
host.name || `${host.username}@${host.ip}`,
tunnelConnection.sourcePort,
tunnelConnection.endpointHost,
tunnelConnection.endpointPort,
),
sourceHostId: host.id,
tunnelIndex: tunnelIndex,
hostName: host.name || `${host.username}@${host.ip}`,
sourceIP: host.ip,
sourceSSHPort: host.port,
@@ -1429,6 +1823,7 @@ async function initializeAutoStartTunnels(): Promise<void> {
endpointIP: endpointHost.ip,
endpointSSHPort: endpointHost.port,
endpointUsername: endpointHost.username,
endpointHost: tunnelConnection.endpointHost,
endpointPassword:
tunnelConnection.endpointPassword ||
endpointHost.autostartPassword ||
@@ -1453,6 +1848,11 @@ async function initializeAutoStartTunnels(): Promise<void> {
retryInterval: tunnelConnection.retryInterval * 1000,
autoStart: tunnelConnection.autoStart,
isPinned: host.pin,
useSocks5: host.useSocks5,
socks5Host: host.socks5Host,
socks5Port: host.socks5Port,
socks5Username: host.socks5Username,
socks5Password: host.socks5Password,
};
autoStartTunnels.push(tunnelConfig);

View File

@@ -3,28 +3,87 @@ import type { Client } from "ssh2";
export function execCommand(
client: Client,
command: string,
timeoutMs = 30000,
): Promise<{
stdout: string;
stderr: string;
code: number | null;
}> {
return new Promise((resolve, reject) => {
client.exec(command, { pty: false }, (err, stream) => {
if (err) return reject(err);
let settled = false;
let stream: any = null;
const timeout = setTimeout(() => {
if (!settled) {
settled = true;
cleanup();
reject(new Error(`Command timeout after ${timeoutMs}ms: ${command}`));
}
}, timeoutMs);
const cleanup = () => {
clearTimeout(timeout);
if (stream) {
try {
stream.removeAllListeners();
if (stream.stderr) {
stream.stderr.removeAllListeners();
}
stream.destroy();
} catch (error) {
// Ignore cleanup errors
}
}
};
client.exec(command, { pty: false }, (err, _stream) => {
if (err) {
if (!settled) {
settled = true;
cleanup();
reject(err);
}
return;
}
stream = _stream;
let stdout = "";
let stderr = "";
let exitCode: number | null = null;
stream
.on("close", (code: number | undefined) => {
exitCode = typeof code === "number" ? code : null;
resolve({ stdout, stderr, code: exitCode });
if (!settled) {
settled = true;
exitCode = typeof code === "number" ? code : null;
cleanup();
resolve({ stdout, stderr, code: exitCode });
}
})
.on("data", (data: Buffer) => {
stdout += data.toString("utf8");
})
.stderr.on("data", (data: Buffer) => {
stderr += data.toString("utf8");
.on("error", (streamErr: Error) => {
if (!settled) {
settled = true;
cleanup();
reject(streamErr);
}
});
if (stream.stderr) {
stream.stderr
.on("data", (data: Buffer) => {
stderr += data.toString("utf8");
})
.on("error", (stderrErr: Error) => {
if (!settled) {
settled = true;
cleanup();
reject(stderrErr);
}
});
}
});
});
}

View File

@@ -26,12 +26,20 @@ export async function collectCpuMetrics(client: Client): Promise<{
let loadTriplet: [number, number, number] | null = null;
try {
const [stat1, loadAvgOut, coresOut] = await Promise.all([
execCommand(client, "cat /proc/stat"),
execCommand(client, "cat /proc/loadavg"),
execCommand(
client,
"nproc 2>/dev/null || grep -c ^processor /proc/cpuinfo",
const [stat1, loadAvgOut, coresOut] = await Promise.race([
Promise.all([
execCommand(client, "cat /proc/stat"),
execCommand(client, "cat /proc/loadavg"),
execCommand(
client,
"nproc 2>/dev/null || grep -c ^processor /proc/cpuinfo",
),
]),
new Promise<never>((_, reject) =>
setTimeout(
() => reject(new Error("CPU metrics collection timeout")),
25000,
),
),
]);

View File

@@ -0,0 +1,254 @@
import type { Client } from "ssh2";
import { execCommand } from "./common-utils.js";
import type {
FirewallMetrics,
FirewallChain,
FirewallRule,
} from "../../../types/stats-widgets.js";
/**
 * Parse one `iptables-save` append line ("-A CHAIN ...") into a FirewallRule.
 * Lines that are not append rules yield null. Fields absent from the line
 * keep iptables' implicit defaults (protocol "all", any source/destination).
 */
function parseIptablesRule(line: string): FirewallRule | null {
  if (!line.startsWith("-A ")) return null;

  const rule: FirewallRule = {
    chain: "",
    target: "",
    protocol: "all",
    source: "0.0.0.0/0",
    destination: "0.0.0.0/0",
  };

  // Pull the first capture group of a flag pattern, if present.
  const grab = (pattern: RegExp): string | undefined =>
    line.match(pattern)?.[1];

  // Required fields fall back to their defaults when the flag is absent.
  rule.chain = grab(/^-A\s+(\S+)/) ?? rule.chain;
  rule.target = grab(/-j\s+(\S+)/) ?? rule.target;
  rule.protocol = grab(/-p\s+(\S+)/) ?? rule.protocol;
  rule.source = grab(/-s\s+(\S+)/) ?? rule.source;
  rule.destination = grab(/-d\s+(\S+)/) ?? rule.destination;

  // Optional fields are only set when the flag actually appears.
  const dport = grab(/--dport\s+(\S+)/);
  if (dport !== undefined) rule.dport = dport;
  const sport = grab(/--sport\s+(\S+)/);
  if (sport !== undefined) rule.sport = sport;
  const state = grab(/--state\s+(\S+)/);
  if (state !== undefined) rule.state = state;
  const iface = grab(/-i\s+(\S+)/);
  if (iface !== undefined) rule.interface = iface;

  return rule;
}
/**
 * Parse full `iptables-save` output into chains, keyed in order of first
 * appearance. Policy lines (":CHAIN POLICY ...") declare chains; "-A" lines
 * are attached to their chain, creating it with policy ACCEPT if no policy
 * line was seen for it.
 */
function parseIptablesOutput(output: string): FirewallChain[] {
  const chainsByName = new Map<string, FirewallChain>();

  // Return the chain for `name`, creating it with the given policy if new.
  const ensureChain = (name: string, policy: string): FirewallChain => {
    const existing = chainsByName.get(name);
    if (existing) return existing;
    const created: FirewallChain = { name, policy, rules: [] };
    chainsByName.set(name, created);
    return created;
  };

  for (const rawLine of output.split("\n")) {
    const line = rawLine.trim();

    const policyMatch = line.match(/^:(\S+)\s+(\S+)/);
    if (policyMatch) {
      // A policy declaration (re)initializes the chain with no rules.
      const [, name, policy] = policyMatch;
      chainsByName.set(name, { name, policy, rules: [] });
      continue;
    }

    const rule = parseIptablesRule(line);
    if (rule) {
      ensureChain(rule.chain, "ACCEPT").rules.push(rule);
    }
  }

  return [...chainsByName.values()];
}
/**
 * Parse `nft list ruleset` output into FirewallChain objects.
 *
 * A simple line scanner: a "chain NAME ..." line opens a new chain (pushing
 * the previous one, if any), "policy X" lines set the open chain's policy,
 * "}" closes the open chain, and every other non-empty line inside a chain
 * is treated as a candidate rule. Rules are only kept when a verdict
 * (accept/drop/reject) is recognized in the line.
 */
function parseNftablesOutput(output: string): FirewallChain[] {
  const chains: FirewallChain[] = [];
  let currentChain: FirewallChain | null = null;
  const lines = output.split("\n");
  for (const line of lines) {
    const trimmed = line.trim();
    const chainMatch = trimmed.match(
      /chain\s+(\S+)\s*\{?\s*(?:type\s+\S+\s+hook\s+(\S+))?/,
    );
    if (chainMatch) {
      // New chain header: flush the previous chain before starting fresh.
      if (currentChain) {
        chains.push(currentChain);
      }
      // Chain names are upper-cased to mirror iptables conventions
      // (input -> INPUT), so the widget can treat both sources alike.
      currentChain = {
        name: chainMatch[1].toUpperCase(),
        policy: "ACCEPT",
        rules: [],
      };
      continue;
    }
    if (currentChain && trimmed.startsWith("policy ")) {
      const policyMatch = trimmed.match(/policy\s+(\S+)/);
      if (policyMatch) {
        currentChain.policy = policyMatch[1].toUpperCase();
      }
      continue;
    }
    if (currentChain && trimmed && !trimmed.startsWith("}")) {
      // Candidate rule line: defaults match "any" until a token narrows it.
      const rule: FirewallRule = {
        chain: currentChain.name,
        target: "",
        protocol: "all",
        source: "0.0.0.0/0",
        destination: "0.0.0.0/0",
      };
      if (trimmed.includes("accept")) rule.target = "ACCEPT";
      else if (trimmed.includes("drop")) rule.target = "DROP";
      else if (trimmed.includes("reject")) rule.target = "REJECT";
      const tcpMatch = trimmed.match(/tcp\s+dport\s+(\S+)/);
      if (tcpMatch) {
        rule.protocol = "tcp";
        rule.dport = tcpMatch[1];
      }
      const udpMatch = trimmed.match(/udp\s+dport\s+(\S+)/);
      if (udpMatch) {
        rule.protocol = "udp";
        rule.dport = udpMatch[1];
      }
      const saddrMatch = trimmed.match(/saddr\s+(\S+)/);
      if (saddrMatch) {
        rule.source = saddrMatch[1];
      }
      const daddrMatch = trimmed.match(/daddr\s+(\S+)/);
      if (daddrMatch) {
        rule.destination = daddrMatch[1];
      }
      const iifMatch = trimmed.match(/iif\s+"?(\S+)"?/);
      if (iifMatch) {
        rule.interface = iifMatch[1].replace(/"/g, "");
      }
      const ctStateMatch = trimmed.match(/ct\s+state\s+(\S+)/);
      if (ctStateMatch) {
        rule.state = ctStateMatch[1].toUpperCase();
      }
      // Lines with no recognized verdict (e.g. table headers, counters)
      // are dropped silently.
      if (rule.target) {
        currentChain.rules.push(rule);
      }
    }
    if (trimmed === "}") {
      // Close of the current block; null guards against stray braces.
      if (currentChain) {
        chains.push(currentChain);
        currentChain = null;
      }
    }
  }
  // Flush a chain left open by truncated output.
  if (currentChain) {
    chains.push(currentChain);
  }
  return chains;
}
/**
 * Detect and summarize the remote host's firewall configuration.
 *
 * Tries `iptables-save` first (its output is stable and easy to parse);
 * when no legacy *filter table exists, falls back to `nft list ruleset`.
 * Any command failure or unrecognized output degrades to
 * { type: "none", status: "unknown" } rather than throwing.
 */
export async function collectFirewallMetrics(
  client: Client,
): Promise<FirewallMetrics> {
  try {
    const iptables = await execCommand(
      client,
      "iptables-save 2>/dev/null",
      15000,
    );
    if (iptables.stdout && iptables.stdout.includes("*filter")) {
      const parsedChains = parseIptablesOutput(iptables.stdout);
      const anyRules = parsedChains.some((chain) => chain.rules.length > 0);
      return {
        type: "iptables",
        status: anyRules ? "active" : "inactive",
        // Only the built-in filter chains are surfaced for iptables.
        chains: parsedChains.filter((chain) =>
          ["INPUT", "OUTPUT", "FORWARD"].includes(chain.name),
        ),
      };
    }

    const nft = await execCommand(
      client,
      "nft list ruleset 2>/dev/null",
      15000,
    );
    if (nft.stdout && nft.stdout.trim()) {
      const parsedChains = parseNftablesOutput(nft.stdout);
      const anyRules = parsedChains.some((chain) => chain.rules.length > 0);
      return {
        type: "nftables",
        status: anyRules ? "active" : "inactive",
        chains: parsedChains,
      };
    }

    // Neither tool produced usable output.
    return {
      type: "none",
      status: "unknown",
      chains: [],
    };
  } catch {
    // Command or SSH failure is treated the same as "no firewall found".
    return {
      type: "none",
      status: "unknown",
      chains: [],
    };
  }
}

View File

@@ -1,5 +1,6 @@
import type { Client } from "ssh2";
import { execCommand } from "./common-utils.js";
import { statsLogger } from "../../utils/logger.js";
export interface LoginRecord {
user: string;
@@ -46,10 +47,20 @@ export async function collectLoginStats(client: Client): Promise<LoginStats> {
const timeStr = parts.slice(timeStart, timeStart + 5).join(" ");
if (user && user !== "wtmp" && tty !== "system") {
let parsedTime: string;
try {
const date = new Date(timeStr);
parsedTime = isNaN(date.getTime())
? new Date().toISOString()
: date.toISOString();
} catch (e) {
parsedTime = new Date().toISOString();
}
recentLogins.push({
user,
ip,
time: new Date(timeStr).toISOString(),
time: parsedTime,
status: "success",
});
if (ip !== "local") {
@@ -59,9 +70,7 @@ export async function collectLoginStats(client: Client): Promise<LoginStats> {
}
}
}
} catch (e) {
// Ignore errors
}
} catch (e) {}
try {
const failedOut = await execCommand(
@@ -96,12 +105,20 @@ export async function collectLoginStats(client: Client): Promise<LoginStats> {
}
if (user && ip) {
let parsedTime: string;
try {
const date = timeStr ? new Date(timeStr) : new Date();
parsedTime = isNaN(date.getTime())
? new Date().toISOString()
: date.toISOString();
} catch (e) {
parsedTime = new Date().toISOString();
}
failedLogins.push({
user,
ip,
time: timeStr
? new Date(timeStr).toISOString()
: new Date().toISOString(),
time: parsedTime,
status: "failed",
});
if (ip !== "unknown") {
@@ -109,9 +126,7 @@ export async function collectLoginStats(client: Client): Promise<LoginStats> {
}
}
}
} catch (e) {
// Ignore errors
}
} catch (e) {}
return {
recentLogins: recentLogins.slice(0, 10),

View File

@@ -68,12 +68,7 @@ export async function collectNetworkMetrics(client: Client): Promise<{
txBytes: null,
});
}
} catch (e) {
statsLogger.debug("Failed to collect network interface stats", {
operation: "network_stats_failed",
error: e instanceof Error ? e.message : String(e),
});
}
} catch (e) {}
return { interfaces };
}

View File

@@ -0,0 +1,155 @@
import type { Client } from "ssh2";
import { execCommand } from "./common-utils.js";
import type { PortsMetrics, ListeningPort } from "../../../types/stats-widgets.js";
/**
 * Parse `ss -tulnp` output into ListeningPort entries.
 * The first line is assumed to be the column header and is skipped;
 * only tcp/udp rows with a parsable "addr:port" local column are kept.
 */
function parseSsOutput(output: string): ListeningPort[] {
  const results: ListeningPort[] = [];
  // Drop the header row before scanning data lines.
  const dataLines = output.split("\n").slice(1);

  for (const rawLine of dataLines) {
    const line = rawLine.trim();
    if (!line) continue;

    const columns = line.split(/\s+/);
    if (columns.length < 5) continue;

    const proto = columns[0]?.toLowerCase();
    if (proto !== "tcp" && proto !== "udp") continue;

    const localAddr = columns[4];
    if (!localAddr) continue;

    // Split on the LAST colon so IPv6 literals like "[::]:80" survive.
    const sep = localAddr.lastIndexOf(":");
    if (sep === -1) continue;

    const portNumber = parseInt(localAddr.substring(sep + 1), 10);
    if (isNaN(portNumber)) continue;

    const entry: ListeningPort = {
      protocol: proto as "tcp" | "udp",
      // Strip the surrounding brackets from IPv6 addresses.
      localAddress: localAddr.substring(0, sep).replace(/^\[|\]$/g, ""),
      localPort: portNumber,
      // ss only reports a meaningful state column for TCP sockets.
      state: proto === "tcp" ? columns[1] : undefined,
    };

    // Process column looks like: users:(("sshd",pid=123,fd=3))
    const procColumn = columns[6];
    if (procColumn && procColumn.startsWith("users:")) {
      const pid = procColumn.match(/pid=(\d+)/);
      const name = procColumn.match(/\("([^"]+)"/);
      if (pid) entry.pid = parseInt(pid[1], 10);
      if (name) entry.process = name[1];
    }

    results.push(entry);
  }

  return results;
}
function parseNetstatOutput(output: string): ListeningPort[] {
const ports: ListeningPort[] = [];
const lines = output.split("\n");
for (const line of lines) {
const trimmed = line.trim();
if (!trimmed) continue;
const parts = trimmed.split(/\s+/);
if (parts.length < 4) continue;
const proto = parts[0]?.toLowerCase();
if (!proto) continue;
let protocol: "tcp" | "udp";
if (proto.startsWith("tcp")) {
protocol = "tcp";
} else if (proto.startsWith("udp")) {
protocol = "udp";
} else {
continue;
}
const localAddr = parts[3];
if (!localAddr) continue;
const lastColon = localAddr.lastIndexOf(":");
if (lastColon === -1) continue;
const address = localAddr.substring(0, lastColon);
const portStr = localAddr.substring(lastColon + 1);
const port = parseInt(portStr, 10);
if (isNaN(port)) continue;
const portEntry: ListeningPort = {
protocol,
localAddress: address,
localPort: port,
};
if (protocol === "tcp" && parts.length >= 6) {
portEntry.state = parts[5];
}
const pidProgram = parts[parts.length - 1];
if (pidProgram && pidProgram.includes("/")) {
const [pidStr, process] = pidProgram.split("/");
const pid = parseInt(pidStr, 10);
if (!isNaN(pid)) portEntry.pid = pid;
if (process) portEntry.process = process;
}
ports.push(portEntry);
}
return ports;
}
/**
 * Collect the remote host's listening TCP/UDP ports.
 *
 * Prefers `ss` and falls back to `netstat`; output validity is detected by
 * the presence of "Local" in the column header. Results are sorted by port
 * number. Any failure degrades to { source: "none", ports: [] } rather
 * than throwing.
 */
export async function collectPortsMetrics(
  client: Client,
): Promise<PortsMetrics> {
  try {
    const ssResult = await execCommand(
      client,
      "ss -tulnp 2>/dev/null",
      15000,
    );
    // "Local" appears in the header row of valid ss output.
    if (ssResult.stdout && ssResult.stdout.includes("Local")) {
      const ports = parseSsOutput(ssResult.stdout);
      return {
        source: "ss",
        ports: ports.sort((a, b) => a.localPort - b.localPort),
      };
    }
    // Fall back to netstat on hosts without iproute2.
    const netstatResult = await execCommand(
      client,
      "netstat -tulnp 2>/dev/null",
      15000,
    );
    if (netstatResult.stdout && netstatResult.stdout.includes("Local")) {
      const ports = parseNetstatOutput(netstatResult.stdout);
      return {
        source: "netstat",
        ports: ports.sort((a, b) => a.localPort - b.localPort),
      };
    }
    // Neither tool produced recognizable output.
    return {
      source: "none",
      ports: [],
    };
  } catch {
    // SSH/command failure: report "no data" instead of propagating.
    return {
      source: "none",
      ports: [],
    };
  }
}

View File

@@ -33,11 +33,13 @@ export async function collectProcessesMetrics(client: Client): Promise<{
for (let i = 1; i < Math.min(psLines.length, 11); i++) {
const parts = psLines[i].split(/\s+/);
if (parts.length >= 11) {
const cpuVal = Number(parts[2]);
const memVal = Number(parts[3]);
topProcesses.push({
pid: parts[1],
user: parts[0],
cpu: parts[2],
mem: parts[3],
cpu: Number.isFinite(cpuVal) ? cpuVal.toString() : "0",
mem: Number.isFinite(memVal) ? memVal.toString() : "0",
command: parts.slice(10).join(" ").substring(0, 50),
});
}
@@ -46,14 +48,13 @@ export async function collectProcessesMetrics(client: Client): Promise<{
const procCount = await execCommand(client, "ps aux | wc -l");
const runningCount = await execCommand(client, "ps aux | grep -c ' R '");
totalProcesses = Number(procCount.stdout.trim()) - 1;
runningProcesses = Number(runningCount.stdout.trim());
} catch (e) {
statsLogger.debug("Failed to collect process stats", {
operation: "process_stats_failed",
error: e instanceof Error ? e.message : String(e),
});
}
const totalCount = Number(procCount.stdout.trim()) - 1;
totalProcesses = Number.isFinite(totalCount) ? totalCount : null;
const runningCount2 = Number(runningCount.stdout.trim());
runningProcesses = Number.isFinite(runningCount2) ? runningCount2 : null;
} catch (e) {}
return {
total: totalProcesses,

View File

@@ -23,10 +23,7 @@ export async function collectSystemMetrics(client: Client): Promise<{
kernel = kernelOut.stdout.trim() || null;
os = osOut.stdout.trim() || null;
} catch (e) {
statsLogger.debug("Failed to collect system info", {
operation: "system_info_failed",
error: e instanceof Error ? e.message : String(e),
});
// No error log
}
return {

View File

@@ -21,12 +21,7 @@ export async function collectUptimeMetrics(client: Client): Promise<{
uptimeFormatted = `${days}d ${hours}h ${minutes}m`;
}
}
} catch (e) {
statsLogger.debug("Failed to collect uptime", {
operation: "uptime_failed",
error: e instanceof Error ? e.message : String(e),
});
}
} catch (e) {}
return {
seconds: uptimeSeconds,

View File

@@ -102,6 +102,8 @@ import { systemLogger, versionLogger } from "./utils/logger.js";
await import("./ssh/tunnel.js");
await import("./ssh/file-manager.js");
await import("./ssh/server-stats.js");
await import("./ssh/docker.js");
await import("./ssh/docker-console.js");
await import("./dashboard.js");
process.on("SIGINT", () => {

View File

@@ -154,9 +154,8 @@ class AuthManager {
return;
}
const { getSqlite, saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
const { getSqlite, saveMemoryDatabaseToFile } =
await import("../database/db/index.js");
const sqlite = getSqlite();
@@ -169,6 +168,23 @@ class AuthManager {
if (migrationResult.migrated) {
await saveMemoryDatabaseToFile();
}
try {
const { CredentialSystemEncryptionMigration } =
await import("./credential-system-encryption-migration.js");
const credMigration = new CredentialSystemEncryptionMigration();
const credResult = await credMigration.migrateUserCredentials(userId);
if (credResult.migrated > 0) {
await saveMemoryDatabaseToFile();
}
} catch (error) {
databaseLogger.warn("Credential migration failed during login", {
operation: "login_credential_migration_failed",
userId,
error: error instanceof Error ? error.message : "Unknown error",
});
}
} catch (error) {
databaseLogger.error("Lazy encryption migration failed", error, {
operation: "lazy_encryption_migration_error",
@@ -231,9 +247,8 @@ class AuthManager {
});
try {
const { saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
const { saveMemoryDatabaseToFile } =
await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
} catch (saveError) {
databaseLogger.error(
@@ -334,9 +349,8 @@ class AuthManager {
await db.delete(sessions).where(eq(sessions.id, sessionId));
try {
const { saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
const { saveMemoryDatabaseToFile } =
await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
} catch (saveError) {
databaseLogger.error(
@@ -387,9 +401,8 @@ class AuthManager {
}
try {
const { saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
const { saveMemoryDatabaseToFile } =
await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
} catch (saveError) {
databaseLogger.error(
@@ -430,9 +443,8 @@ class AuthManager {
.where(sql`${sessions.expiresAt} < datetime('now')`);
try {
const { saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
const { saveMemoryDatabaseToFile } =
await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
} catch (saveError) {
databaseLogger.error(
@@ -568,9 +580,8 @@ class AuthManager {
.where(eq(sessions.id, payload.sessionId))
.then(async () => {
try {
const { saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
const { saveMemoryDatabaseToFile } =
await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
const remainingSessions = await db
@@ -714,9 +725,8 @@ class AuthManager {
await db.delete(sessions).where(eq(sessions.id, sessionId));
try {
const { saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
const { saveMemoryDatabaseToFile } =
await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
} catch (saveError) {
databaseLogger.error(

View File

@@ -0,0 +1,131 @@
import { db } from "../database/db/index.js";
import { sshCredentials } from "../database/db/schema.js";
import { eq, and, or, isNull } from "drizzle-orm";
import { DataCrypto } from "./data-crypto.js";
import { SystemCrypto } from "./system-crypto.js";
import { FieldCrypto } from "./field-crypto.js";
import { databaseLogger } from "./logger.js";
export class CredentialSystemEncryptionMigration {
  /**
   * Re-encrypt a user's SSH credential secrets with the system-wide
   * credential-sharing key (CSKEK) so they can later be used without the
   * owner's personal data key (DEK).
   *
   * Only rows missing at least one of systemPassword / systemKey /
   * systemKeyPassword are selected, so repeated calls are incremental.
   * Per-row failures are logged and counted without aborting the run.
   *
   * @param userId - owner of the credentials; their DEK must be unlocked
   *                 (i.e. the user is logged in) or this throws.
   * @returns counters: migrated (rows updated), failed (rows that errored),
   *          skipped (NOTE(review): always 0 — the counter is declared but
   *          never incremented; confirm whether it is still needed).
   * @throws when the DEK is unavailable or the initial query fails.
   */
  async migrateUserCredentials(userId: string): Promise<{
    migrated: number;
    failed: number;
    skipped: number;
  }> {
    try {
      // The per-user DEK is needed to decrypt the existing ciphertext.
      const userDEK = DataCrypto.getUserDataKey(userId);
      if (!userDEK) {
        throw new Error("User must be logged in to migrate credentials");
      }
      const systemCrypto = SystemCrypto.getInstance();
      const CSKEK = await systemCrypto.getCredentialSharingKey();
      // Select only credentials still lacking a system-encrypted copy.
      const credentials = await db
        .select()
        .from(sshCredentials)
        .where(
          and(
            eq(sshCredentials.userId, userId),
            or(
              isNull(sshCredentials.systemPassword),
              isNull(sshCredentials.systemKey),
              isNull(sshCredentials.systemKeyPassword),
            ),
          ),
        );
      let migrated = 0;
      let failed = 0;
      const skipped = 0;
      for (const cred of credentials) {
        try {
          // Decrypt each secret with the user's DEK (null when unset)...
          const plainPassword = cred.password
            ? FieldCrypto.decryptField(
                cred.password,
                userDEK,
                cred.id.toString(),
                "password",
              )
            : null;
          const plainKey = cred.key
            ? FieldCrypto.decryptField(
                cred.key,
                userDEK,
                cred.id.toString(),
                "key",
              )
            : null;
          const plainKeyPassword = cred.key_password
            ? FieldCrypto.decryptField(
                cred.key_password,
                userDEK,
                cred.id.toString(),
                "key_password",
              )
            : null;
          // ...then re-encrypt each one with the shared system key.
          const systemPassword = plainPassword
            ? FieldCrypto.encryptField(
                plainPassword,
                CSKEK,
                cred.id.toString(),
                "password",
              )
            : null;
          const systemKey = plainKey
            ? FieldCrypto.encryptField(
                plainKey,
                CSKEK,
                cred.id.toString(),
                "key",
              )
            : null;
          const systemKeyPassword = plainKeyPassword
            ? FieldCrypto.encryptField(
                plainKeyPassword,
                CSKEK,
                cred.id.toString(),
                "key_password",
              )
            : null;
          await db
            .update(sshCredentials)
            .set({
              systemPassword,
              systemKey,
              systemKeyPassword,
              updatedAt: new Date().toISOString(),
            })
            .where(eq(sshCredentials.id, cred.id));
          migrated++;
        } catch (error) {
          // One bad row must not abort the whole migration.
          databaseLogger.error("Failed to migrate credential", error, {
            credentialId: cred.id,
            userId,
          });
          failed++;
        }
      }
      return { migrated, failed, skipped };
    } catch (error) {
      databaseLogger.error(
        "Credential system encryption migration failed",
        error,
        {
          operation: "credential_migration_failed",
          userId,
          error: error instanceof Error ? error.message : "Unknown error",
        },
      );
      throw error;
    }
  }
}

View File

@@ -475,6 +475,52 @@ class DataCrypto {
return false;
}
}
/**
* Encrypt sensitive credential fields with system key for offline sharing
* Returns an object with systemPassword, systemKey, systemKeyPassword fields
*/
static async encryptRecordWithSystemKey<T extends Record<string, unknown>>(
tableName: string,
record: T,
systemKey: Buffer,
): Promise<Partial<T>> {
const systemEncrypted: Record<string, unknown> = {};
const recordId = record.id || "temp-" + Date.now();
if (tableName !== "ssh_credentials") {
return systemEncrypted as Partial<T>;
}
if (record.password && typeof record.password === "string") {
systemEncrypted.systemPassword = FieldCrypto.encryptField(
record.password as string,
systemKey,
recordId as string,
"password",
);
}
if (record.key && typeof record.key === "string") {
systemEncrypted.systemKey = FieldCrypto.encryptField(
record.key as string,
systemKey,
recordId as string,
"key",
);
}
if (record.key_password && typeof record.key_password === "string") {
systemEncrypted.systemKeyPassword = FieldCrypto.encryptField(
record.key_password as string,
systemKey,
recordId as string,
"key_password",
);
}
return systemEncrypted as Partial<T>;
}
}
export { DataCrypto };

View File

@@ -327,11 +327,7 @@ class DatabaseFileEncryption {
fs.accessSync(envPath, fs.constants.R_OK);
envFileReadable = true;
}
} catch (error) {
databaseLogger.debug("Operation failed, continuing", {
error: error instanceof Error ? error.message : String(error),
});
}
} catch (error) {}
databaseLogger.error(
"Database decryption authentication failed - possible causes: wrong DATABASE_KEY, corrupted files, or interrupted write",

View File

@@ -36,7 +36,7 @@ const SENSITIVE_FIELDS = [
const TRUNCATE_FIELDS = ["data", "content", "body", "response", "request"];
class Logger {
export class Logger {
private serviceName: string;
private serviceIcon: string;
private serviceColor: string;

View File

@@ -0,0 +1,436 @@
import type { Request, Response, NextFunction } from "express";
import { db } from "../database/db/index.js";
import {
hostAccess,
roles,
userRoles,
sshData,
users,
} from "../database/db/schema.js";
import { eq, and, or, isNull, gte, sql } from "drizzle-orm";
import { databaseLogger } from "./logger.js";
interface AuthenticatedRequest extends Request {
userId?: string;
dataKey?: Buffer;
}
interface HostAccessInfo {
hasAccess: boolean;
isOwner: boolean;
isShared: boolean;
permissionLevel?: "view";
expiresAt?: string | null;
}
interface PermissionCheckResult {
allowed: boolean;
reason?: string;
}
class PermissionManager {
private static instance: PermissionManager;
private permissionCache: Map<
string,
{ permissions: string[]; timestamp: number }
>;
private readonly CACHE_TTL = 5 * 60 * 1000;
private constructor() {
this.permissionCache = new Map();
setInterval(() => {
this.cleanupExpiredAccess().catch((error) => {
databaseLogger.error(
"Failed to run periodic host access cleanup",
error,
{
operation: "host_access_cleanup_periodic",
},
);
});
}, 60 * 1000);
setInterval(() => {
this.clearPermissionCache();
}, this.CACHE_TTL);
}
static getInstance(): PermissionManager {
if (!this.instance) {
this.instance = new PermissionManager();
}
return this.instance;
}
/**
* Clean up expired host access entries
*/
private async cleanupExpiredAccess(): Promise<void> {
try {
const now = new Date().toISOString();
const result = await db
.delete(hostAccess)
.where(
and(
sql`${hostAccess.expiresAt} IS NOT NULL`,
sql`${hostAccess.expiresAt} <= ${now}`,
),
)
.returning({ id: hostAccess.id });
} catch (error) {
databaseLogger.error("Failed to cleanup expired host access", error, {
operation: "host_access_cleanup_failed",
});
}
}
  /**
   * Clear permission cache
   */
  private clearPermissionCache(): void {
    // Drop every cached permission set; entries repopulate lazily on the
    // next getUserPermissions call.
    this.permissionCache.clear();
  }
  /**
   * Invalidate permission cache for a specific user
   */
  invalidateUserPermissionCache(userId: string): void {
    // Called when a user's roles change so stale grants are not served.
    this.permissionCache.delete(userId);
  }
  /**
   * Get user permissions from roles.
   *
   * Returns the union of permission strings across every role assigned to
   * the user, served from an in-memory cache for up to CACHE_TTL. Roles
   * whose permissions column is not valid JSON are logged and skipped.
   * On a database error an empty list is returned (and not cached).
   */
  async getUserPermissions(userId: string): Promise<string[]> {
    // Serve from cache while the entry is still fresh.
    const cached = this.permissionCache.get(userId);
    if (cached && Date.now() - cached.timestamp < this.CACHE_TTL) {
      return cached.permissions;
    }
    try {
      const userRoleRecords = await db
        .select({
          permissions: roles.permissions,
        })
        .from(userRoles)
        .innerJoin(roles, eq(userRoles.roleId, roles.id))
        .where(eq(userRoles.userId, userId));
      // Union permissions across roles; the Set removes duplicates.
      const allPermissions = new Set<string>();
      for (const record of userRoleRecords) {
        try {
          const permissions = JSON.parse(record.permissions) as string[];
          for (const perm of permissions) {
            allPermissions.add(perm);
          }
        } catch (parseError) {
          // One malformed role must not hide grants from the others.
          databaseLogger.warn("Failed to parse role permissions", {
            operation: "get_user_permissions",
            userId,
            error: parseError,
          });
        }
      }
      const permissionsArray = Array.from(allPermissions);
      this.permissionCache.set(userId, {
        permissions: permissionsArray,
        timestamp: Date.now(),
      });
      return permissionsArray;
    } catch (error) {
      databaseLogger.error("Failed to get user permissions", error, {
        operation: "get_user_permissions",
        userId,
      });
      return [];
    }
  }
/**
* Check if user has a specific permission
* Supports wildcards: "hosts.*", "*"
*/
async hasPermission(userId: string, permission: string): Promise<boolean> {
const userPermissions = await this.getUserPermissions(userId);
if (userPermissions.includes("*")) {
return true;
}
if (userPermissions.includes(permission)) {
return true;
}
const parts = permission.split(".");
for (let i = parts.length; i > 0; i--) {
const wildcardPermission = parts.slice(0, i).join(".") + ".*";
if (userPermissions.includes(wildcardPermission)) {
return true;
}
}
return false;
}
  /**
   * Check if user can access a specific host.
   *
   * Resolution order: owning the sshData row wins outright; otherwise a
   * non-expired hostAccess grant (direct to the user, or via one of the
   * user's roles) gives shared access. Shared grants never permit "write"
   * or "delete". Any database error fails closed (access denied).
   */
  async canAccessHost(
    userId: string,
    hostId: number,
    action: "read" | "write" | "execute" | "delete" | "share" = "read",
  ): Promise<HostAccessInfo> {
    try {
      // Ownership check: the host row belongs directly to this user.
      const host = await db
        .select()
        .from(sshData)
        .where(and(eq(sshData.id, hostId), eq(sshData.userId, userId)))
        .limit(1);
      if (host.length > 0) {
        return {
          hasAccess: true,
          isOwner: true,
          isShared: false,
        };
      }
      // Collect the user's role ids so role-based grants can match too.
      const userRoleIds = await db
        .select({ roleId: userRoles.roleId })
        .from(userRoles)
        .where(eq(userRoles.userId, userId));
      const roleIds = userRoleIds.map((r) => r.roleId);
      const now = new Date().toISOString();
      // A grant matches when it targets this user directly or one of their
      // roles, and either never expires or has not yet expired.
      const sharedAccess = await db
        .select()
        .from(hostAccess)
        .where(
          and(
            eq(hostAccess.hostId, hostId),
            or(
              eq(hostAccess.userId, userId),
              roleIds.length > 0
                ? sql`${hostAccess.roleId} IN (${sql.join(
                    roleIds.map((id) => sql`${id}`),
                    sql`, `,
                  )})`
                : sql`false`,
            ),
            or(isNull(hostAccess.expiresAt), gte(hostAccess.expiresAt, now)),
          ),
        )
        .limit(1);
      if (sharedAccess.length > 0) {
        const access = sharedAccess[0];
        // Shared (view-level) access never permits mutating actions.
        if (action === "write" || action === "delete") {
          return {
            hasAccess: false,
            isOwner: false,
            isShared: true,
            permissionLevel: access.permissionLevel as "view",
            expiresAt: access.expiresAt,
          };
        }
        // Best-effort bookkeeping; a failure here must not deny access.
        try {
          await db
            .update(hostAccess)
            .set({
              lastAccessedAt: now,
            })
            .where(eq(hostAccess.id, access.id));
        } catch (error) {
          databaseLogger.warn("Failed to update host access timestamp", {
            operation: "update_host_access_timestamp",
            error,
          });
        }
        return {
          hasAccess: true,
          isOwner: false,
          isShared: true,
          permissionLevel: access.permissionLevel as "view",
          expiresAt: access.expiresAt,
        };
      }
      // No ownership and no grant: deny.
      return {
        hasAccess: false,
        isOwner: false,
        isShared: false,
      };
    } catch (error) {
      // Fail closed on any lookup error.
      databaseLogger.error("Failed to check host access", error, {
        operation: "can_access_host",
        userId,
        hostId,
        action,
      });
      return {
        hasAccess: false,
        isOwner: false,
        isShared: false,
      };
    }
  }
/**
* Check if user is admin (backward compatibility)
*/
async isAdmin(userId: string): Promise<boolean> {
try {
const user = await db
.select({ isAdmin: users.is_admin })
.from(users)
.where(eq(users.id, userId))
.limit(1);
if (user.length > 0 && user[0].isAdmin) {
return true;
}
const adminRoles = await db
.select({ roleName: roles.name })
.from(userRoles)
.innerJoin(roles, eq(userRoles.roleId, roles.id))
.where(
and(
eq(userRoles.userId, userId),
or(eq(roles.name, "admin"), eq(roles.name, "super_admin")),
),
);
return adminRoles.length > 0;
} catch (error) {
databaseLogger.error("Failed to check admin status", error, {
operation: "is_admin",
userId,
});
return false;
}
}
/**
* Middleware: Require specific permission
*/
requirePermission(permission: string) {
return async (
req: AuthenticatedRequest,
res: Response,
next: NextFunction,
) => {
const userId = req.userId;
if (!userId) {
return res.status(401).json({ error: "Not authenticated" });
}
const hasPermission = await this.hasPermission(userId, permission);
if (!hasPermission) {
databaseLogger.warn("Permission denied", {
operation: "permission_check",
userId,
permission,
path: req.path,
});
return res.status(403).json({
error: "Insufficient permissions",
required: permission,
});
}
next();
};
}
/**
* Middleware: Require host access
*/
requireHostAccess(
hostIdParam: string = "id",
action: "read" | "write" | "execute" | "delete" | "share" = "read",
) {
return async (
req: AuthenticatedRequest,
res: Response,
next: NextFunction,
) => {
const userId = req.userId;
if (!userId) {
return res.status(401).json({ error: "Not authenticated" });
}
const hostId = parseInt(req.params[hostIdParam], 10);
if (isNaN(hostId)) {
return res.status(400).json({ error: "Invalid host ID" });
}
const accessInfo = await this.canAccessHost(userId, hostId, action);
if (!accessInfo.hasAccess) {
databaseLogger.warn("Host access denied", {
operation: "host_access_check",
userId,
hostId,
action,
});
return res.status(403).json({
error: "Access denied to host",
hostId,
action,
});
}
(req as any).hostAccessInfo = accessInfo;
next();
};
}
/**
* Middleware: Require admin role (backward compatible)
*/
requireAdmin() {
return async (
req: AuthenticatedRequest,
res: Response,
next: NextFunction,
) => {
const userId = req.userId;
if (!userId) {
return res.status(401).json({ error: "Not authenticated" });
}
const isAdmin = await this.isAdmin(userId);
if (!isAdmin) {
databaseLogger.warn("Admin access denied", {
operation: "admin_check",
userId,
path: req.path,
});
return res.status(403).json({ error: "Admin access required" });
}
next();
};
}
}
export { PermissionManager };
export type { AuthenticatedRequest, HostAccessInfo, PermissionCheckResult };

View File

@@ -0,0 +1,700 @@
import { db } from "../database/db/index.js";
import {
sharedCredentials,
sshCredentials,
hostAccess,
users,
userRoles,
sshData,
} from "../database/db/schema.js";
import { eq, and } from "drizzle-orm";
import { DataCrypto } from "./data-crypto.js";
import { FieldCrypto } from "./field-crypto.js";
import { databaseLogger } from "./logger.js";
/**
 * Decrypted SSH credential material passed between the crypto helpers.
 * Which of the optional secret fields are populated depends on `authType`
 * and on what the source row actually stored.
 */
interface CredentialData {
  username: string;
  authType: string;
  password?: string;
  key?: string;
  keyPassword?: string;
  keyType?: string;
}
/**
 * Manages shared credentials for RBAC host sharing.
 * Creates per-user encrypted credential copies to enable credential sharing
 * without requiring the credential owner to be online.
 *
 * Two decryption paths exist: the owner's DEK (when the owner has an active
 * session) and the system-level credential-sharing key for credentials that
 * have been migrated for offline sharing. Whenever the target user's DEK is
 * unavailable, a placeholder row flagged `needsReEncryption` is written and
 * completed on the target's next login.
 */
class SharedCredentialManager {
  private static instance: SharedCredentialManager;
  private constructor() {}
  // Process-wide singleton accessor.
  static getInstance(): SharedCredentialManager {
    if (!this.instance) {
      this.instance = new SharedCredentialManager();
    }
    return this.instance;
  }
  /**
   * Create shared credential for a specific user
   * Called when sharing a host with a user
   *
   * Decrypts the original credential (owner DEK if available, otherwise the
   * system sharing key) and inserts a copy re-encrypted with the target's
   * DEK. If the target's DEK is not available, a pending row is created.
   */
  async createSharedCredentialForUser(
    hostAccessId: number,
    originalCredentialId: number,
    targetUserId: string,
    ownerId: string,
  ): Promise<void> {
    try {
      const ownerDEK = DataCrypto.getUserDataKey(ownerId);
      if (ownerDEK) {
        // Owner is online: decrypt with the owner's own data key.
        const targetDEK = DataCrypto.getUserDataKey(targetUserId);
        if (!targetDEK) {
          // Target not unlocked: defer until their next login.
          await this.createPendingSharedCredential(
            hostAccessId,
            originalCredentialId,
            targetUserId,
          );
          return;
        }
        const credentialData = await this.getDecryptedCredential(
          originalCredentialId,
          ownerId,
          ownerDEK,
        );
        const encryptedForTarget = this.encryptCredentialForUser(
          credentialData,
          targetUserId,
          targetDEK,
          hostAccessId,
        );
        await db.insert(sharedCredentials).values({
          hostAccessId,
          originalCredentialId,
          targetUserId,
          ...encryptedForTarget,
          needsReEncryption: false,
        });
      } else {
        // Owner offline: fall back to the system credential-sharing key
        // (only works for credentials already migrated for offline sharing).
        const targetDEK = DataCrypto.getUserDataKey(targetUserId);
        if (!targetDEK) {
          await this.createPendingSharedCredential(
            hostAccessId,
            originalCredentialId,
            targetUserId,
          );
          return;
        }
        const credentialData =
          await this.getDecryptedCredentialViaSystemKey(originalCredentialId);
        const encryptedForTarget = this.encryptCredentialForUser(
          credentialData,
          targetUserId,
          targetDEK,
          hostAccessId,
        );
        await db.insert(sharedCredentials).values({
          hostAccessId,
          originalCredentialId,
          targetUserId,
          ...encryptedForTarget,
          needsReEncryption: false,
        });
      }
    } catch (error) {
      databaseLogger.error("Failed to create shared credential", error, {
        operation: "create_shared_credential",
        hostAccessId,
        targetUserId,
      });
      throw error;
    }
  }
  /**
   * Create shared credentials for all users in a role
   * Called when sharing a host with a role
   *
   * Per-member failures are logged and skipped so one member cannot block
   * the rest of the role.
   */
  async createSharedCredentialsForRole(
    hostAccessId: number,
    originalCredentialId: number,
    roleId: number,
    ownerId: string,
  ): Promise<void> {
    try {
      const roleUsers = await db
        .select({ userId: userRoles.userId })
        .from(userRoles)
        .where(eq(userRoles.roleId, roleId));
      for (const { userId } of roleUsers) {
        try {
          await this.createSharedCredentialForUser(
            hostAccessId,
            originalCredentialId,
            userId,
            ownerId,
          );
        } catch (error) {
          // Best-effort per member; continue with the remaining users.
          databaseLogger.error(
            "Failed to create shared credential for role member",
            error,
            {
              operation: "create_shared_credentials_role",
              hostAccessId,
              roleId,
              userId,
            },
          );
        }
      }
    } catch (error) {
      databaseLogger.error(
        "Failed to create shared credentials for role",
        error,
        {
          operation: "create_shared_credentials_role",
          hostAccessId,
          roleId,
        },
      );
      throw error;
    }
  }
  /**
   * Get credential data for a shared user
   * Called when a shared user connects to a host
   *
   * Returns null when no share exists for this user/host or when the share
   * is still pending re-encryption; throws if the user's data is locked.
   */
  async getSharedCredentialForUser(
    hostId: number,
    userId: string,
  ): Promise<CredentialData | null> {
    try {
      const userDEK = DataCrypto.getUserDataKey(userId);
      if (!userDEK) {
        throw new Error(`User ${userId} data not unlocked`);
      }
      const sharedCred = await db
        .select()
        .from(sharedCredentials)
        .innerJoin(
          hostAccess,
          eq(sharedCredentials.hostAccessId, hostAccess.id),
        )
        .where(
          and(
            eq(hostAccess.hostId, hostId),
            eq(sharedCredentials.targetUserId, userId),
          ),
        )
        .limit(1);
      if (sharedCred.length === 0) {
        return null;
      }
      // Joined result: pick the shared_credentials side of the row.
      const cred = sharedCred[0].shared_credentials;
      if (cred.needsReEncryption) {
        // Placeholder row — ciphertext is not yet valid for this user.
        databaseLogger.warn(
          "Shared credential needs re-encryption but cannot be accessed yet",
          {
            operation: "get_shared_credential_pending",
            hostId,
            userId,
          },
        );
        return null;
      }
      return this.decryptSharedCredential(cred, userDEK);
    } catch (error) {
      databaseLogger.error("Failed to get shared credential", error, {
        operation: "get_shared_credential",
        hostId,
        userId,
      });
      throw error;
    }
  }
  /**
   * Update all shared credentials when original credential is updated
   * Called when credential owner updates credential
   *
   * Copies that cannot be refreshed now (offline target, unmigrated
   * credential) are flagged `needsReEncryption` for later. Errors are
   * logged, never thrown — credential updates must not fail on sharing.
   */
  async updateSharedCredentialsForOriginal(
    credentialId: number,
    ownerId: string,
  ): Promise<void> {
    try {
      const sharedCreds = await db
        .select()
        .from(sharedCredentials)
        .where(eq(sharedCredentials.originalCredentialId, credentialId));
      const ownerDEK = DataCrypto.getUserDataKey(ownerId);
      let credentialData: CredentialData;
      if (ownerDEK) {
        credentialData = await this.getDecryptedCredential(
          credentialId,
          ownerId,
          ownerDEK,
        );
      } else {
        try {
          credentialData =
            await this.getDecryptedCredentialViaSystemKey(credentialId);
        } catch (error) {
          // Cannot decrypt at all: mark every copy stale and bail out.
          databaseLogger.warn(
            "Cannot update shared credentials: owner offline and credential not migrated",
            {
              operation: "update_shared_credentials_failed",
              credentialId,
              ownerId,
              error: error instanceof Error ? error.message : "Unknown error",
            },
          );
          await db
            .update(sharedCredentials)
            .set({ needsReEncryption: true })
            .where(eq(sharedCredentials.originalCredentialId, credentialId));
          return;
        }
      }
      for (const sharedCred of sharedCreds) {
        const targetDEK = DataCrypto.getUserDataKey(sharedCred.targetUserId);
        if (!targetDEK) {
          // Target offline: flag this copy; it is refreshed at their login.
          await db
            .update(sharedCredentials)
            .set({ needsReEncryption: true })
            .where(eq(sharedCredentials.id, sharedCred.id));
          continue;
        }
        const encryptedForTarget = this.encryptCredentialForUser(
          credentialData,
          sharedCred.targetUserId,
          targetDEK,
          sharedCred.hostAccessId,
        );
        await db
          .update(sharedCredentials)
          .set({
            ...encryptedForTarget,
            needsReEncryption: false,
            updatedAt: new Date().toISOString(),
          })
          .where(eq(sharedCredentials.id, sharedCred.id));
      }
    } catch (error) {
      databaseLogger.error("Failed to update shared credentials", error, {
        operation: "update_shared_credentials",
        credentialId,
      });
    }
  }
  /**
   * Delete shared credentials when original credential is deleted
   * Called from credential deletion route
   *
   * Best-effort: failures are logged, not thrown.
   */
  async deleteSharedCredentialsForOriginal(
    credentialId: number,
  ): Promise<void> {
    try {
      // NOTE(review): `result` (the deleted ids) is collected but never
      // used — consider logging it or dropping the .returning() call.
      const result = await db
        .delete(sharedCredentials)
        .where(eq(sharedCredentials.originalCredentialId, credentialId))
        .returning({ id: sharedCredentials.id });
    } catch (error) {
      databaseLogger.error("Failed to delete shared credentials", error, {
        operation: "delete_shared_credentials",
        credentialId,
      });
    }
  }
  /**
   * Re-encrypt pending shared credentials for a user when they log in
   * Called during user login
   *
   * No-op when the user's DEK is not available; errors are logged only.
   */
  async reEncryptPendingCredentialsForUser(userId: string): Promise<void> {
    try {
      const userDEK = DataCrypto.getUserDataKey(userId);
      if (!userDEK) {
        return;
      }
      const pendingCreds = await db
        .select()
        .from(sharedCredentials)
        .where(
          and(
            eq(sharedCredentials.targetUserId, userId),
            eq(sharedCredentials.needsReEncryption, true),
          ),
        );
      for (const cred of pendingCreds) {
        await this.reEncryptSharedCredential(cred.id, userId);
      }
    } catch (error) {
      databaseLogger.error("Failed to re-encrypt pending credentials", error, {
        operation: "reencrypt_pending_credentials",
        userId,
      });
    }
  }
  /**
   * Load and decrypt the owner's credential row using the owner's DEK.
   * Throws when the credential does not exist (or is not owned by ownerId).
   */
  private async getDecryptedCredential(
    credentialId: number,
    ownerId: string,
    ownerDEK: Buffer,
  ): Promise<CredentialData> {
    const creds = await db
      .select()
      .from(sshCredentials)
      .where(
        and(
          eq(sshCredentials.id, credentialId),
          eq(sshCredentials.userId, ownerId),
        ),
      )
      .limit(1);
    if (creds.length === 0) {
      throw new Error(`Credential ${credentialId} not found`);
    }
    const cred = creds[0];
    return {
      username: cred.username,
      authType: cred.authType,
      password: cred.password
        ? this.decryptField(cred.password, ownerDEK, credentialId, "password")
        : undefined,
      key: cred.key
        ? this.decryptField(cred.key, ownerDEK, credentialId, "key")
        : undefined,
      keyPassword: cred.key_password
        ? this.decryptField(
            cred.key_password,
            ownerDEK,
            credentialId,
            "key_password",
          )
        : undefined,
      keyType: cred.keyType,
    };
  }
  /**
   * Decrypt credential using system key (for offline sharing when owner is offline)
   *
   * Requires at least one of the system* columns to be populated (i.e. the
   * credential was migrated for offline sharing); throws otherwise.
   */
  private async getDecryptedCredentialViaSystemKey(
    credentialId: number,
  ): Promise<CredentialData> {
    const creds = await db
      .select()
      .from(sshCredentials)
      .where(eq(sshCredentials.id, credentialId))
      .limit(1);
    if (creds.length === 0) {
      throw new Error(`Credential ${credentialId} not found`);
    }
    const cred = creds[0];
    if (!cred.systemPassword && !cred.systemKey && !cred.systemKeyPassword) {
      throw new Error(
        "Credential not yet migrated for offline sharing. " +
          "Please ask credential owner to log in to enable sharing.",
      );
    }
    // Lazy import avoids a module-load cycle with system-crypto.
    const { SystemCrypto } = await import("./system-crypto.js");
    const systemCrypto = SystemCrypto.getInstance();
    const CSKEK = await systemCrypto.getCredentialSharingKey();
    return {
      username: cred.username,
      authType: cred.authType,
      password: cred.systemPassword
        ? this.decryptField(
            cred.systemPassword,
            CSKEK,
            credentialId,
            "password",
          )
        : undefined,
      key: cred.systemKey
        ? this.decryptField(cred.systemKey, CSKEK, credentialId, "key")
        : undefined,
      keyPassword: cred.systemKeyPassword
        ? this.decryptField(
            cred.systemKeyPassword,
            CSKEK,
            credentialId,
            "key_password",
          )
        : undefined,
      keyType: cred.keyType,
    };
  }
  /**
   * Encrypt plaintext credential fields with the target user's DEK, bound
   * to a deterministic record id derived from the host-access grant.
   * NOTE: despite the column names, authType/keyType are stored as-is.
   */
  private encryptCredentialForUser(
    credentialData: CredentialData,
    targetUserId: string,
    targetDEK: Buffer,
    hostAccessId: number,
  ): {
    encryptedUsername: string;
    encryptedAuthType: string;
    encryptedPassword: string | null;
    encryptedKey: string | null;
    encryptedKeyPassword: string | null;
    encryptedKeyType: string | null;
  } {
    // Must match the record id used in decryptSharedCredential.
    const recordId = `shared-${hostAccessId}-${targetUserId}`;
    return {
      encryptedUsername: FieldCrypto.encryptField(
        credentialData.username,
        targetDEK,
        recordId,
        "username",
      ),
      encryptedAuthType: credentialData.authType,
      encryptedPassword: credentialData.password
        ? FieldCrypto.encryptField(
            credentialData.password,
            targetDEK,
            recordId,
            "password",
          )
        : null,
      encryptedKey: credentialData.key
        ? FieldCrypto.encryptField(
            credentialData.key,
            targetDEK,
            recordId,
            "key",
          )
        : null,
      encryptedKeyPassword: credentialData.keyPassword
        ? FieldCrypto.encryptField(
            credentialData.keyPassword,
            targetDEK,
            recordId,
            "key_password",
          )
        : null,
      encryptedKeyType: credentialData.keyType || null,
    };
  }
  /**
   * Reverse of encryptCredentialForUser: decrypt a shared copy with the
   * target user's DEK using the same derived record id.
   */
  private decryptSharedCredential(
    sharedCred: typeof sharedCredentials.$inferSelect,
    userDEK: Buffer,
  ): CredentialData {
    const recordId = `shared-${sharedCred.hostAccessId}-${sharedCred.targetUserId}`;
    return {
      username: FieldCrypto.decryptField(
        sharedCred.encryptedUsername,
        userDEK,
        recordId,
        "username",
      ),
      authType: sharedCred.encryptedAuthType,
      password: sharedCred.encryptedPassword
        ? FieldCrypto.decryptField(
            sharedCred.encryptedPassword,
            userDEK,
            recordId,
            "password",
          )
        : undefined,
      key: sharedCred.encryptedKey
        ? FieldCrypto.decryptField(
            sharedCred.encryptedKey,
            userDEK,
            recordId,
            "key",
          )
        : undefined,
      keyPassword: sharedCred.encryptedKeyPassword
        ? FieldCrypto.decryptField(
            sharedCred.encryptedKeyPassword,
            userDEK,
            recordId,
            "key_password",
          )
        : undefined,
      keyType: sharedCred.encryptedKeyType || undefined,
    };
  }
  /**
   * Decrypt a single field; on failure, log a warning and return the stored
   * value unchanged (presumably a legacy/plaintext value — TODO confirm).
   */
  private decryptField(
    encryptedValue: string,
    dek: Buffer,
    recordId: number | string,
    fieldName: string,
  ): string {
    try {
      return FieldCrypto.decryptField(
        encryptedValue,
        dek,
        recordId.toString(),
        fieldName,
      );
    } catch (error) {
      databaseLogger.warn("Field decryption failed, returning as-is", {
        operation: "decrypt_field",
        fieldName,
        recordId,
      });
      return encryptedValue;
    }
  }
  /**
   * Insert a placeholder share with empty ciphertext, to be filled in by
   * reEncryptPendingCredentialsForUser when the target next logs in.
   */
  private async createPendingSharedCredential(
    hostAccessId: number,
    originalCredentialId: number,
    targetUserId: string,
  ): Promise<void> {
    await db.insert(sharedCredentials).values({
      hostAccessId,
      originalCredentialId,
      targetUserId,
      encryptedUsername: "",
      encryptedAuthType: "",
      needsReEncryption: true,
    });
    databaseLogger.info("Created pending shared credential", {
      operation: "create_pending_shared_credential",
      hostAccessId,
      targetUserId,
    });
  }
  /**
   * Refresh one flagged share: look up the grant and owner, decrypt the
   * original (owner DEK or system key), re-encrypt for `userId`, and clear
   * the needsReEncryption flag. All failure modes log and return silently.
   */
  private async reEncryptSharedCredential(
    sharedCredId: number,
    userId: string,
  ): Promise<void> {
    try {
      const sharedCred = await db
        .select()
        .from(sharedCredentials)
        .where(eq(sharedCredentials.id, sharedCredId))
        .limit(1);
      if (sharedCred.length === 0) {
        databaseLogger.warn("Re-encrypt: shared credential not found", {
          operation: "reencrypt_not_found",
          sharedCredId,
        });
        return;
      }
      const cred = sharedCred[0];
      // Join through host_access to the host row to find the owner.
      const access = await db
        .select()
        .from(hostAccess)
        .innerJoin(sshData, eq(hostAccess.hostId, sshData.id))
        .where(eq(hostAccess.id, cred.hostAccessId))
        .limit(1);
      if (access.length === 0) {
        databaseLogger.warn("Re-encrypt: host access not found", {
          operation: "reencrypt_access_not_found",
          sharedCredId,
        });
        return;
      }
      const ownerId = access[0].ssh_data.userId;
      const userDEK = DataCrypto.getUserDataKey(userId);
      if (!userDEK) {
        databaseLogger.warn("Re-encrypt: user DEK not available", {
          operation: "reencrypt_user_offline",
          sharedCredId,
          userId,
        });
        return;
      }
      const ownerDEK = DataCrypto.getUserDataKey(ownerId);
      let credentialData: CredentialData;
      if (ownerDEK) {
        credentialData = await this.getDecryptedCredential(
          cred.originalCredentialId,
          ownerId,
          ownerDEK,
        );
      } else {
        try {
          credentialData = await this.getDecryptedCredentialViaSystemKey(
            cred.originalCredentialId,
          );
        } catch (error) {
          // Leave the flag set; a later owner login migrates the credential.
          databaseLogger.warn(
            "Re-encrypt: system key decryption failed, credential may not be migrated yet",
            {
              operation: "reencrypt_system_key_failed",
              sharedCredId,
              error: error instanceof Error ? error.message : "Unknown error",
            },
          );
          return;
        }
      }
      const encryptedForTarget = this.encryptCredentialForUser(
        credentialData,
        userId,
        userDEK,
        cred.hostAccessId,
      );
      await db
        .update(sharedCredentials)
        .set({
          ...encryptedForTarget,
          needsReEncryption: false,
          updatedAt: new Date().toISOString(),
        })
        .where(eq(sharedCredentials.id, sharedCredId));
    } catch (error) {
      databaseLogger.error("Failed to re-encrypt shared credential", error, {
        operation: "reencrypt_shared_credential",
        sharedCredId,
        userId,
      });
    }
  }
}
export { SharedCredentialManager };

View File

@@ -2,7 +2,12 @@ import { getDb, DatabaseSaveTrigger } from "../database/db/index.js";
import { DataCrypto } from "./data-crypto.js";
import type { SQLiteTable } from "drizzle-orm/sqlite-core";
type TableName = "users" | "ssh_data" | "ssh_credentials" | "recent_activity";
type TableName =
| "users"
| "ssh_data"
| "ssh_credentials"
| "recent_activity"
| "socks5_proxy_presets";
class SimpleDBOps {
static async insert<T extends Record<string, unknown>>(
@@ -23,6 +28,20 @@ class SimpleDBOps {
userDataKey,
);
if (tableName === "ssh_credentials") {
const { SystemCrypto } = await import("./system-crypto.js");
const systemCrypto = SystemCrypto.getInstance();
const systemKey = await systemCrypto.getCredentialSharingKey();
const systemEncrypted = await DataCrypto.encryptRecordWithSystemKey(
tableName,
dataWithTempId,
systemKey,
);
Object.assign(encryptedData, systemEncrypted);
}
if (!data.id) {
delete encryptedData.id;
}
@@ -105,6 +124,20 @@ class SimpleDBOps {
userDataKey,
);
if (tableName === "ssh_credentials") {
const { SystemCrypto } = await import("./system-crypto.js");
const systemCrypto = SystemCrypto.getInstance();
const systemKey = await systemCrypto.getCredentialSharingKey();
const systemEncrypted = await DataCrypto.encryptRecordWithSystemKey(
tableName,
data,
systemKey,
);
Object.assign(encryptedData, systemEncrypted);
}
const result = await getDb()
.update(table)
.set(encryptedData)

View File

@@ -0,0 +1,131 @@
import { SocksClient } from "socks";
import type { SocksClientOptions } from "socks";
import net from "net";
import { sshLogger } from "./logger.js";
import type { ProxyNode } from "../../types/index.js";
/**
 * SOCKS5 proxying options for an outbound SSH connection.
 * Either a single proxy (socks5Host/socks5Port) or a multi-hop chain may be
 * configured; a non-empty chain takes precedence over the single host.
 */
export interface SOCKS5Config {
  useSocks5?: boolean; // master switch; when falsy no proxying is attempted
  socks5Host?: string; // single-proxy mode: proxy hostname/IP
  socks5Port?: number; // single-proxy mode: proxy port (defaults to 1080)
  socks5Username?: string; // optional SOCKS5 authentication
  socks5Password?: string; // optional SOCKS5 authentication
  socks5ProxyChain?: ProxyNode[]; // multi-hop mode; overrides host/port
}
/**
 * Creates a SOCKS5 connection through a single proxy or a chain of proxies
 * @param targetHost - Target SSH server hostname/IP
 * @param targetPort - Target SSH server port
 * @param socks5Config - SOCKS5 proxy configuration
 * @returns Promise with connected socket, or null when SOCKS5 is disabled
 *          or no proxy/chain is configured
 */
export async function createSocks5Connection(
  targetHost: string,
  targetPort: number,
  socks5Config: SOCKS5Config,
): Promise<net.Socket | null> {
  if (!socks5Config.useSocks5) {
    return null;
  }

  // A configured (non-empty) chain takes precedence over a single proxy.
  const chain = socks5Config.socks5ProxyChain;
  if (chain && chain.length > 0) {
    return createProxyChainConnection(targetHost, targetPort, chain);
  }

  if (socks5Config.socks5Host) {
    return createSingleProxyConnection(targetHost, targetPort, socks5Config);
  }

  // SOCKS5 requested but neither a chain nor a host was supplied.
  return null;
}
/**
 * Creates a connection to the target through one SOCKS5 proxy.
 * Resolves with the proxied socket; logs and rethrows on failure.
 */
async function createSingleProxyConnection(
  targetHost: string,
  targetPort: number,
  socks5Config: SOCKS5Config,
): Promise<net.Socket> {
  // Default SOCKS port when none is configured.
  const proxyPort = socks5Config.socks5Port || 1080;
  const options: SocksClientOptions = {
    proxy: {
      host: socks5Config.socks5Host!,
      port: proxyPort,
      type: 5,
      userId: socks5Config.socks5Username,
      password: socks5Config.socks5Password,
    },
    command: "connect",
    destination: {
      host: targetHost,
      port: targetPort,
    },
  };

  try {
    const connection = await SocksClient.createConnection(options);
    return connection.socket;
  } catch (error) {
    sshLogger.error("SOCKS5 connection failed", error, {
      operation: "socks5_connect_failed",
      proxyHost: socks5Config.socks5Host,
      proxyPort,
      targetHost,
      targetPort,
      errorMessage: error instanceof Error ? error.message : "Unknown error",
    });
    throw error;
  }
}
/**
 * Creates a connection through a chain of SOCKS proxies
 * Each proxy in the chain connects through the previous one
 *
 * @param targetHost - Final destination hostname/IP
 * @param targetPort - Final destination port
 * @param proxyChain - Ordered hops; the first entry is the entry node
 * @returns Connected socket on success
 * @throws Error when the chain is empty or any hop fails
 */
async function createProxyChainConnection(
  targetHost: string,
  targetPort: number,
  proxyChain: ProxyNode[],
): Promise<net.Socket> {
  if (proxyChain.length === 0) {
    throw new Error("Proxy chain is empty");
  }
  // Human-readable chain description — previously computed but unused;
  // now included in the failure log for diagnostics.
  const chainPath = proxyChain.map((p) => `${p.host}:${p.port}`).join(" → ");
  try {
    const info = await SocksClient.createConnectionChain({
      proxies: proxyChain.map((p) => ({
        host: p.host,
        port: p.port,
        type: p.type,
        userId: p.username,
        password: p.password,
        timeout: 10000, // per-hop connect timeout (ms)
      })),
      command: "connect",
      destination: {
        host: targetHost,
        port: targetPort,
      },
    });
    return info.socket;
  } catch (error) {
    sshLogger.error("SOCKS proxy chain connection failed", error, {
      operation: "socks5_chain_connect_failed",
      chainLength: proxyChain.length,
      chainPath,
      targetHost,
      targetPort,
      errorMessage: error instanceof Error ? error.message : "Unknown error",
    });
    throw error;
  }
}

View File

@@ -8,6 +8,7 @@ class SystemCrypto {
private jwtSecret: string | null = null;
private databaseKey: Buffer | null = null;
private internalAuthToken: string | null = null;
private credentialSharingKey: Buffer | null = null;
private constructor() {}
@@ -158,6 +159,48 @@ class SystemCrypto {
return this.internalAuthToken!;
}
async initializeCredentialSharingKey(): Promise<void> {
try {
const dataDir = process.env.DATA_DIR || "./db/data";
const envPath = path.join(dataDir, ".env");
const envKey = process.env.CREDENTIAL_SHARING_KEY;
if (envKey && envKey.length >= 64) {
this.credentialSharingKey = Buffer.from(envKey, "hex");
return;
}
try {
const envContent = await fs.readFile(envPath, "utf8");
const csKeyMatch = envContent.match(/^CREDENTIAL_SHARING_KEY=(.+)$/m);
if (csKeyMatch && csKeyMatch[1] && csKeyMatch[1].length >= 64) {
this.credentialSharingKey = Buffer.from(csKeyMatch[1], "hex");
process.env.CREDENTIAL_SHARING_KEY = csKeyMatch[1];
return;
}
} catch (fileError) {}
await this.generateAndGuideCredentialSharingKey();
} catch (error) {
databaseLogger.error(
"Failed to initialize credential sharing key",
error,
{
operation: "cred_sharing_key_init_failed",
dataDir: process.env.DATA_DIR || "./db/data",
},
);
throw new Error("Credential sharing key initialization failed");
}
}
async getCredentialSharingKey(): Promise<Buffer> {
if (!this.credentialSharingKey) {
await this.initializeCredentialSharingKey();
}
return this.credentialSharingKey!;
}
private async generateAndGuideUser(): Promise<void> {
const newSecret = crypto.randomBytes(32).toString("hex");
const instanceId = crypto.randomBytes(8).toString("hex");
@@ -210,6 +253,26 @@ class SystemCrypto {
);
}
private async generateAndGuideCredentialSharingKey(): Promise<void> {
const newKey = crypto.randomBytes(32);
const newKeyHex = newKey.toString("hex");
const instanceId = crypto.randomBytes(8).toString("hex");
this.credentialSharingKey = newKey;
await this.updateEnvFile("CREDENTIAL_SHARING_KEY", newKeyHex);
databaseLogger.success(
"Credential sharing key auto-generated and saved to .env",
{
operation: "cred_sharing_key_auto_generated",
instanceId,
envVarName: "CREDENTIAL_SHARING_KEY",
note: "Used for offline credential sharing - no restart required",
},
);
}
async validateJWTSecret(): Promise<boolean> {
try {
const secret = await this.getJWTSecret();

View File

@@ -177,15 +177,33 @@ class UserDataImport {
continue;
}
const tempId = `import-ssh-${targetUserId}-${Date.now()}-${imported}`;
const existing = await getDb()
.select()
.from(sshData)
.where(
and(
eq(sshData.userId, targetUserId),
eq(sshData.ip, host.ip as string),
eq(sshData.port, host.port as number),
eq(sshData.username, host.username as string),
),
);
if (existing.length > 0 && !options.replaceExisting) {
skipped++;
continue;
}
const newHostData = {
...host,
id: tempId,
userId: targetUserId,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
};
if (existing.length === 0) {
newHostData.createdAt = new Date().toISOString();
}
let processedHostData = newHostData;
if (options.userDataKey) {
processedHostData = DataCrypto.encryptRecord(
@@ -198,9 +216,18 @@ class UserDataImport {
delete processedHostData.id;
await getDb()
.insert(sshData)
.values(processedHostData as unknown as typeof sshData.$inferInsert);
if (existing.length > 0 && options.replaceExisting) {
await getDb()
.update(sshData)
.set(processedHostData as unknown as typeof sshData.$inferInsert)
.where(eq(sshData.id, existing[0].id));
} else {
await getDb()
.insert(sshData)
.values(
processedHostData as unknown as typeof sshData.$inferInsert,
);
}
imported++;
} catch (error) {
errors.push(
@@ -233,17 +260,33 @@ class UserDataImport {
continue;
}
const tempCredId = `import-cred-${targetUserId}-${Date.now()}-${imported}`;
const existing = await getDb()
.select()
.from(sshCredentials)
.where(
and(
eq(sshCredentials.userId, targetUserId),
eq(sshCredentials.name, credential.name as string),
),
);
if (existing.length > 0 && !options.replaceExisting) {
skipped++;
continue;
}
const newCredentialData = {
...credential,
id: tempCredId,
userId: targetUserId,
usageCount: 0,
lastUsed: null,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
};
if (existing.length === 0) {
newCredentialData.usageCount = 0;
newCredentialData.lastUsed = null;
newCredentialData.createdAt = new Date().toISOString();
}
let processedCredentialData = newCredentialData;
if (options.userDataKey) {
processedCredentialData = DataCrypto.encryptRecord(
@@ -256,11 +299,20 @@ class UserDataImport {
delete processedCredentialData.id;
await getDb()
.insert(sshCredentials)
.values(
processedCredentialData as unknown as typeof sshCredentials.$inferInsert,
);
if (existing.length > 0 && options.replaceExisting) {
await getDb()
.update(sshCredentials)
.set(
processedCredentialData as unknown as typeof sshCredentials.$inferInsert,
)
.where(eq(sshCredentials.id, existing[0].id));
} else {
await getDb()
.insert(sshCredentials)
.values(
processedCredentialData as unknown as typeof sshCredentials.$inferInsert,
);
}
imported++;
} catch (error) {
errors.push(

View File

@@ -0,0 +1,155 @@
import * as React from "react";
import * as AlertDialogPrimitive from "@radix-ui/react-alert-dialog";
import { cn } from "@/lib/utils";
import { buttonVariants } from "@/components/ui/button";
// Root state container for the alert dialog; tags the Radix Root with a
// data-slot attribute for slot-based styling.
function AlertDialog({
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Root>) {
  return <AlertDialogPrimitive.Root data-slot="alert-dialog" {...props} />;
}
// Element that opens the dialog; thin pass-through to the Radix Trigger.
function AlertDialogTrigger({
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Trigger>) {
  return (
    <AlertDialogPrimitive.Trigger data-slot="alert-dialog-trigger" {...props} />
  );
}
// Renders dialog children into a portal; thin pass-through to Radix Portal.
function AlertDialogPortal({
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Portal>) {
  return (
    <AlertDialogPrimitive.Portal data-slot="alert-dialog-portal" {...props} />
  );
}
// Full-screen backdrop with open/close fade animations; caller classes are
// merged after the defaults via cn().
function AlertDialogOverlay({
  className,
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Overlay>) {
  return (
    <AlertDialogPrimitive.Overlay
      data-slot="alert-dialog-overlay"
      className={cn(
        "data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-overlay",
        className,
      )}
      {...props}
    />
  );
}
// Centered dialog panel; always renders its own Portal and Overlay so
// callers only compose Content with Header/Footer/etc.
function AlertDialogContent({
  className,
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Content>) {
  return (
    <AlertDialogPortal>
      <AlertDialogOverlay />
      <AlertDialogPrimitive.Content
        data-slot="alert-dialog-content"
        className={cn(
          "bg-background data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 fixed top-[50%] left-[50%] z-50 grid w-full max-w-[calc(100%-2rem)] translate-x-[-50%] translate-y-[-50%] gap-4 rounded-lg border p-6 shadow-lg duration-200 sm:max-w-lg",
          className,
        )}
        {...props}
      />
    </AlertDialogPortal>
  );
}
// Layout wrapper for title/description: centered on mobile, left-aligned
// on larger screens.
function AlertDialogHeader({
  className,
  ...props
}: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="alert-dialog-header"
      className={cn("flex flex-col gap-2 text-center sm:text-left", className)}
      {...props}
    />
  );
}
// Action-button row: stacked (reversed) on mobile, right-aligned row on
// larger screens.
function AlertDialogFooter({
  className,
  ...props
}: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="alert-dialog-footer"
      className={cn(
        "flex flex-col-reverse gap-2 sm:flex-row sm:justify-end",
        className,
      )}
      {...props}
    />
  );
}
// Accessible dialog title (Radix wires it to aria-labelledby).
function AlertDialogTitle({
  className,
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Title>) {
  return (
    <AlertDialogPrimitive.Title
      data-slot="alert-dialog-title"
      className={cn("text-lg font-semibold", className)}
      {...props}
    />
  );
}
// Accessible supporting text (Radix wires it to aria-describedby).
function AlertDialogDescription({
  className,
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Description>) {
  return (
    <AlertDialogPrimitive.Description
      data-slot="alert-dialog-description"
      className={cn("text-muted-foreground text-sm", className)}
      {...props}
    />
  );
}
// Confirm button; styled with the default button variant.
function AlertDialogAction({
  className,
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Action>) {
  return (
    <AlertDialogPrimitive.Action
      className={cn(buttonVariants(), className)}
      {...props}
    />
  );
}
// Dismiss button; styled with the outline button variant.
function AlertDialogCancel({
  className,
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Cancel>) {
  return (
    <AlertDialogPrimitive.Cancel
      className={cn(buttonVariants({ variant: "outline" }), className)}
      {...props}
    />
  );
}
export {
AlertDialog,
AlertDialogPortal,
AlertDialogOverlay,
AlertDialogTrigger,
AlertDialogContent,
AlertDialogHeader,
AlertDialogFooter,
AlertDialogTitle,
AlertDialogDescription,
AlertDialogAction,
AlertDialogCancel,
};

View File

@@ -15,7 +15,7 @@ const badgeVariants = cva(
secondary:
"border-transparent bg-secondary text-secondary-foreground [a&]:hover:bg-secondary/90",
destructive:
"border-transparent bg-destructive text-white [a&]:hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
"border-transparent bg-destructive text-foreground [a&]:hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
outline:
"text-foreground [a&]:hover:bg-accent [a&]:hover:text-accent-foreground",
},

View File

@@ -13,7 +13,7 @@ const buttonVariants = cva(
default:
"bg-primary text-primary-foreground shadow-xs hover:bg-primary/90",
destructive:
"bg-destructive text-white shadow-xs hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
"bg-destructive text-foreground shadow-xs hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
outline:
"border bg-background shadow-xs hover:bg-accent hover:text-accent-foreground dark:bg-input/30 dark:border-input dark:hover:bg-input/50",
secondary:

View File

@@ -7,7 +7,7 @@ function Card({ className, ...props }: React.ComponentProps<"div">) {
<div
data-slot="card"
className={cn(
"bg-card text-card-foreground flex flex-col gap-6 rounded-xl border py-6 shadow-sm",
"bg-elevated text-foreground flex flex-col gap-6 rounded-lg border-2 border-edge py-6 shadow-sm",
className,
)}
{...props}

View File

@@ -90,7 +90,7 @@ function CommandList({
<CommandPrimitive.List
data-slot="command-list"
className={cn(
"max-h-[300px] scroll-py-1 overflow-x-hidden overflow-y-auto",
"max-h-[300px] scroll-py-1 overflow-x-hidden overflow-y-auto thin-scrollbar",
className,
)}
{...props}

View File

@@ -36,7 +36,7 @@ function DialogOverlay({
<DialogPrimitive.Overlay
data-slot="dialog-overlay"
className={cn(
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-black/50",
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-overlay",
className,
)}
{...props}

View File

@@ -8,7 +8,7 @@ function Input({ className, type, ...props }: React.ComponentProps<"input">) {
type={type}
data-slot="input"
className={cn(
"file:text-foreground placeholder:text-muted-foreground selection:bg-primary selection:text-primary-foreground dark:bg-input/30 border-input flex h-9 w-full min-w-0 rounded-md border bg-transparent px-3 py-1 text-base shadow-xs transition-[color,box-shadow] duration-200 outline-none file:inline-flex file:h-7 file:border-0 file:bg-transparent file:text-sm file:font-medium disabled:pointer-events-none disabled:cursor-not-allowed disabled:opacity-50 md:text-sm",
"file:text-foreground placeholder:text-muted-foreground selection:bg-primary selection:text-primary-foreground bg-elevated dark:bg-input/30 border-input flex h-9 w-full min-w-0 rounded-md border px-3 py-1 text-base shadow-xs transition-[color,box-shadow] duration-200 outline-none file:inline-flex file:h-7 file:border-0 file:bg-transparent file:text-sm file:font-medium disabled:pointer-events-none disabled:cursor-not-allowed disabled:opacity-50 md:text-sm",
"focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px]",
"aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive",
className,

View File

@@ -37,13 +37,13 @@ function ResizableHandle({
<ResizablePrimitive.PanelResizeHandle
data-slot="resizable-handle"
className={cn(
"relative flex w-1 items-center justify-center after:absolute after:inset-y-0 after:left-1/2 after:w-1 after:-translate-x-1/2 focus-visible:ring-1 focus-visible:ring-offset-1 focus-visible:outline-hidden data-[panel-group-direction=vertical]:h-1 data-[panel-group-direction=vertical]:w-full data-[panel-group-direction=vertical]:after:left-0 data-[panel-group-direction=vertical]:after:h-1 data-[panel-group-direction=vertical]:after:w-full data-[panel-group-direction=vertical]:after:translate-x-0 data-[panel-group-direction=vertical]:after:-translate-y-1/2 [&[data-panel-group-direction=vertical]>div]:rotate-90 bg-dark-border-hover hover:bg-dark-active active:bg-dark-pressed transition-colors duration-150",
"relative flex w-1 items-center justify-center after:absolute after:inset-y-0 after:left-1/2 after:w-1 after:-translate-x-1/2 focus-visible:ring-1 focus-visible:ring-offset-1 focus-visible:outline-hidden data-[panel-group-direction=vertical]:h-1 data-[panel-group-direction=vertical]:w-full data-[panel-group-direction=vertical]:after:left-0 data-[panel-group-direction=vertical]:after:h-1 data-[panel-group-direction=vertical]:after:w-full data-[panel-group-direction=vertical]:after:translate-x-0 data-[panel-group-direction=vertical]:after:-translate-y-1/2 [&[data-panel-group-direction=vertical]>div]:rotate-90 bg-edge-hover hover:bg-interact active:bg-pressed transition-colors duration-150",
className,
)}
{...props}
>
{withHandle && (
<div className="bg-dark-border-hover hover:bg-dark-active active:bg-dark-pressed z-10 flex h-4 w-3 items-center justify-center rounded-xs border transition-colors duration-150">
<div className="bg-edge-hover hover:bg-interact active:bg-pressed z-10 flex h-4 w-3 items-center justify-center rounded-xs border transition-colors duration-150">
<GripVerticalIcon className="size-2.5" />
</div>
)}

View File

@@ -59,7 +59,7 @@ function SelectContent({
<SelectPrimitive.Content
data-slot="select-content"
className={cn(
"bg-popover text-popover-foreground data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 relative z-50 max-h-(--radix-select-content-available-height) min-w-[8rem] origin-(--radix-select-content-transform-origin) overflow-x-hidden overflow-y-auto rounded-md border shadow-md",
"bg-popover text-popover-foreground data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 relative z-50 max-h-(--radix-select-content-available-height) min-w-[8rem] origin-(--radix-select-content-transform-origin) overflow-x-hidden overflow-y-auto thin-scrollbar rounded-md border shadow-md",
position === "popper" &&
"data-[side=bottom]:translate-y-1 data-[side=left]:-translate-x-1 data-[side=right]:translate-x-1 data-[side=top]:-translate-y-1",
className,

View File

@@ -34,7 +34,7 @@ function SheetOverlay({
<SheetPrimitive.Overlay
data-slot="sheet-overlay"
className={cn(
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-black/50 data-[state=closed]:pointer-events-none",
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-overlay data-[state=closed]:pointer-events-none",
className,
)}
{...props}

View File

@@ -365,7 +365,7 @@ function SidebarContent({ className, ...props }: React.ComponentProps<"div">) {
data-slot="sidebar-content"
data-sidebar="content"
className={cn(
"flex min-h-0 flex-1 flex-col gap-2 overflow-auto group-data-[collapsible=icon]:overflow-hidden",
"flex min-h-0 flex-1 flex-col gap-2 overflow-auto thin-scrollbar group-data-[collapsible=icon]:overflow-hidden",
className,
)}
{...props}

View File

@@ -51,7 +51,7 @@ function Slider({
<SliderPrimitive.Thumb
data-slot="slider-thumb"
key={index}
className="border-primary ring-ring/50 block size-4 shrink-0 rounded-full border bg-white shadow-sm transition-[color,box-shadow] hover:ring-4 focus-visible:ring-4 focus-visible:outline-hidden disabled:pointer-events-none disabled:opacity-50"
className="border-primary ring-ring/50 block size-4 shrink-0 rounded-full border bg-elevated shadow-sm transition-[color,box-shadow] hover:ring-4 focus-visible:ring-4 focus-visible:outline-hidden disabled:pointer-events-none disabled:opacity-50"
/>
))}
</SliderPrimitive.Root>

View File

@@ -1,4 +1,4 @@
import { useTheme } from "next-themes";
import { useTheme } from "@/components/theme-provider";
import { Toaster as Sonner, type ToasterProps, toast } from "sonner";
import { useRef } from "react";

View File

@@ -6,7 +6,7 @@ function Table({ className, ...props }: React.ComponentProps<"table">) {
return (
<div
data-slot="table-container"
className="relative w-full overflow-x-auto"
className="relative w-full overflow-x-auto thin-scrollbar"
>
<table
data-slot="table"

View File

@@ -9,7 +9,7 @@ const Textarea = React.forwardRef<HTMLTextAreaElement, TextareaProps>(
return (
<textarea
className={cn(
"flex min-h-[80px] w-full rounded-md border border-input bg-background px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50",
"flex min-h-[80px] w-full rounded-md border border-input bg-transparent dark:bg-input/30 px-3 py-2 text-sm shadow-xs transition-[color,box-shadow] duration-200 outline-none placeholder:text-muted-foreground focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px] disabled:cursor-not-allowed disabled:opacity-50 disabled:pointer-events-none",
className,
)}
ref={ref}

View File

@@ -56,6 +56,62 @@ export const TERMINAL_THEMES: Record<string, TerminalTheme> = {
},
},
termixDark: {
name: "Termix Dark",
category: "dark",
colors: {
background: "#18181b",
foreground: "#f7f7f7",
cursor: "#f7f7f7",
cursorAccent: "#18181b",
selectionBackground: "#3a3a3d",
black: "#2e3436",
red: "#cc0000",
green: "#4e9a06",
yellow: "#c4a000",
blue: "#3465a4",
magenta: "#75507b",
cyan: "#06989a",
white: "#d3d7cf",
brightBlack: "#555753",
brightRed: "#ef2929",
brightGreen: "#8ae234",
brightYellow: "#fce94f",
brightBlue: "#729fcf",
brightMagenta: "#ad7fa8",
brightCyan: "#34e2e2",
brightWhite: "#eeeeec",
},
},
termixLight: {
name: "Termix Light",
category: "light",
colors: {
background: "#ffffff",
foreground: "#18181b",
cursor: "#18181b",
cursorAccent: "#ffffff",
selectionBackground: "#d1d5db",
black: "#18181b",
red: "#dc2626",
green: "#16a34a",
yellow: "#ca8a04",
blue: "#2563eb",
magenta: "#9333ea",
cyan: "#0891b2",
white: "#f4f4f5",
brightBlack: "#71717a",
brightRed: "#ef4444",
brightGreen: "#22c55e",
brightYellow: "#eab308",
brightBlue: "#3b82f6",
brightMagenta: "#a855f7",
brightCyan: "#06b6d4",
brightWhite: "#ffffff",
},
},
dracula: {
name: "Dracula",
category: "dark",
@@ -689,7 +745,7 @@ export const DEFAULT_TERMINAL_CONFIG = {
fontSize: 14,
fontFamily: "Caskaydia Cove Nerd Font Mono",
letterSpacing: 0,
lineHeight: 1.2,
lineHeight: 1.0,
theme: "termix",
scrollback: 10000,
@@ -705,6 +761,7 @@ export const DEFAULT_TERMINAL_CONFIG = {
startupSnippetId: null as number | null,
autoMosh: false,
moshCommand: "mosh-server new -s -l LANG=en_US.UTF-8",
sudoPasswordAutoFill: false,
};
export type TerminalConfigType = typeof DEFAULT_TERMINAL_CONFIG;

View File

@@ -1,4 +1,4 @@
import { useState } from "react";
import { useState, useEffect, useCallback } from "react";
import { toast } from "sonner";
interface ConfirmationOptions {
@@ -9,10 +9,47 @@ interface ConfirmationOptions {
variant?: "default" | "destructive";
}
interface ToastConfirmOptions {
confirmOnEnter?: boolean;
duration?: number;
}
export function useConfirmation() {
const [isOpen, setIsOpen] = useState(false);
const [options, setOptions] = useState<ConfirmationOptions | null>(null);
const [onConfirm, setOnConfirm] = useState<(() => void) | null>(null);
const [activeToastId, setActiveToastId] = useState<string | number | null>(null);
const [pendingConfirmCallback, setPendingConfirmCallback] = useState<(() => void) | null>(null);
const [pendingResolve, setPendingResolve] = useState<((value: boolean) => void) | null>(null);
const handleEnterKey = useCallback((event: KeyboardEvent) => {
if (event.key === "Enter" && activeToastId !== null) {
event.preventDefault();
event.stopPropagation();
if (pendingConfirmCallback) {
pendingConfirmCallback();
}
if (pendingResolve) {
pendingResolve(true);
}
toast.dismiss(activeToastId);
setActiveToastId(null);
setPendingConfirmCallback(null);
setPendingResolve(null);
}
}, [activeToastId, pendingConfirmCallback, pendingResolve]);
useEffect(() => {
if (activeToastId !== null) {
// Use capture phase to intercept Enter before terminal receives it
window.addEventListener("keydown", handleEnterKey, true);
return () => {
window.removeEventListener("keydown", handleEnterKey, true);
};
}
}, [activeToastId, handleEnterKey]);
const confirm = (opts: ConfirmationOptions, callback: () => void) => {
setOptions(opts);
@@ -36,24 +73,69 @@ export function useConfirmation() {
};
const confirmWithToast = (
message: string,
callback: () => void,
variant: "default" | "destructive" = "default",
) => {
const actionText = variant === "destructive" ? "Delete" : "Confirm";
const cancelText = "Cancel";
opts: ConfirmationOptions | string,
callback?: () => void,
variantOrConfirmLabel: "default" | "destructive" | string = "Confirm",
cancelLabel: string = "Cancel",
toastOptions: ToastConfirmOptions = { confirmOnEnter: false },
): Promise<boolean> => {
return new Promise((resolve) => {
const isVariant =
variantOrConfirmLabel === "default" ||
variantOrConfirmLabel === "destructive";
const confirmLabel = isVariant ? "Confirm" : variantOrConfirmLabel;
toast(message, {
action: {
label: actionText,
onClick: callback,
},
cancel: {
label: cancelText,
onClick: () => {},
},
duration: 10000,
className: variant === "destructive" ? "border-red-500" : "",
const { confirmOnEnter = false, duration = 8000 } = toastOptions;
const handleToastConfirm = () => {
if (callback) callback();
resolve(true);
setActiveToastId(null);
setPendingConfirmCallback(null);
setPendingResolve(null);
};
const handleToastCancel = () => {
resolve(false);
setActiveToastId(null);
setPendingConfirmCallback(null);
setPendingResolve(null);
};
const message = typeof opts === "string" ? opts : opts.description;
const actualConfirmLabel = typeof opts === "object" && opts.confirmText ? opts.confirmText : confirmLabel;
const actualCancelLabel = typeof opts === "object" && opts.cancelText ? opts.cancelText : cancelLabel;
const toastId = toast(message, {
duration,
action: {
label: confirmOnEnter ? `${actualConfirmLabel}` : actualConfirmLabel,
onClick: handleToastConfirm,
},
cancel: {
label: actualCancelLabel,
onClick: handleToastCancel,
},
onDismiss: () => {
setActiveToastId(null);
setPendingConfirmCallback(null);
setPendingResolve(null);
},
onAutoClose: () => {
resolve(false);
setActiveToastId(null);
setPendingConfirmCallback(null);
setPendingResolve(null);
},
} as any);
if (confirmOnEnter) {
setActiveToastId(toastId);
setPendingConfirmCallback(() => () => {
if (callback) callback();
});
setPendingResolve(() => resolve);
}
});
};

View File

@@ -0,0 +1,71 @@
import { useEffect, useState, useCallback } from "react";
import { isElectron } from "@/ui/main-axios";
interface ServiceWorkerState {
isSupported: boolean;
isRegistered: boolean;
updateAvailable: boolean;
}
/**
* Hook to manage PWA Service Worker registration.
* Only registers in production web environment (not in Electron).
*/
export function useServiceWorker(): ServiceWorkerState {
  // Tracks support/registration/update status; starts all-false and is
  // updated incrementally as registration progresses.
  const [state, setState] = useState<ServiceWorkerState>({
    isSupported: false,
    isRegistered: false,
    updateAvailable: false,
  });
  // Fired on the registration's "updatefound" event: watches the freshly
  // installing worker and flags `updateAvailable` once it reaches
  // "installed" while an older worker still controls the page.
  const handleUpdateFound = useCallback(
    (registration: ServiceWorkerRegistration) => {
      const newWorker = registration.installing;
      if (!newWorker) return;
      newWorker.addEventListener("statechange", () => {
        if (
          newWorker.state === "installed" &&
          navigator.serviceWorker.controller
        ) {
          // controller != null means a previous SW is active, so this
          // "installed" worker is an update, not a first install.
          setState((prev) => ({ ...prev, updateAvailable: true }));
          console.log("[SW] Update available");
        }
      });
    },
    [],
  );
  useEffect(() => {
    // Supported only in a real browser build: SW API present, not running
    // inside Electron, and a production bundle (import.meta.env.PROD).
    const isSupported =
      "serviceWorker" in navigator && !isElectron() && import.meta.env.PROD;
    setState((prev) => ({ ...prev, isSupported }));
    if (!isSupported) return;
    const registerSW = async () => {
      try {
        const registration = await navigator.serviceWorker.register("/sw.js");
        console.log("[SW] Registered:", registration.scope);
        setState((prev) => ({ ...prev, isRegistered: true }));
        registration.addEventListener("updatefound", () =>
          handleUpdateFound(registration),
        );
      } catch (error) {
        // Registration failure is non-fatal: the app keeps working without
        // offline support; we only log the error.
        console.error("[SW] Registration failed:", error);
      }
    };
    // Defer registration until the page has fully loaded so it does not
    // compete with initial resource loading.
    if (document.readyState === "complete") {
      registerSW();
    } else {
      window.addEventListener("load", registerSW);
      // Cleanup only needed on this branch; the "complete" branch adds no
      // window listener.
      return () => window.removeEventListener("load", registerSW);
    }
  }, [handleUpdateFound]);
  return state;
}

View File

@@ -2,18 +2,65 @@ import i18n from "i18next";
import { initReactI18next } from "react-i18next";
import LanguageDetector from "i18next-browser-languagedetector";
import enTranslation from "../locales/en/translation.json";
import zhTranslation from "../locales/zh/translation.json";
import deTranslation from "../locales/de/translation.json";
import ptbrTranslation from "../locales/pt-BR/translation.json";
import ruTranslation from "../locales/ru/translation.json";
import frTranslation from "../locales/fr/translation.json";
import enTranslation from "../locales/en.json";
import zhTranslation from "../locales/zh.json";
import deTranslation from "../locales/de.json";
import ptTranslation from "../locales/pt.json";
import ruTranslation from "../locales/ru.json";
import frTranslation from "../locales/fr.json";
import koTranslation from "../locales/ko.json";
import itTranslation from "../locales/it.json";
import esTranslation from "../locales/es.json";
import hiTranslation from "../locales/hi.json";
import bnTranslation from "../locales/bn.json";
import jaTranslation from "../locales/ja.json";
import viTranslation from "../locales/vi.json";
import trTranslation from "../locales/tr.json";
import heTranslation from "../locales/he.json";
import arTranslation from "../locales/ar.json";
import plTranslation from "../locales/pl.json";
import nlTranslation from "../locales/nl.json";
import svTranslation from "../locales/sv.json";
import idTranslation from "../locales/id.json";
import thTranslation from "../locales/th.json";
import ukTranslation from "../locales/uk.json";
import csTranslation from "../locales/cs.json";
import roTranslation from "../locales/ro.json";
import elTranslation from "../locales/el.json";
import nbTranslation from "../locales/nb.json";
i18n
.use(LanguageDetector)
.use(initReactI18next)
.init({
supportedLngs: ["en", "zh", "de", "ptbr", "ru", "fr"],
supportedLngs: [
"en",
"zh",
"de",
"pt",
"ru",
"fr",
"ko",
"it",
"es",
"hi",
"bn",
"ja",
"vi",
"tr",
"he",
"ar",
"pl",
"nl",
"sv",
"id",
"th",
"uk",
"cs",
"ro",
"el",
"nb",
],
fallbackLng: "en",
debug: false,
@@ -35,8 +82,8 @@ i18n
de: {
translation: deTranslation,
},
ptbr: {
translation: ptbrTranslation,
pt: {
translation: ptTranslation,
},
ru: {
translation: ruTranslation,
@@ -44,6 +91,66 @@ i18n
fr: {
translation: frTranslation,
},
ko: {
translation: koTranslation,
},
it: {
translation: itTranslation,
},
es: {
translation: esTranslation,
},
hi: {
translation: hiTranslation,
},
bn: {
translation: bnTranslation,
},
ja: {
translation: jaTranslation,
},
vi: {
translation: viTranslation,
},
tr: {
translation: trTranslation,
},
he: {
translation: heTranslation,
},
ar: {
translation: arTranslation,
},
pl: {
translation: plTranslation,
},
nl: {
translation: nlTranslation,
},
sv: {
translation: svTranslation,
},
id: {
translation: idTranslation,
},
th: {
translation: thTranslation,
},
uk: {
translation: ukTranslation,
},
cs: {
translation: csTranslation,
},
ro: {
translation: roTranslation,
},
el: {
translation: elTranslation,
},
nb: {
translation: nbTranslation,
},
},
interpolation: {

View File

@@ -8,45 +8,76 @@
font-weight: 400;
color-scheme: light dark;
color: rgba(255, 255, 255, 0.87);
background-color: #09090b;
color: var(--foreground);
background-color: var(--bg-base);
font-synthesis: none;
text-rendering: optimizeLegibility;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
--radius: 0.625rem;
--background: oklch(1 0 0);
--foreground: oklch(0.141 0.005 285.823);
--card: oklch(1 0 0);
--card-foreground: oklch(0.141 0.005 285.823);
--popover: oklch(1 0 0);
--popover-foreground: oklch(0.141 0.005 285.823);
--primary: oklch(0.21 0.006 285.885);
--primary-foreground: oklch(0.985 0 0);
--secondary: oklch(0.967 0.001 286.375);
--secondary-foreground: oklch(0.21 0.006 285.885);
--muted: oklch(0.967 0.001 286.375);
--muted-foreground: oklch(0.552 0.016 285.938);
--accent: oklch(0.967 0.001 286.375);
--accent-foreground: oklch(0.21 0.006 285.885);
--destructive: oklch(0.577 0.245 27.325);
--border: oklch(0.92 0.004 286.32);
--input: oklch(0.92 0.004 286.32);
--ring: oklch(0.705 0.015 286.067);
--chart-1: oklch(0.646 0.222 41.116);
--chart-2: oklch(0.6 0.118 184.704);
--chart-3: oklch(0.398 0.07 227.392);
--chart-4: oklch(0.828 0.189 84.429);
--chart-5: oklch(0.769 0.188 70.08);
--sidebar: oklch(0.985 0 0);
--sidebar-foreground: oklch(0.141 0.005 285.823);
--sidebar-primary: oklch(0.21 0.006 285.885);
--sidebar-primary-foreground: oklch(0.985 0 0);
--sidebar-accent: oklch(0.967 0.001 286.375);
--sidebar-accent-foreground: oklch(0.21 0.006 285.885);
--sidebar-border: oklch(0.92 0.004 286.32);
--sidebar-ring: oklch(0.705 0.015 286.067);
--background: #ffffff;
--foreground: #18181b;
--card: #ffffff;
--card-foreground: #18181b;
--popover: #ffffff;
--popover-foreground: #18181b;
--primary: #27272a;
--primary-foreground: #fafafa;
--secondary: #f4f4f5;
--secondary-foreground: #27272a;
--muted: #f4f4f5;
--muted-foreground: #71717a;
--accent: #f4f4f5;
--accent-foreground: #27272a;
--destructive: #dc2626;
--border: #e4e4e7;
--input: #e4e4e7;
--ring: #a1a1aa;
--chart-1: #e76e50;
--chart-2: #2a9d8f;
--chart-3: #264653;
--chart-4: #e9c46a;
--chart-5: #f4a261;
--sidebar: #f9f9f9;
--sidebar-foreground: #18181b;
--sidebar-primary: #27272a;
--sidebar-primary-foreground: #fafafa;
--sidebar-accent: #f4f4f5;
--sidebar-accent-foreground: #27272a;
--sidebar-border: #e4e4e7;
--sidebar-ring: #a1a1aa;
--bg-base: #fcfcfc;
--bg-elevated: #ffffff;
--bg-surface: #f3f4f6;
--bg-surface-hover: #e5e7eb;
--bg-input: #ffffff;
--bg-deepest: #e5e7eb;
--bg-header: #eeeeef;
--bg-button: #f3f4f6;
--bg-active: #e5e7eb;
--bg-light: #fafafa;
--bg-subtle: #f5f5f5;
--bg-interact: #d1d5db;
--border-base: #e5e7eb;
--border-panel: #d1d5db;
--border-subtle: #f3f4f6;
--border-medium: #d1d5db;
--bg-hover: #f3f4f6;
--bg-hover-alt: #e5e7eb;
--bg-pressed: #d1d5db;
--border-hover: #d1d5db;
--border-active: #9ca3af;
--foreground-secondary: #334155;
--foreground-subtle: #94a3b8;
--scrollbar-thumb: #c1c1c3;
--scrollbar-thumb-hover: #a1a1a3;
--scrollbar-track: #f3f4f6;
--bg-overlay: rgba(0, 0, 0, 0.5);
}
@theme inline {
@@ -107,40 +138,99 @@
--color-dark-bg-panel: #1b1b1e;
--color-dark-border-panel: #222224;
--color-dark-bg-panel-hover: #232327;
--color-canvas: var(--bg-base);
--color-elevated: var(--bg-elevated);
--color-surface: var(--bg-surface);
--color-surface-hover: var(--bg-surface-hover);
--color-field: var(--bg-input);
--color-deepest: var(--bg-deepest);
--color-header: var(--bg-header);
--color-button: var(--bg-button);
--color-active: var(--bg-active);
--color-light: var(--bg-light);
--color-subtle: var(--bg-subtle);
--color-interact: var(--bg-interact);
--color-hover: var(--bg-hover);
--color-hover-alt: var(--bg-hover-alt);
--color-pressed: var(--bg-pressed);
--color-edge: var(--border-base);
--color-edge-panel: var(--border-panel);
--color-edge-subtle: var(--border-subtle);
--color-edge-medium: var(--border-medium);
--color-edge-hover: var(--border-hover);
--color-edge-active: var(--border-active);
--color-foreground-secondary: var(--foreground-secondary);
--color-foreground-subtle: var(--foreground-subtle);
--color-overlay: var(--bg-overlay);
}
.dark {
--background: oklch(0.141 0.005 285.823);
--foreground: oklch(0.985 0 0);
--card: oklch(0.21 0.006 285.885);
--card-foreground: oklch(0.985 0 0);
--popover: oklch(0.21 0.006 285.885);
--popover-foreground: oklch(0.985 0 0);
--primary: oklch(0.92 0.004 286.32);
--primary-foreground: oklch(0.21 0.006 285.885);
--secondary: oklch(0.274 0.006 286.033);
--secondary-foreground: oklch(0.985 0 0);
--muted: oklch(0.274 0.006 286.033);
--muted-foreground: oklch(0.705 0.015 286.067);
--accent: oklch(0.274 0.006 286.033);
--accent-foreground: oklch(0.985 0 0);
--destructive: oklch(0.704 0.191 22.216);
--border: oklch(1 0 0 / 10%);
--input: oklch(1 0 0 / 15%);
--ring: oklch(0.552 0.016 285.938);
--chart-1: oklch(0.488 0.243 264.376);
--chart-2: oklch(0.696 0.17 162.48);
--chart-3: oklch(0.769 0.188 70.08);
--chart-4: oklch(0.627 0.265 303.9);
--chart-5: oklch(0.645 0.246 16.439);
--sidebar: oklch(0.21 0.006 285.885);
--sidebar-foreground: oklch(0.985 0 0);
--sidebar-primary: oklch(0.488 0.243 264.376);
--sidebar-primary-foreground: oklch(0.985 0 0);
--sidebar-accent: oklch(0.274 0.006 286.033);
--sidebar-accent-foreground: oklch(0.985 0 0);
--sidebar-border: oklch(1 0 0 / 10%);
--sidebar-ring: oklch(0.552 0.016 285.938);
--background: #09090b;
--foreground: #fafafa;
--card: #18181b;
--card-foreground: #fafafa;
--popover: #27272a;
--popover-foreground: #fafafa;
--primary: #e4e4e7;
--primary-foreground: #27272a;
--secondary: #3f3f46;
--secondary-foreground: #fafafa;
--muted: #27272a;
--muted-foreground: #9ca3af;
--accent: #3f3f46;
--accent-foreground: #fafafa;
--destructive: #f87171;
--border: #ffffff1a;
--input: #ffffff26;
--ring: #71717a;
--chart-1: #3b82f6;
--chart-2: #34d399;
--chart-3: #f4a261;
--chart-4: #a855f7;
--chart-5: #f43f5e;
--sidebar: #18181b;
--sidebar-foreground: #fafafa;
--sidebar-primary: #3b82f6;
--sidebar-primary-foreground: #fafafa;
--sidebar-accent: #3f3f46;
--sidebar-accent-foreground: #fafafa;
--sidebar-border: #ffffff1a;
--sidebar-ring: #71717a;
--bg-base: #18181b;
--bg-elevated: #0e0e10;
--bg-surface: #1b1b1e;
--bg-surface-hover: #232327;
--bg-input: #222225;
--bg-deepest: #09090b;
--bg-header: #131316;
--bg-button: #23232a;
--bg-active: #1d1d1f;
--bg-light: #141416;
--bg-subtle: #101014;
--bg-interact: #2a2a2c;
--border-base: #303032;
--border-panel: #222224;
--border-subtle: #5a5a5d;
--border-medium: #373739;
--bg-hover: #2d2d30;
--bg-hover-alt: #2a2a2d;
--bg-pressed: #1a1a1c;
--border-hover: #434345;
--border-active: #2d2d30;
--foreground-secondary: #d1d5db;
--foreground-subtle: #6b7280;
--scrollbar-thumb: #434345;
--scrollbar-thumb-hover: #5a5a5d;
--scrollbar-track: #18181b;
--bg-overlay: rgba(0, 0, 0, 0.7);
}
@layer base {
@@ -160,23 +250,7 @@
.thin-scrollbar {
scrollbar-width: thin;
scrollbar-color: #303032 transparent;
}
.thin-scrollbar::-webkit-scrollbar {
height: 6px;
width: 6px;
}
.thin-scrollbar::-webkit-scrollbar-track {
background: transparent;
}
.thin-scrollbar::-webkit-scrollbar-thumb {
background-color: #303032;
border-radius: 9999px;
border: 2px solid transparent;
background-clip: content-box;
scrollbar-color: var(--scrollbar-thumb) var(--scrollbar-track);
}
.thin-scrollbar::-webkit-scrollbar {
@@ -185,19 +259,37 @@
}
.thin-scrollbar::-webkit-scrollbar-track {
background: #18181b;
background: var(--scrollbar-track);
}
.thin-scrollbar::-webkit-scrollbar-thumb {
background: #434345;
background: var(--scrollbar-thumb);
border-radius: 3px;
}
.thin-scrollbar::-webkit-scrollbar-thumb:hover {
background: #5a5a5d;
background: var(--scrollbar-thumb-hover);
}
.thin-scrollbar {
.skinny-scrollbar {
scrollbar-width: thin;
scrollbar-color: #434345 #18181b;
scrollbar-color: var(--scrollbar-thumb) transparent;
}
.skinny-scrollbar::-webkit-scrollbar {
width: 4px;
height: 4px;
}
.skinny-scrollbar::-webkit-scrollbar-track {
background: transparent;
}
.skinny-scrollbar::-webkit-scrollbar-thumb {
background: var(--scrollbar-thumb);
border-radius: 2px;
}
.skinny-scrollbar::-webkit-scrollbar-thumb:hover {
background: var(--scrollbar-thumb-hover);
}

View File

@@ -0,0 +1,263 @@
// ANSI escape sequences used to colorize matched spans in terminal output.
// `as const` freezes the literal types so the codes can be composed safely.
const ANSI_CODES = {
  reset: "\x1b[0m", // clears all colors/styles back to the terminal default
  colors: {
    red: "\x1b[31m",
    green: "\x1b[32m",
    yellow: "\x1b[33m",
    blue: "\x1b[34m",
    magenta: "\x1b[35m",
    cyan: "\x1b[36m",
    white: "\x1b[37m",
    brightBlack: "\x1b[90m",
    brightRed: "\x1b[91m",
    brightGreen: "\x1b[92m",
    brightYellow: "\x1b[93m",
    brightBlue: "\x1b[94m",
    brightMagenta: "\x1b[95m",
    brightCyan: "\x1b[96m",
    brightWhite: "\x1b[97m",
  },
  styles: {
    bold: "\x1b[1m",
    dim: "\x1b[2m",
    italic: "\x1b[3m",
    underline: "\x1b[4m",
  },
} as const;
// One highlighting rule: a regex that finds spans in a line plus the ANSI
// code to wrap them in. When matches overlap, higher `priority` wins.
interface HighlightPattern {
  name: string; // human-readable identifier for the rule
  regex: RegExp; // must use the /g flag — exec() is iterated statefully
  ansiCode: string; // color/style prefix inserted before the match
  priority: number; // larger value wins overlapping matches
  quickCheck?: string; // optional fast pre-filter; no pattern in this file sets it
}
// A resolved regex hit inside a single line of text.
interface MatchResult {
  start: number; // inclusive start offset in the line
  end: number; // exclusive end offset in the line
  ansiCode: string;
  priority: number;
}
// Lines longer than this are returned unhighlighted (regex cost guard).
const MAX_LINE_LENGTH = 5000;
// If a chunk already contains more than this many ANSI sequences, assume the
// remote program is doing its own styling and leave the chunk untouched.
const MAX_ANSI_CODES = 10;
// Highlight rules applied to plain-text segments of terminal output.
const PATTERNS: HighlightPattern[] = [
  {
    // IPv4 address, optionally followed by :port
    name: "ipv4",
    regex:
      /(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])(?::\d{1,5})?/g,
    ansiCode: ANSI_CODES.colors.magenta,
    priority: 10,
  },
  {
    // Error-level log keywords (case-insensitive)
    name: "log-error",
    regex:
      /\b(ERROR|FATAL|CRITICAL|FAIL(?:ED)?|denied|invalid|DENIED)\b|\[ERROR\]/gi,
    ansiCode: ANSI_CODES.colors.brightRed,
    priority: 9,
  },
  {
    // Warning-level log keywords
    name: "log-warn",
    regex: /\b(WARN(?:ING)?|ALERT)\b|\[WARN(?:ING)?\]/gi,
    ansiCode: ANSI_CODES.colors.yellow,
    priority: 9,
  },
  {
    // Success / healthy-state keywords
    name: "log-success",
    regex:
      /\b(SUCCESS|OK|PASS(?:ED)?|COMPLETE(?:D)?|connected|active|up|Up|UP|FULL)\b/gi,
    ansiCode: ANSI_CODES.colors.brightGreen,
    priority: 8,
  },
  {
    // http(s) URLs, underlined in blue
    name: "url",
    regex: /https?:\/\/[^\s\])}]+/g,
    ansiCode: `${ANSI_CODES.colors.blue}${ANSI_CODES.styles.underline}`,
    priority: 8,
  },
  {
    // Absolute filesystem paths with at least two components
    name: "path-absolute",
    regex: /\/[a-zA-Z][a-zA-Z0-9_\-@.]*(?:\/[a-zA-Z0-9_\-@.]+)+/g,
    ansiCode: ANSI_CODES.colors.cyan,
    priority: 7,
  },
  {
    // Paths relative to the home directory (~/...)
    name: "path-home",
    regex: /~\/[a-zA-Z0-9_\-@./]+/g,
    ansiCode: ANSI_CODES.colors.cyan,
    priority: 7,
  },
  {
    // Info-level log keywords
    name: "log-info",
    regex: /\bINFO\b|\[INFO\]/gi,
    ansiCode: ANSI_CODES.colors.blue,
    priority: 6,
  },
  {
    // Debug/trace-level log keywords, dimmed
    name: "log-debug",
    regex: /\b(?:DEBUG|TRACE)\b|\[(?:DEBUG|TRACE)\]/gi,
    ansiCode: ANSI_CODES.colors.brightBlack,
    priority: 6,
  },
];
function hasExistingAnsiCodes(text: string): boolean {
  // Count the ANSI escape sequences already present in the chunk; a heavily
  // pre-styled chunk (more than MAX_ANSI_CODES sequences) is left alone so
  // we do not stack our own codes on top of the program's styling.
  const existing = text.match(
    /\x1b[\[\]()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nq-uy=><~]/g,
  );
  const sequenceCount = existing === null ? 0 : existing.length;
  return sequenceCount > MAX_ANSI_CODES;
}
function hasIncompleteAnsiSequence(text: string): boolean {
  // A trailing ESC — optionally followed by a partial CSI prefix like
  // "[" or "[31" — means the stream was split mid-sequence; injecting
  // highlight codes now would corrupt the sequence when the rest arrives.
  const partialEscapeAtEnd = /\x1b(?:\[(?:[0-9;]*)?)?$/;
  return partialEscapeAtEnd.test(text);
}
// A run of characters classified as either an ANSI escape sequence or
// ordinary printable text.
interface TextSegment {
  isAnsi: boolean;
  content: string;
}
// Split `text` into an ordered list of ANSI-escape and plain-text segments,
// preserving every byte (concatenating the segments reproduces the input).
function parseAnsiSegments(text: string): TextSegment[] {
  const segments: TextSegment[] = [];
  // Matches two-byte escapes (ESC + final) and full CSI sequences.
  const escapePattern = /\x1b(?:[@-Z\\-_]|\[[0-9;]*[@-~])/g;
  let cursor = 0;
  for (
    let hit = escapePattern.exec(text);
    hit !== null;
    hit = escapePattern.exec(text)
  ) {
    if (hit.index > cursor) {
      segments.push({ isAnsi: false, content: text.slice(cursor, hit.index) });
    }
    segments.push({ isAnsi: true, content: hit[0] });
    cursor = escapePattern.lastIndex;
  }
  if (cursor < text.length) {
    segments.push({ isAnsi: false, content: text.slice(cursor) });
  }
  return segments;
}
// Insert ANSI color codes around every PATTERNS match in a plain-text line.
// Overlapping matches are resolved in favor of the higher-priority pattern.
function highlightPlainText(text: string): string {
  // Guard: very long lines are skipped for performance; blank lines pass
  // through untouched.
  if (text.length > MAX_LINE_LENGTH) {
    return text;
  }
  if (!text.trim()) {
    return text;
  }
  // Collect every match from every pattern.
  const matches: MatchResult[] = [];
  for (const pattern of PATTERNS) {
    // The pattern regexes carry /g and are stateful; reset before reuse.
    pattern.regex.lastIndex = 0;
    let match;
    while ((match = pattern.regex.exec(text)) !== null) {
      matches.push({
        start: match.index,
        end: match.index + match[0].length,
        ansiCode: pattern.ansiCode,
        priority: pattern.priority,
      });
    }
  }
  if (matches.length === 0) {
    return text;
  }
  // Resolve overlaps: iterate in priority order (ties broken by position)
  // and keep only matches that do not overlap an already-accepted range.
  matches.sort((a, b) => {
    if (a.priority !== b.priority) {
      return b.priority - a.priority;
    }
    return a.start - b.start;
  });
  const appliedRanges: Array<{ start: number; end: number }> = [];
  const finalMatches = matches.filter((match) => {
    const overlaps = appliedRanges.some(
      (range) => match.start < range.end && match.end > range.start,
    );
    if (!overlaps) {
      appliedRanges.push({ start: match.start, end: match.end });
      return true;
    }
    return false;
  });
  // BUG FIX: insertions must be applied strictly right-to-left, otherwise an
  // insertion earlier in the string shifts the offsets of matches that come
  // after it. The previous code reversed the priority-sorted list, which is
  // not guaranteed to be in descending-start order (e.g. a low-priority path
  // match before a high-priority ERROR keyword was applied first and
  // corrupted the ERROR match's offsets).
  finalMatches.sort((a, b) => b.start - a.start);
  let result = text;
  for (const match of finalMatches) {
    const before = result.slice(0, match.start);
    const matched = result.slice(match.start, match.end);
    const after = result.slice(match.end);
    result = before + match.ansiCode + matched + ANSI_CODES.reset + after;
  }
  return result;
}
/**
 * Entry point: applies syntax highlighting to a chunk of terminal output.
 * Text is passed through unchanged when it is empty/whitespace, ends in an
 * incomplete escape sequence, or already carries too many ANSI codes.
 * Otherwise only the plain-text segments between existing escape
 * sequences are highlighted; the sequences themselves are preserved.
 */
export function highlightTerminalOutput(text: string): string {
  if (!text || !text.trim()) {
    return text;
  }
  if (hasIncompleteAnsiSequence(text)) {
    return text;
  }
  if (hasExistingAnsiCodes(text)) {
    return text;
  }

  const segments = parseAnsiSegments(text);
  if (segments.length === 0) {
    return highlightPlainText(text);
  }
  return segments
    .map((segment) =>
      segment.isAnsi ? segment.content : highlightPlainText(segment.content),
    )
    .join("");
}
/**
 * Reads the user preference for terminal syntax highlighting from
 * localStorage. Defaults to false when the flag is absent or when
 * localStorage is unavailable (e.g. SSR or storage access denied).
 */
export function isSyntaxHighlightingEnabled(): boolean {
  try {
    const stored = localStorage.getItem("terminalSyntaxHighlighting");
    return stored === "true";
  } catch {
    return false;
  }
}

2402
src/locales/ar.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/bn.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/cs.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/de.json Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

2402
src/locales/el.json Normal file

File diff suppressed because it is too large Load Diff

2478
src/locales/en.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/es.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/fr.json Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

2402
src/locales/he.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/hi.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/id.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/it.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/ja.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/ko.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/nb.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/nl.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/pl.json Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

2402
src/locales/pt.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/ro.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/ru.json Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

2402
src/locales/sv.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/th.json Normal file

File diff suppressed because it is too large Load Diff

2402
src/locales/tr.json Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff Show More