Compare commits

...

5 Commits

13 changed files with 4761 additions and 4504 deletions

6
.gitignore vendored
View File

@@ -1,3 +1,3 @@
node_modules/ node_modules/
deploy-config.json deploy-config.json
.env .env

1348
LICENSE

File diff suppressed because it is too large Load Diff

View File

@@ -1,211 +1,211 @@
# 🚀 Proxmox Deployment Template (TurnKey Node.js) # 🚀 Proxmox Deployment Template (TurnKey Node.js)
**Use this guide to deploy ANY Node.js application to a TurnKey Linux LXC Container.** **Use this guide to deploy ANY Node.js application to a TurnKey Linux LXC Container.**
--- ---
## 📋 Prerequisites ## 📋 Prerequisites
1. **Project**: A Node.js application (Express, Next.js, etc.) in a Git repository. 1. **Project**: A Node.js application (Express, Next.js, etc.) in a Git repository.
2. **Server**: A Proxmox TurnKey Node.js Container. 2. **Server**: A Proxmox TurnKey Node.js Container.
3. **Access**: Root SSH password for the container. 3. **Access**: Root SSH password for the container.
4. **Domain (Optional)**: If using Cloudflare Tunnel. 4. **Domain (Optional)**: If using Cloudflare Tunnel.
--- ---
## 🛠️ Step 1: Prepare Your Project ## 🛠️ Step 1: Prepare Your Project
Ensure your project is ready for production: Ensure your project is ready for production:
1. **Port Configuration**: Ensure your app listens on a configurable port or a fixed internal port (e.g., `4001`). 1. **Port Configuration**: Ensure your app listens on a configurable port or a fixed internal port (e.g., `4001`).
```javascript ```javascript
// server.js // server.js
const PORT = process.env.PORT || 4001; const PORT = process.env.PORT || 4001;
app.listen(PORT, ...); app.listen(PORT, ...);
``` ```
2. **Git Ignore**: Ensure `node_modules` and config files with secrets are ignored. 2. **Git Ignore**: Ensure `node_modules` and config files with secrets are ignored.
```gitignore ```gitignore
node_modules/ node_modules/
.env .env
config.json config.json
``` ```
--- ---
## 🖥️ Step 2: One-Time Server Setup ## 🖥️ Step 2: One-Time Server Setup
SSH into your new container: SSH into your new container:
```bash ```bash
ssh root@<YOUR_SERVER_IP> ssh root@<YOUR_SERVER_IP>
``` ```
Run these commands to prepare the environment: Run these commands to prepare the environment:
### 1. Install Essentials ### 1. Install Essentials
```bash ```bash
apt-get update && apt-get install -y git apt-get update && apt-get install -y git
``` ```
### 2. Prepare Directory ### 2. Prepare Directory
```bash ```bash
# Standard web directory # Standard web directory
mkdir -p /var/www/<APP_NAME> mkdir -p /var/www/<APP_NAME>
cd /var/www/<APP_NAME> cd /var/www/<APP_NAME>
# Clone your repo (Use Basic Auth with Token if private) # Clone your repo (Use Basic Auth with Token if private)
# Format: https://<USER>:<TOKEN>@github.com/<ORG>/<REPO>.git # Format: https://<USER>:<TOKEN>@github.com/<ORG>/<REPO>.git
git clone <YOUR_REPO_URL> . git clone <YOUR_REPO_URL> .
# Install dependencies # Install dependencies
npm install npm install
``` ```
### 3. Setup Permissions ### 3. Setup Permissions
```bash ```bash
# Give ownership to www-data (Nginx user) # Give ownership to www-data (Nginx user)
chown -R www-data:www-data /var/www/<APP_NAME> chown -R www-data:www-data /var/www/<APP_NAME>
``` ```
--- ---
## ⚙️ Step 3: Application Configuration ## ⚙️ Step 3: Application Configuration
### 1. Systemd Service ### 1. Systemd Service
Create a service file to keep your app running. Create a service file to keep your app running.
Create `/etc/systemd/system/<APP_NAME>.service`: Create `/etc/systemd/system/<APP_NAME>.service`:
```ini ```ini
[Unit] [Unit]
Description=<APP_NAME> Service Description=<APP_NAME> Service
After=network.target After=network.target
[Service] [Service]
Type=simple Type=simple
User=root User=root
# OR use 'www-data' if app doesn't need root ports # OR use 'www-data' if app doesn't need root ports
# User=www-data # User=www-data
WorkingDirectory=/var/www/<APP_NAME> WorkingDirectory=/var/www/<APP_NAME>
ExecStart=/usr/local/bin/node server.js ExecStart=/usr/local/bin/node server.js
Restart=always Restart=always
Environment=NODE_ENV=production Environment=NODE_ENV=production
Environment=PORT=4001 Environment=PORT=4001
[Install] [Install]
WantedBy=multi-user.target WantedBy=multi-user.target
``` ```
Enable and start: Enable and start:
```bash ```bash
systemctl daemon-reload systemctl daemon-reload
systemctl enable <APP_NAME> systemctl enable <APP_NAME>
systemctl start <APP_NAME> systemctl start <APP_NAME>
``` ```
### 2. Nginx Reverse Proxy ### 2. Nginx Reverse Proxy
Configure Nginx to forward port 80 to your app (Port 4001). Configure Nginx to forward port 80 to your app (Port 4001).
Create `/etc/nginx/sites-available/<APP_NAME>`: Create `/etc/nginx/sites-available/<APP_NAME>`:
```nginx ```nginx
server { server {
listen 80; listen 80;
server_name _; server_name _;
root /var/www/<APP_NAME>; root /var/www/<APP_NAME>;
index index.html; index index.html;
# Serve static files (Optional) # Serve static files (Optional)
location / { location / {
try_files $uri $uri/ =404; try_files $uri $uri/ =404;
} }
# Proxy API/Dynamic requests # Proxy API/Dynamic requests
location /api { location /api {
proxy_pass http://localhost:4001; proxy_pass http://localhost:4001;
proxy_http_version 1.1; proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade; proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade'; proxy_set_header Connection 'upgrade';
proxy_set_header Host $host; proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade; proxy_cache_bypass $http_upgrade;
} }
} }
``` ```
Enable site: Enable site:
```bash ```bash
# Remove defaults # Remove defaults
rm -f /etc/nginx/sites-enabled/default rm -f /etc/nginx/sites-enabled/default
rm -f /etc/nginx/sites-enabled/nodejs rm -f /etc/nginx/sites-enabled/nodejs
# Link new site # Link new site
ln -s /etc/nginx/sites-available/<APP_NAME> /etc/nginx/sites-enabled/ ln -s /etc/nginx/sites-available/<APP_NAME> /etc/nginx/sites-enabled/
# Reload # Reload
nginx -t && systemctl reload nginx nginx -t && systemctl reload nginx
``` ```
--- ---
## ☁️ Step 4: Cloudflare Tunnel (Secure Access) ## ☁️ Step 4: Cloudflare Tunnel (Secure Access)
Expose your app securely without opening router ports. Expose your app securely without opening router ports.
### 1. Install Cloudflared ### 1. Install Cloudflared
```bash ```bash
# Add Key # Add Key
mkdir -p --mode=0755 /usr/share/keyrings mkdir -p --mode=0755 /usr/share/keyrings
curl -fsSL https://pkg.cloudflare.com/cloudflare-public-v2.gpg | tee /usr/share/keyrings/cloudflare-public-v2.gpg >/dev/null curl -fsSL https://pkg.cloudflare.com/cloudflare-public-v2.gpg | tee /usr/share/keyrings/cloudflare-public-v2.gpg >/dev/null
# Add Repo # Add Repo
echo 'deb [signed-by=/usr/share/keyrings/cloudflare-public-v2.gpg] https://pkg.cloudflare.com/cloudflared any main' | tee /etc/apt/sources.list.d/cloudflared.list echo 'deb [signed-by=/usr/share/keyrings/cloudflare-public-v2.gpg] https://pkg.cloudflare.com/cloudflared any main' | tee /etc/apt/sources.list.d/cloudflared.list
# Install # Install
apt-get update && apt-get install -y cloudflared apt-get update && apt-get install -y cloudflared
``` ```
### 2. Create Tunnel ### 2. Create Tunnel
```bash ```bash
cloudflared tunnel login cloudflared tunnel login
cloudflared tunnel create <TUNNEL_NAME> cloudflared tunnel create <TUNNEL_NAME>
# Follow on-screen instructions to map domain -> http://localhost:4001 # Follow on-screen instructions to map domain -> http://localhost:4001
``` ```
--- ---
## 🔄 Step 5: Automated Updates (PowerShell) ## 🔄 Step 5: Automated Updates (PowerShell)
Create a script `deploy-remote.ps1` in your project root to automate updates. Create a script `deploy-remote.ps1` in your project root to automate updates.
**Pre-requisite**: Create `deploy-config.json` (Add to .gitignore!): **Pre-requisite**: Create `deploy-config.json` (Add to .gitignore!):
```json ```json
{ {
"host": "<SERVER_IP>", "host": "<SERVER_IP>",
"username": "root", "username": "root",
"password": "<SSH_PASSWORD>", "password": "<SSH_PASSWORD>",
"remotePath": "/var/www/<APP_NAME>" "remotePath": "/var/www/<APP_NAME>"
} }
``` ```
**Script `deploy-remote.ps1`**: **Script `deploy-remote.ps1`**:
```powershell ```powershell
# Reads config and updates remote server # Reads config and updates remote server
$Config = Get-Content "deploy-config.json" | ConvertFrom-Json $Config = Get-Content "deploy-config.json" | ConvertFrom-Json
$User = $Config.username; $HostName = $Config.host; $Pass = $Config.password $User = $Config.username; $HostName = $Config.host; $Pass = $Config.password
$RemotePath = $Config.remotePath $RemotePath = $Config.remotePath
# Commands to run remotely # Commands to run remotely
$Cmds = " $Cmds = "
cd $RemotePath cd $RemotePath
echo '⬇️ Pulling code...' echo '⬇️ Pulling code...'
git pull git pull
echo '📦 Installing deps...' echo '📦 Installing deps...'
npm install npm install
echo '🚀 Restarting service...' echo '🚀 Restarting service...'
systemctl restart <APP_NAME> systemctl restart <APP_NAME>
systemctl status <APP_NAME> --no-pager systemctl status <APP_NAME> --no-pager
" "
echo y | plink -ssh -t -pw $Pass "$User@$HostName" $Cmds echo y | plink -ssh -t -pw $Pass "$User@$HostName" $Cmds
``` ```
**Usage**: Just run `./deploy-remote.ps1` to deploy! **Usage**: Just run `./deploy-remote.ps1` to deploy!

250
README.md
View File

@@ -1,125 +1,125 @@
# Beyond Cloud Technology - Website Stress Test # Beyond Cloud Technology - Website Stress Test
![Website Stress Test Dashboard](screenshots/hero.png) ![Website Stress Test Dashboard](screenshots/hero.png)
## 🌐 Public Access ## 🌐 Public Access
**Live URL:** [https://website-stress-test.beyondcloud.technology/](https://website-stress-test.beyondcloud.technology/) **Live URL:** [https://website-stress-test.beyondcloud.technology/](https://website-stress-test.beyondcloud.technology/)
--- ---
## 🚀 Overview ## 🚀 Overview
The **Website Stress Test** is a professional-grade load testing tool designed to simulate realistic traffic patterns on your web applications. It helps developers and QA engineers identify bottlenecks, test scalability, and ensure production readiness. The **Website Stress Test** is a professional-grade load testing tool designed to simulate realistic traffic patterns on your web applications. It helps developers and QA engineers identify bottlenecks, test scalability, and ensure production readiness.
Built with a **modern, high-performance architecture**, it includes a custom NodeJS proxy server to bypass CORS restrictions and allow testing of any target URL. Built with a **modern, high-performance architecture**, it includes a custom NodeJS proxy server to bypass CORS restrictions and allow testing of any target URL.
## ✨ Key Features ## ✨ Key Features
### 🛠️ Core Functionality ### 🛠️ Core Functionality
* **Custom HTTP Methods**: Support for GET, POST, PUT, DELETE, and PATCH. * **Custom HTTP Methods**: Support for GET, POST, PUT, DELETE, and PATCH.
* **Configurable Load**: Adjust concurrent users (up to 5000) and test duration. * **Configurable Load**: Adjust concurrent users (up to 5000) and test duration.
* **Traffic Patterns**: * **Traffic Patterns**:
* **Steady**: Constant load. * **Steady**: Constant load.
* **Burst**: Sudden spikes to test resilience. * **Burst**: Sudden spikes to test resilience.
* **Ramp-up**: Gradual increase to find breaking points. * **Ramp-up**: Gradual increase to find breaking points.
* **Random**: Simulate unpredictable real-world traffic. * **Random**: Simulate unpredictable real-world traffic.
* **Crawler Mode**: Automatically crawls the target website to test multiple pages and paths, not just the entry point. * **Crawler Mode**: Automatically crawls the target website to test multiple pages and paths, not just the entry point.
### 📊 Real-Time Analytics ### 📊 Real-Time Analytics
* **Interactive Charts**: Live visualization of Requests Per Second (RPS) and Response Times. * **Interactive Charts**: Live visualization of Requests Per Second (RPS) and Response Times.
* **Detailed Metrics**: Track Active Users, Bandwidth, Success Rates, and Error breakdown (4xx, 5xx, Timeouts). * **Detailed Metrics**: Track Active Users, Bandwidth, Success Rates, and Error breakdown (4xx, 5xx, Timeouts).
* **Percentiles**: Monitor P50, P95, and P99 latency metrics. * **Percentiles**: Monitor P50, P95, and P99 latency metrics.
### 🎨 User Experience ### 🎨 User Experience
* **Modern UI**: Sleek, glassmorphism-inspired design with Light/Dark mode support. * **Modern UI**: Sleek, glassmorphism-inspired design with Light/Dark mode support.
* **Git Versioning**: Automatic display of the current Git Commit ID and deployment age in the UI. * **Git Versioning**: Automatic display of the current Git Commit ID and deployment age in the UI.
* **Responsive Design**: Fully functional on desktop and tablet devices. * **Responsive Design**: Fully functional on desktop and tablet devices.
--- ---
## 📦 Installation & Setup ## 📦 Installation & Setup
### Prerequisites ### Prerequisites
* Node.js (v18 or higher) * Node.js (v18 or higher)
* Nginx (for production deployment) * Nginx (for production deployment)
* PM2 (for process management) * PM2 (for process management)
### 💻 Local Development ### 💻 Local Development
1. **Clone the Repository** 1. **Clone the Repository**
```bash ```bash
git clone https://github.com/DeNNiiInc/Website-Stress-Test.git git clone https://github.com/DeNNiiInc/Website-Stress-Test.git
cd Website-Stress-Test cd Website-Stress-Test
``` ```
2. **Install Dependencies** 2. **Install Dependencies**
```bash ```bash
npm install npm install
``` ```
3. **Start the Proxy Server** 3. **Start the Proxy Server**
```bash ```bash
npm start npm start
``` ```
The server will start on `http://localhost:3000`. The server will start on `http://localhost:3000`.
4. **Open the Application** 4. **Open the Application**
Open `index.html` in your browser or serve it using a static file server (e.g., Live Server). Open `index.html` in your browser or serve it using a static file server (e.g., Live Server).
--- ---
## 🚀 Deployment Guide (Proxmox / Ubuntu) ## 🚀 Deployment Guide (Proxmox / Ubuntu)
This project includes automated deployment scripts for Proxmox/Ubuntu environments. This project includes automated deployment scripts for Proxmox/Ubuntu environments.
### 1. Configuration ### 1. Configuration
Copy `deploy-config.example.json` to `deploy-config.json` and update with your server details: Copy `deploy-config.example.json` to `deploy-config.json` and update with your server details:
```json ```json
{ {
"host": "YOUR_SERVER_IP", "host": "YOUR_SERVER_IP",
"username": "root", "username": "root",
"password": "YOUR_PASSWORD", "password": "YOUR_PASSWORD",
"remotePath": "/var/www/website-stress-test", "remotePath": "/var/www/website-stress-test",
"repoUrl": "https://github.com/DeNNiiInc/Website-Stress-Test.git", "repoUrl": "https://github.com/DeNNiiInc/Website-Stress-Test.git",
"githubToken": "YOUR_GITHUB_TOKEN", "githubToken": "YOUR_GITHUB_TOKEN",
"appName": "website-stress-test" "appName": "website-stress-test"
} }
``` ```
### 2. Auto-Deployment ### 2. Auto-Deployment
Run the PowerShell deployment script: Run the PowerShell deployment script:
```powershell ```powershell
./start-deployment.ps1 ./start-deployment.ps1
``` ```
This script will: This script will:
* Connect to your server via SSH. * Connect to your server via SSH.
* Install Nginx and Node.js if missing. * Install Nginx and Node.js if missing.
* Clone/Pull the latest code. * Clone/Pull the latest code.
* Configure Nginx as a reverse proxy. * Configure Nginx as a reverse proxy.
* Set up a Cron job for auto-updates. * Set up a Cron job for auto-updates.
### 3. Auto-Sync ### 3. Auto-Sync
The system automatically checks for Git updates every 5 minutes. If changes are detected, it pulls the code, installs dependencies, and restarts the backend process without downtime. The system automatically checks for Git updates every 5 minutes. If changes are detected, it pulls the code, installs dependencies, and restarts the backend process without downtime.
**Manual Update Trigger:** **Manual Update Trigger:**
```bash ```bash
/var/www/website-stress-test/auto-sync.sh /var/www/website-stress-test/auto-sync.sh
``` ```
--- ---
## 🔧 Architecture ## 🔧 Architecture
### Backend (`proxy-server.js`) ### Backend (`proxy-server.js`)
* **Role**: Handles CORS requests and authenticates traffic. * **Role**: Handles CORS requests and authenticates traffic.
* **Port**: 3000 (Internal). * **Port**: 3000 (Internal).
* **Endpoints**: * **Endpoints**:
* `/proxy`: Forwards stress test requests. * `/proxy`: Forwards stress test requests.
* `/git-info`: Returns current commit hash and deployment date. * `/git-info`: Returns current commit hash and deployment date.
### Frontend (`index.html` + `script.js`) ### Frontend (`index.html` + `script.js`)
* **Technology**: Vanilla JS + Chart.js. * **Technology**: Vanilla JS + Chart.js.
* **Communication**: Fetch API to the Proxy Server. * **Communication**: Fetch API to the Proxy Server.
--- ---
## 📝 License ## 📝 License
MIT License - Copyright (c) 2025 Beyond Cloud Technology. MIT License - Copyright (c) 2025 Beyond Cloud Technology.

View File

@@ -1,9 +1,9 @@
{ {
"host": "YOUR_SERVER_IP", "host": "YOUR_SERVER_IP",
"username": "root", "username": "root",
"password": "YOUR_SSH_PASSWORD", "password": "YOUR_SSH_PASSWORD",
"remotePath": "/var/www/website-stress-test", "remotePath": "/var/www/website-stress-test",
"repoUrl": "https://github.com/DeNNiiInc/Website-Stress-Test.git", "repoUrl": "https://github.com/DeNNiiInc/Website-Stress-Test.git",
"githubToken": "YOUR_GITHUB_TOKEN", "githubToken": "YOUR_GITHUB_TOKEN",
"appName": "website-stress-test" "appName": "website-stress-test"
} }

1054
index.html

File diff suppressed because it is too large Load Diff

View File

@@ -1,17 +1,17 @@
{ {
"name": "stress-testing-tool", "name": "stress-testing-tool",
"version": "1.0.0", "version": "1.0.0",
"description": "Website stress testing tool with CORS proxy", "description": "Website stress testing tool with CORS proxy",
"main": "proxy-server.js", "main": "proxy-server.js",
"scripts": { "scripts": {
"start": "node proxy-server.js", "start": "node proxy-server.js",
"proxy": "node proxy-server.js" "proxy": "node proxy-server.js"
}, },
"keywords": [ "keywords": [
"stress-testing", "stress-testing",
"load-testing", "load-testing",
"cors-proxy" "cors-proxy"
], ],
"author": "", "author": "",
"license": "MIT" "license": "MIT"
} }

View File

@@ -1,273 +1,351 @@
// =================================== // ===================================
// CORS PROXY SERVER // CORS PROXY SERVER
// =================================== // ===================================
// This proxy server allows the stress testing tool to test // This proxy server allows the stress testing tool to test
// production websites without CORS restrictions. // production websites without CORS restrictions.
const http = require('http'); const http = require('http');
const https = require('https'); const https = require('https');
const url = require('url'); const url = require('url');
const cluster = require('cluster');
const PORT = 3000; const numCPUs = require('os').cpus().length;
// Configuration const fs = require('fs');
const CONFIG = { const path = require('path');
// Maximum request timeout (30 seconds)
timeout: 30000, const PORT = process.env.PORT || 3000;
// Allowed origins (restrict to your stress testing tool's domain) // Configuration
// Use '*' for development, specific domain for production const CONFIG = {
allowedOrigins: '*', // Maximum request timeout (30 seconds)
timeout: 30000,
// Maximum concurrent connections
maxConnections: 5000, // Allowed origins (restrict to your stress testing tool's domain)
// Use '*' for development, specific domain for production
// User agents for rotation allowedOrigins: '*',
userAgents: [
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', // Maximum concurrent connections
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', maxConnections: 10000, // Increased for cluster
'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:121.0) Gecko/20100101 Firefox/121.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.1 Safari/605.1.15', // User agents for rotation
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36' userAgents: [
] 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
}; 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:121.0) Gecko/20100101 Firefox/121.0',
// Get random user agent 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.1 Safari/605.1.15',
function getRandomUserAgent() { 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36'
return CONFIG.userAgents[Math.floor(Math.random() * CONFIG.userAgents.length)]; ]
} };
const { exec } = require('child_process'); // Global agents for connection pooling
const httpAgent = new http.Agent({ keepAlive: true, maxSockets: Infinity });
// Helper to get git info const httpsAgent = new https.Agent({ keepAlive: true, maxSockets: Infinity });
const getGitInfo = () => {
return new Promise((resolve) => { // Get random user agent
exec('git rev-parse --short HEAD && git log -1 --format=%cd --date=relative', (err, stdout) => { function getRandomUserAgent() {
if (err) { return CONFIG.userAgents[Math.floor(Math.random() * CONFIG.userAgents.length)];
console.error('Error fetching git info:', err); }
resolve({ commit: 'Unknown', date: 'Unknown' });
return; const { exec } = require('child_process');
}
const parts = stdout.trim().split('\n'); // Helper to get git info
resolve({ const getGitInfo = () => {
commit: parts[0] || 'Unknown', return new Promise((resolve) => {
date: parts[1] || 'Unknown' exec('git rev-parse --short HEAD && git log -1 --format=%cd --date=relative', (err, stdout) => {
}); if (err) {
}); resolve({ commit: 'Unknown', date: 'Unknown' });
}); return;
}; }
const parts = stdout.trim().split('\n');
// Create the proxy server resolve({
const server = http.createServer((req, res) => { commit: parts[0] || 'Unknown',
// Handle CORS preflight requests date: parts[1] || 'Unknown'
if (req.method === 'OPTIONS') { });
handleCORS(res); });
res.writeHead(200); });
res.end(); };
return;
} if (cluster.isMaster) {
console.log(`Master ${process.pid} is running`);
// Handle Git Info request console.log(`Spawning ${numCPUs} workers...`);
// Nginx proxy_pass might result in double slashes (//git-info)
if ((req.url === '/git-info' || req.url === '//git-info') && req.method === 'GET') { for (let i = 0; i < numCPUs; i++) {
handleCORS(res); cluster.fork();
getGitInfo().then(info => { }
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(JSON.stringify(info)); cluster.on('exit', (worker, code, signal) => {
}); console.log(`Worker ${worker.process.pid} died. Respawning...`);
return; cluster.fork();
} });
// Only allow POST requests to the proxy // Master process only listens for SIGINT to gracefully shut down workers
if (req.method !== 'POST') { process.on('SIGINT', () => {
res.writeHead(405, { 'Content-Type': 'application/json' }); console.log('\n\n🛑 Shutting down proxy server (master)...');
res.end(JSON.stringify({ error: 'Method not allowed. Use POST.' })); for (const id in cluster.workers) {
return; cluster.workers[id].kill();
} }
process.exit(0);
// Parse request body });
let body = '';
req.on('data', chunk => { } else {
body += chunk.toString(); // Create the proxy server
}); const server = http.createServer((req, res) => {
// Handle CORS preflight requests
req.on('end', () => { if (req.method === 'OPTIONS') {
try { handleCORS(res);
const proxyRequest = JSON.parse(body); res.writeHead(200);
handleProxyRequest(proxyRequest, res); res.end();
} catch (error) { return;
res.writeHead(400, { 'Content-Type': 'application/json' }); }
res.end(JSON.stringify({
error: 'Invalid JSON', // Health check
message: error.message if (req.url === '/health' || req.url === '//health') {
})); handleCORS(res);
} res.writeHead(200, { 'Content-Type': 'application/json' });
}); res.end(JSON.stringify({ status: 'ok', worker: process.pid }));
}); return;
}
// Handle the actual proxy request
function handleProxyRequest(proxyRequest, clientRes) { // Handle Git Info request
const { targetUrl, method = 'GET', headers = {}, body = null } = proxyRequest; // Nginx proxy_pass might result in double slashes (//git-info)
if ((req.url === '/git-info' || req.url === '//git-info') && req.method === 'GET') {
// Validate target URL handleCORS(res);
if (!targetUrl) { getGitInfo().then(info => {
clientRes.writeHead(400, { 'Content-Type': 'application/json' }); res.writeHead(200, { 'Content-Type': 'application/json' });
clientRes.end(JSON.stringify({ error: 'targetUrl is required' })); res.end(JSON.stringify(info));
return; });
} return;
}
let parsedUrl;
try { // Serve static files for the UI
parsedUrl = new URL(targetUrl); if (req.method === 'GET') {
} catch (error) { let requestPath = req.url.split('?')[0];
clientRes.writeHead(400, { 'Content-Type': 'application/json' }); let filePath = '.' + requestPath;
clientRes.end(JSON.stringify({ error: 'Invalid URL' })); if (requestPath === '/') filePath = './index.html';
return;
} // Basic security: don't allow accessing files outside the directory or sensitive files
const resolvedPath = path.resolve(filePath);
// Determine if we need http or https const rootPath = path.resolve('.');
const protocol = parsedUrl.protocol === 'https:' ? https : http;
if (!resolvedPath.startsWith(rootPath) || filePath.includes('..')) {
// Prepare request options with random user agent res.writeHead(403);
const options = { res.end('Forbidden');
hostname: parsedUrl.hostname, return;
port: parsedUrl.port, }
path: parsedUrl.pathname + parsedUrl.search,
method: method, fs.access(filePath, fs.constants.F_OK, (err) => {
headers: { if (!err) {
...headers, const extname = path.extname(filePath).toLowerCase();
'User-Agent': getRandomUserAgent(), let contentType = 'text/html';
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8', const mimeTypes = {
'Accept-Language': 'en-US,en;q=0.5', '.html': 'text/html',
'Accept-Encoding': 'gzip, deflate, br', '.js': 'text/javascript',
'DNT': '1', '.css': 'text/css',
'Connection': 'keep-alive', '.png': 'image/png',
'Upgrade-Insecure-Requests': '1' '.jpg': 'image/jpeg',
}, '.svg': 'image/svg+xml',
timeout: CONFIG.timeout '.json': 'application/json'
}; };
contentType = mimeTypes[extname] || 'text/plain';
const startTime = Date.now();
fs.readFile(filePath, (error, content) => {
// Make the request to the target server if (!error) {
const proxyReq = protocol.request(options, (proxyRes) => { res.writeHead(200, { 'Content-Type': contentType });
const responseTime = Date.now() - startTime; res.end(content, 'utf-8');
} else {
// Collect response data res.writeHead(500);
let responseData = ''; res.end('Server Error');
let responseSize = 0; }
const maxBodySize = 500000; // 500KB limit for crawler });
} else if (req.url === '/health' || req.url === '//health' || (req.url === '/git-info' || req.url === '//git-info')) {
proxyRes.on('data', chunk => { // Handled by other logic (keep going)
responseSize += chunk.length; } else if (req.url === '/') {
// Only collect body if under size limit (for crawler) // Fallback for root if index.html doesn't exist? (Unlikely)
if (responseSize < maxBodySize) { } else {
responseData += chunk.toString(); // Not a static file, maybe fall through to POST check
} }
}); });
proxyRes.on('end', () => { // Special handling for health and git-info which are GET but not files
// Send response back to client with CORS headers if (req.url.includes('/health') || req.url.includes('/git-info')) {
handleCORS(clientRes); // Let it fall through to those handlers
clientRes.writeHead(200, { 'Content-Type': 'application/json' }); } else {
return;
clientRes.end(JSON.stringify({ }
success: true, }
statusCode: proxyRes.statusCode,
statusMessage: proxyRes.statusMessage, // Only allow POST requests to the proxy
responseTime: responseTime, if (req.method !== 'POST') {
headers: proxyRes.headers, res.writeHead(405, { 'Content-Type': 'application/json' });
body: responseData, // Full body for crawler link extraction res.end(JSON.stringify({ error: 'Method not allowed. Use POST.' }));
bodySize: responseSize return;
})); }
});
}); // Parse request body
let body = '';
// Handle request errors req.on('data', chunk => {
proxyReq.on('error', (error) => { body += chunk.toString();
const responseTime = Date.now() - startTime; });
handleCORS(clientRes); req.on('end', () => {
clientRes.writeHead(200, { 'Content-Type': 'application/json' }); try {
const proxyRequest = JSON.parse(body);
clientRes.end(JSON.stringify({ handleProxyRequest(proxyRequest, res);
success: false, } catch (error) {
error: error.message, res.writeHead(400, { 'Content-Type': 'application/json' });
responseTime: responseTime, res.end(JSON.stringify({
statusCode: 0 error: 'Invalid JSON',
})); message: error.message
}); }));
}
// Handle timeout });
proxyReq.on('timeout', () => { });
proxyReq.destroy();
const responseTime = Date.now() - startTime; // Handle the actual proxy request
function handleProxyRequest(proxyRequest, clientRes) {
handleCORS(clientRes); const { targetUrl, method = 'GET', headers = {}, body = null } = proxyRequest;
clientRes.writeHead(200, { 'Content-Type': 'application/json' });
// Validate target URL
clientRes.end(JSON.stringify({ if (!targetUrl) {
success: false, clientRes.writeHead(400, { 'Content-Type': 'application/json' });
error: 'Request timeout', clientRes.end(JSON.stringify({ error: 'targetUrl is required' }));
responseTime: responseTime, return;
statusCode: 0 }
}));
}); let parsedUrl;
try {
// Send request body if present parsedUrl = new URL(targetUrl);
if (body && method !== 'GET' && method !== 'HEAD') { } catch (error) {
proxyReq.write(typeof body === 'string' ? body : JSON.stringify(body)); clientRes.writeHead(400, { 'Content-Type': 'application/json' });
} clientRes.end(JSON.stringify({ error: 'Invalid URL' }));
return;
proxyReq.end(); }
}
// Determine if we need http or https and which agent to use
// Add CORS headers to response const isHttps = parsedUrl.protocol === 'https:';
function handleCORS(res) { const protocol = isHttps ? https : http;
res.setHeader('Access-Control-Allow-Origin', CONFIG.allowedOrigins); const agent = isHttps ? httpsAgent : httpAgent;
res.setHeader('Access-Control-Allow-Methods', 'POST, GET, OPTIONS');
res.setHeader('Access-Control-Allow-Headers', 'Content-Type'); // Prepare request options with random user agent
} const options = {
hostname: parsedUrl.hostname,
// Start the server port: parsedUrl.port,
server.listen(PORT, () => { path: parsedUrl.pathname + parsedUrl.search,
console.log(` method: method,
╔════════════════════════════════════════════════════════════╗ agent: agent, // Use the global agent for connection pooling
CORS Proxy Server for Stress Testing Tool ║ headers: {
╚════════════════════════════════════════════════════════════╝ ...headers,
'User-Agent': getRandomUserAgent(),
✅ Server running on: http://localhost:${PORT} 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
✅ Max connections: ${CONFIG.maxConnections} 'Accept-Language': 'en-US,en;q=0.5',
✅ Request timeout: ${CONFIG.timeout}ms 'Accept-Encoding': 'gzip, deflate, br',
'DNT': '1',
📝 Usage: 'Connection': 'keep-alive',
POST to http://localhost:${PORT} with JSON body: 'Upgrade-Insecure-Requests': '1'
{ },
"targetUrl": "https://example.com", timeout: CONFIG.timeout
"method": "GET", };
"headers": {},
"body": null const startTime = Date.now();
}
// Make the request to the target server
🔒 Security Note: const proxyReq = protocol.request(options, (proxyRes) => {
For production, update CONFIG.allowedOrigins to your const responseTime = Date.now() - startTime;
stress testing tool's domain (not '*')
// Collect response data
Press Ctrl+C to stop the server let responseData = '';
`); let responseSize = 0;
}); const maxBodySize = 500000; // 500KB limit for crawler
// Handle server errors proxyRes.on('data', chunk => {
server.on('error', (error) => { responseSize += chunk.length;
console.error('❌ Server error:', error.message); // Only collect body if under size limit (for crawler)
process.exit(1); if (responseSize < maxBodySize) {
}); responseData += chunk.toString();
}
// Graceful shutdown });
process.on('SIGINT', () => {
console.log('\n\n🛑 Shutting down proxy server...'); proxyRes.on('end', () => {
server.close(() => { // Send response back to client with CORS headers
console.log('✅ Server closed'); handleCORS(clientRes);
process.exit(0); clientRes.writeHead(200, { 'Content-Type': 'application/json' });
});
}); clientRes.end(JSON.stringify({
success: true,
statusCode: proxyRes.statusCode,
statusMessage: proxyRes.statusMessage,
responseTime: responseTime,
headers: proxyRes.headers,
body: responseData, // Full body for crawler link extraction
bodySize: responseSize,
proxyWorker: process.pid // Add worker ID for debugging
}));
});
});
// Handle request errors
proxyReq.on('error', (error) => {
const responseTime = Date.now() - startTime;
handleCORS(clientRes);
clientRes.writeHead(200, { 'Content-Type': 'application/json' });
clientRes.end(JSON.stringify({
success: false,
error: error.message,
responseTime: responseTime,
statusCode: 0
}));
});
// Handle timeout
proxyReq.on('timeout', () => {
proxyReq.destroy();
const responseTime = Date.now() - startTime;
handleCORS(clientRes);
clientRes.writeHead(200, { 'Content-Type': 'application/json' });
clientRes.end(JSON.stringify({
success: false,
error: 'Request timeout',
responseTime: responseTime,
statusCode: 0
}));
});
// Send request body if present
if (body && method !== 'GET' && method !== 'HEAD') {
proxyReq.write(typeof body === 'string' ? body : JSON.stringify(body));
}
proxyReq.end();
}
// Attach the CORS response headers this proxy advertises on every reply.
// The allowed origin comes from CONFIG so production deployments can lock
// it down to the stress-testing tool's own domain.
function handleCORS(res) {
    const corsHeaders = {
        'Access-Control-Allow-Origin': CONFIG.allowedOrigins,
        'Access-Control-Allow-Methods': 'POST, GET, OPTIONS',
        'Access-Control-Allow-Headers': 'Content-Type'
    };
    for (const [name, value] of Object.entries(corsHeaders)) {
        res.setHeader(name, value);
    }
}
// Start the HTTP listener for this worker process. The PID is logged so
// individual workers can be told apart (presumably several workers share
// PORT via Node's cluster module — TODO confirm against the master branch).
server.listen(PORT, () => {
    console.log(`Worker ${process.pid} running on http://localhost:${PORT}`);
});
// Fatal listener errors (e.g. EADDRINUSE) terminate this worker with a
// non-zero exit code rather than leaving it running in a broken state.
server.on('error', (error) => {
    console.error(`❌ Worker ${process.pid} server error:`, error.message);
    process.exit(1);
});
// Graceful shutdown on Ctrl+C: stop accepting new connections, then exit
// once server.close() reports that in-flight requests have drained.
process.on('SIGINT', () => {
    console.log(`\n\n🛑 Worker ${process.pid} shutting down...`);
    server.close(() => {
        console.log(`✅ Worker ${process.pid} closed`);
        process.exit(0);
    });
});
}

2807
script.js

File diff suppressed because it is too large Load Diff

View File

@@ -1,23 +1,25 @@
#!/bin/bash
# setup-server.sh - Initial Setup Script # setup-server.sh - Initial Setup Script
# 1. Install Global Dependencies # 1. System Tuning for High Concurrency
echo "Tuning system limits..."
# Increase max open files for high connection counts
if ! grep -q "soft nofile 65535" /etc/security/limits.conf; then
echo "* soft nofile 65535" >> /etc/security/limits.conf
echo "* hard nofile 65535" >> /etc/security/limits.conf
fi
# Apply limits to current session (for the rest of this script)
ulimit -n 65535
# 2. Install Global Dependencies
echo "Installing PM2..." echo "Installing PM2..."
npm install -g pm2 npm install -g pm2
# 2. Clone Repository # 3. Clone Repository
# Expects: REPO_URL, APP_DIR, GITHUB_TOKEN inside the script or env # ... (rest of cloning logic)
# We'll use arguments passed to this script: $1=REPO_URL $2=APP_DIR $3=GITHUB_TOKEN
REPO_URL="$1" REPO_URL="$1"
APP_DIR="$2" APP_DIR="$2"
GITHUB_TOKEN="$3" GITHUB_TOKEN="$3"
# Construct URL with token for auth
# Extract host and path from REPO_URL (assuming https://github.com/user/repo.git)
# We need to insert token: https://TOKEN@github.com/user/repo.git
# Simple replacement:
AUTH_REPO_URL="${REPO_URL/https:\/\//https:\/\/$GITHUB_TOKEN@}" AUTH_REPO_URL="${REPO_URL/https:\/\//https:\/\/$GITHUB_TOKEN@}"
echo "Preparing application directory: $APP_DIR" echo "Preparing application directory: $APP_DIR"
@@ -33,18 +35,20 @@ else
cd "$APP_DIR" cd "$APP_DIR"
fi fi
# 3. Install App Dependencies # 4. Install App Dependencies
echo "Installing application dependencies..." echo "Installing application dependencies..."
npm install npm install
# 4. Start Application with PM2 # 5. Start Application with PM2
APP_NAME="website-stress-test" APP_NAME="website-stress-test"
echo "Starting application with PM2 ($APP_NAME)..." echo "Starting application with PM2 ($APP_NAME)..."
pm2 start proxy-server.js --name "$APP_NAME" --watch --ignore-watch="node_modules" # Using Node built-in clustering, but PM2 monitors the master
pm2 stop "$APP_NAME" || true
pm2 start proxy-server.js --name "$APP_NAME" --max-memory-restart 1G
pm2 save pm2 save
pm2 startup | tail -n 1 | bash # Setup startup script pm2 startup | tail -n 1 | bash # Setup startup script
# 5. Setup Cron Job for Auto-Sync # 6. Setup Cron Job for Auto-Sync
echo "Setting up Cron Job for auto-sync..." echo "Setting up Cron Job for auto-sync..."
SCRIPT_PATH="$APP_DIR/auto-sync.sh" SCRIPT_PATH="$APP_DIR/auto-sync.sh"
chmod +x "$SCRIPT_PATH" chmod +x "$SCRIPT_PATH"
@@ -52,5 +56,5 @@ chmod +x "$SCRIPT_PATH"
# Add to crontab if not exists # Add to crontab if not exists
(crontab -l 2>/dev/null; echo "*/5 * * * * $SCRIPT_PATH >> /var/log/app-sync.log 2>&1") | crontab - (crontab -l 2>/dev/null; echo "*/5 * * * * $SCRIPT_PATH >> /var/log/app-sync.log 2>&1") | crontab -
echo "✅ Setup Complete! Application is running." echo "✅ Setup Complete! Application is running with system optimizations."
pm2 status pm2 status

View File

@@ -1,73 +1,73 @@
# start-deployment.ps1 # start-deployment.ps1
# Automates the deployment by reading config, uploading scripts, and executing setup. # Automates the deployment by reading config, uploading scripts, and executing setup.
$ErrorActionPreference = "Stop" $ErrorActionPreference = "Stop"
$ConfigPath = "deploy-config.json" $ConfigPath = "deploy-config.json"
if (-not (Test-Path $ConfigPath)) { if (-not (Test-Path $ConfigPath)) {
Write-Error "Configuration file '$ConfigPath' not found. Please copy 'deploy-config.example.json' to '$ConfigPath' and fill in your details." Write-Error "Configuration file '$ConfigPath' not found. Please copy 'deploy-config.example.json' to '$ConfigPath' and fill in your details."
} }
$Config = Get-Content $ConfigPath | ConvertFrom-Json $Config = Get-Content $ConfigPath | ConvertFrom-Json
# Validate Config # Validate Config
$Required = @("host", "username", "password", "remotePath", "repoUrl", "githubToken") $Required = @("host", "username", "password", "remotePath", "repoUrl", "githubToken")
foreach ($Key in $Required) { foreach ($Key in $Required) {
if (-not $Config.$Key) { if (-not $Config.$Key) {
Write-Error "Missing required config key: $Key" Write-Error "Missing required config key: $Key"
} }
} }
$User = $Config.username $User = $Config.username
$HostName = $Config.host $HostName = $Config.host
$Pass = $Config.password $Pass = $Config.password
# Note: Using password directly in script is tricky with standard ssh/scp without key. # Note: Using password directly in script is tricky with standard ssh/scp without key.
# We will check if 'sshpass' or 'plink' is available, or guide user to use keys. # We will check if 'sshpass' or 'plink' is available, or guide user to use keys.
# Since the user specifically mentioned providing credentials, they might expect us to use them. # Since the user specifically mentioned providing credentials, they might expect us to use them.
# The template used 'plink -pw $Pass'. We will stick to that if available, or warn. # The template used 'plink -pw $Pass'. We will stick to that if available, or warn.
# Check for plink # Check for plink
if (Get-Command "plink.exe" -ErrorAction SilentlyContinue) { if (Get-Command "plink.exe" -ErrorAction SilentlyContinue) {
Write-Host "Using plink for connection..." Write-Host "Using plink for connection..."
$UsePlink = $true $UsePlink = $true
} }
else { else {
Write-Warning "plink.exe not found. Falling back to standard scp/ssh. You may be prompted for password multiple times." Write-Warning "plink.exe not found. Falling back to standard scp/ssh. You may be prompted for password multiple times."
$UsePlink = $false $UsePlink = $false
} }
$RemoteTmp = "/tmp" $RemoteTmp = "/tmp"
$SetupScript = "setup-server.sh" $SetupScript = "setup-server.sh"
$SyncScript = "auto-sync.sh" $SyncScript = "auto-sync.sh"
Write-Host "🚀 Starting Deployment to $HostName..." Write-Host "🚀 Starting Deployment to $HostName..."
# 1. Upload Scripts # 1. Upload Scripts
Write-Host "Uploading scripts..." Write-Host "Uploading scripts..."
if ($UsePlink) { if ($UsePlink) {
echo y | pscp -P 22 -pw $Pass $SetupScript "$User@$HostName`:$RemoteTmp/$SetupScript" echo y | pscp -P 22 -pw $Pass $SetupScript "$User@$HostName`:$RemoteTmp/$SetupScript"
echo y | pscp -P 22 -pw $Pass $SyncScript "$User@$HostName`:$RemoteTmp/$SyncScript" echo y | pscp -P 22 -pw $Pass $SyncScript "$User@$HostName`:$RemoteTmp/$SyncScript"
} }
else { else {
scp $SetupScript "$User@$HostName`:$RemoteTmp/$SetupScript" scp $SetupScript "$User@$HostName`:$RemoteTmp/$SetupScript"
scp $SyncScript "$User@$HostName`:$RemoteTmp/$SyncScript" scp $SyncScript "$User@$HostName`:$RemoteTmp/$SyncScript"
} }
# 2. Execute Setup # 2. Execute Setup
Write-Host "Executing setup on remote server..." Write-Host "Executing setup on remote server..."
$AppDir = $Config.remotePath $AppDir = $Config.remotePath
$Repo = $Config.repoUrl $Repo = $Config.repoUrl
$Token = $Config.githubToken $Token = $Config.githubToken
# Make scripts executable and run setup # Make scripts executable and run setup
$RemoteCmd = "chmod +x $RemoteTmp/$SetupScript $RemoteTmp/$SyncScript; $RemoteTmp/$SetupScript '$Repo' '$AppDir' '$Token'; rm $RemoteTmp/$SetupScript" $RemoteCmd = "chmod +x $RemoteTmp/$SetupScript $RemoteTmp/$SyncScript; $RemoteTmp/$SetupScript '$Repo' '$AppDir' '$Token'; rm $RemoteTmp/$SetupScript"
if ($UsePlink) { if ($UsePlink) {
echo y | plink -ssh -P 22 -t -pw $Pass "$User@$HostName" $RemoteCmd echo y | plink -ssh -P 22 -t -pw $Pass "$User@$HostName" $RemoteCmd
} }
else { else {
ssh -t "$User@$HostName" $RemoteCmd ssh -t "$User@$HostName" $RemoteCmd
} }
Write-Host "🎉 Deployment command sent!" Write-Host "🎉 Deployment command sent!"

2202
styles.css

File diff suppressed because it is too large Load Diff

318
worker.js Normal file
View File

@@ -0,0 +1,318 @@
// ===================================
// STRESS TESTING TOOL - WEB WORKER
// Handles request loops for a group of users
// ===================================
// Worker configuration, populated via the INIT message from the main thread.
let config = {};

// Mutable run-state for this worker's share of the simulated users.
let state = {
    active: false,          // true while a test run is in progress
    users: [],
    startTime: 0,           // epoch ms recorded when START arrives
    totalRequests: 0,
    successfulRequests: 0,
    failedRequests: 0,
    responseTimes: [],      // rolling window, capped at 500 entries
    bytesSent: 0,
    bytesReceived: 0,
    pageLoadTimes: [],
    totalAssetRequests: 0,
    errorsByCategory: {
        "4xx": 0,
        "5xx": 0,
        "timeout": 0,
        "network": 0
    }
};
// Message router: the main thread drives this worker with typed messages.
// INIT delivers the test configuration, START kicks off a group of
// simulated users, STOP flags every loop to wind down.
self.onmessage = function (e) {
    const { type, data } = e.data;
    if (type === 'INIT') {
        config = data.config;
    } else if (type === 'START') {
        state.active = true;
        state.startTime = Date.now();
        startUsers(data.users);
    } else if (type === 'STOP') {
        state.active = false;
    }
};
// Staggers each simulated user's start according to the configured traffic
// pattern, then begins pushing aggregated stats to the main thread twice a
// second until the run is stopped.
async function startUsers(userIndices) {
    const { trafficPattern, duration } = config;
    const runMillis = duration * 1000;
    const groupSize = userIndices.length;

    for (const userIndex of userIndices) {
        if (!state.active) break;
        const startDelay = calculateStartDelay(userIndex, groupSize, trafficPattern, runMillis);
        setTimeout(() => {
            if (state.active) {
                runUser(userIndex);
            }
        }, startDelay);
    }

    const ticker = setInterval(() => {
        if (state.active) {
            reportResults();
        } else {
            clearInterval(ticker);
        }
    }, 500);
}
// Computes the start delay (ms) for one user, based on the traffic pattern:
//   steady - evenly spaced starts, 100 ms apart within the group
//   burst  - five equal waves spread across the run duration
//   rampup - linear ramp over the full duration
//   random - uniformly random start within the first half of the run
// Unknown patterns start immediately.
function calculateStartDelay(index, count, pattern, duration) {
    const slot = index % count; // position of this user within its group

    switch (pattern) {
        case 'steady':
            return slot * 100;
        case 'burst': {
            const wave = Math.floor(slot / (count / 5));
            return wave * (duration / 5);
        }
        case 'rampup':
            return slot * (duration / count);
        case 'random':
            return Math.random() * (duration / 2);
        default:
            return 0;
    }
}
/**
 * Simulates one virtual user: repeatedly requests pages (optionally fetching
 * page assets and following same-host links) until the configured duration
 * elapses or the worker receives STOP.
 *
 * @param {number} id - Global index of the simulated user. Currently unused
 *   in the body (kept for interface compatibility with callers).
 */
async function runUser(id) {
    const endTime = state.startTime + config.duration * 1000;
    let currentUrl = config.targetUrl;
    let crawlDepth = 0;

    while (state.active && Date.now() < endTime) {
        const pageLoadStart = performance.now();
        const result = await makeRequest(currentUrl);
        let totalPageTime = result.responseTime;

        // Optionally fetch the page's scripts/styles/images to approximate a
        // real browser page load. (Fix: the per-asset results were bound to an
        // unused local; we only need the side effects and the elapsed time.)
        if (config.simulateAssets && result.success && result.body) {
            const assets = extractAssets(result.body, currentUrl);
            if (assets.length > 0) {
                await fetchAssetsThrottled(assets);
                totalPageTime = performance.now() - pageLoadStart;
                state.pageLoadTimes.push(totalPageTime);
                state.totalAssetRequests += assets.length;
            }
        }

        // Report individual requests for the history log. Sampled at 10% to
        // keep message volume down, except on small runs where every hit is
        // logged.
        if (Math.random() < 0.1 || config.userCount < 50) {
            self.postMessage({
                type: 'LOG',
                data: {
                    url: currentUrl,
                    status: result.status,
                    responseTime: result.responseTime,
                    success: result.success,
                    timestamp: new Date().toLocaleTimeString()
                }
            });
        }

        // Crawler: hop to a random same-host link, up to the configured depth.
        if (config.crawlerEnabled && result.success && result.body && crawlDepth < config.crawlDepth) {
            const nextUrl = extractRandomLink(result.body, currentUrl);
            if (nextUrl) {
                currentUrl = nextUrl;
                crawlDepth++;
            }
        }

        // Think time with jitter (50%-150% of the configured pause) so users
        // don't fire in lockstep.
        const jitter = 0.5 + Math.random();
        const sleepTime = config.thinkTime * jitter;
        await new Promise(resolve => setTimeout(resolve, sleepTime));
    }
}
/**
 * Issues one request to the target URL via the CORS proxy and folds the
 * outcome into the worker's aggregate stats.
 *
 * @param {string} targetUrl - Absolute URL to request through the proxy.
 * @returns {Promise<{success: boolean, status: number, responseTime: number, body: ?string}>}
 */
async function makeRequest(targetUrl) {
    const startTime = performance.now();
    let result = {
        success: false,
        status: 0,
        responseTime: 0,
        body: null
    };

    try {
        const payload = {
            targetUrl: targetUrl,
            method: config.httpMethod,
            headers: config.customHeaders,
            body: config.requestBody
        };
        const payloadStr = JSON.stringify(payload);
        state.bytesSent += payloadStr.length;

        const response = await fetch(config.proxyUrl, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: payloadStr
        });

        const proxyResponse = await response.json();
        const endTime = performance.now();

        // Prefer the proxy-measured latency; fall back to our own round-trip.
        result.responseTime = proxyResponse.responseTime || (endTime - startTime);
        result.status = proxyResponse.statusCode;
        // 2xx/3xx counts as success, and the proxy itself must have succeeded.
        result.success = proxyResponse.success && result.status >= 200 && result.status < 400;
        result.body = proxyResponse.body;

        if (result.body) {
            state.bytesReceived += result.body.length;
        }

        updateStats(result);
    } catch (error) {
        // Transport-level failure (proxy unreachable, bad JSON, ...).
        result.responseTime = performance.now() - startTime;
        // Bug fix: also count the attempt in totalRequests so that
        // successfulRequests + failedRequests can never exceed totalRequests.
        state.totalRequests++;
        state.failedRequests++;
        state.errorsByCategory["network"]++;
    }

    return result;
}
// Folds a single request result into the worker's aggregate counters.
// Response times are kept as a bounded rolling window to limit memory use.
function updateStats(result) {
    state.totalRequests++;

    if (result.success) {
        state.successfulRequests++;
    } else {
        state.failedRequests++;
        state.errorsByCategory[categorizeError(result.status)]++;
    }

    state.responseTimes.push(result.responseTime);
    while (state.responseTimes.length > 500) {
        state.responseTimes.shift();
    }
}
// Maps an HTTP status code to one of the error buckets tracked in
// state.errorsByCategory. Status 0 (no response at all) falls through to
// the "network" bucket.
function categorizeError(status) {
    if (status >= 500) return "5xx";
    if (status >= 400) return "4xx";
    return "network";
}
// Pushes a cumulative stats snapshot to the main thread. Counters here are
// cumulative; the main thread is responsible for tracking per-worker totals.
function reportResults() {
    const snapshot = {
        totalRequests: state.totalRequests,
        successfulRequests: state.successfulRequests,
        failedRequests: state.failedRequests,
        bytesSent: state.bytesSent,
        bytesReceived: state.bytesReceived,
        errorsByCategory: state.errorsByCategory,
        responseTimes: state.responseTimes // capped rolling window
    };
    self.postMessage({ type: 'STATS', data: snapshot });
}
// Picks a random same-host link from the page's href attributes (absolute
// http(s) URLs or root-relative paths only). Returns an absolute URL string,
// or null when no suitable link is found or baseUrl is unparsable.
function extractRandomLink(html, baseUrl) {
    try {
        const hrefPattern = /href=["'](https?:\/\/[^"']+|(?:\/[^"']+))["']/gi;
        const origin = new URL(baseUrl);
        const candidates = [];

        let hit;
        while ((hit = hrefPattern.exec(html)) !== null) {
            try {
                const resolved = new URL(hit[1], baseUrl);
                if (resolved.hostname === origin.hostname) {
                    candidates.push(resolved.href);
                }
            } catch (e) { /* unparsable href - skip it */ }
            if (candidates.length > 50) break; // cap extraction work per page
        }

        if (candidates.length > 0) {
            const pick = Math.floor(Math.random() * candidates.length);
            return candidates[pick];
        }
    } catch (e) { /* invalid baseUrl - fall through to null */ }
    return null;
}
// Collects absolute URLs of the page's <script src>, <link href> and
// <img src> references, resolved against baseUrl. Extraction is capped
// (checked per pattern) to bound the work done per page.
function extractAssets(html, baseUrl) {
    const found = [];
    try {
        const patterns = [
            /<script\b[^>]*src=["']([^"']+)["'][^>]*>/gi,
            /<link\b[^>]*href=["']([^"']+)["'][^>]*>/gi,
            /<img\b[^>]*src=["']([^"']+)["'][^>]*>/gi
        ];

        for (const pattern of patterns) {
            let hit;
            while ((hit = pattern.exec(html)) !== null) {
                try {
                    found.push(new URL(hit[1], baseUrl).href);
                } catch (e) { /* malformed URL - skip it */ }
                if (found.length > 20) break; // per-page cap
            }
        }
    } catch (e) { /* defensive: never let parsing kill the user loop */ }
    return found;
}
// Fetches assets in browser-like batches of 6 concurrent requests and stops
// early once the run is cancelled. Returns an array of per-asset success
// booleans (in batch order).
async function fetchAssetsThrottled(assets) {
    const batchSize = 6; // typical browser per-host connection limit
    const outcomes = [];

    for (let offset = 0; offset < assets.length; offset += batchSize) {
        const slice = assets.slice(offset, offset + batchSize);
        const settled = await Promise.all(slice.map(url => fetchAsset(url)));
        outcomes.push(...settled);
        if (!state.active) break;
    }
    return outcomes;
}
// Requests a single asset through the proxy, accounting for bytes sent and
// received. Errors are swallowed deliberately: a failed asset must not abort
// the simulated page load. Returns the proxy's success flag, or false.
async function fetchAsset(url) {
    try {
        const body = JSON.stringify({
            targetUrl: url,
            method: 'GET',
            headers: config.customHeaders
        });
        state.bytesSent += body.length;

        const res = await fetch(config.proxyUrl, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body
        });

        const parsed = await res.json();
        if (parsed.body) {
            state.bytesReceived += parsed.body.length;
        }
        return parsed.success;
    } catch (e) {
        return false;
    }
}