mirror of
https://github.com/DeNNiiInc/Web-Page-Performance-Test.git
synced 2026-04-17 20:05:58 +00:00
Implement high-res video capture with Puppeteer integration in test runner
This commit is contained in:
@@ -186,6 +186,24 @@ async function _executeTest(url, options) {
|
|||||||
const optPath = path.join(reportDir, `${testId}.optimizations.json`);
|
const optPath = path.join(reportDir, `${testId}.optimizations.json`);
|
||||||
fs.writeFileSync(optPath, JSON.stringify(optimizations, null, 2));
|
fs.writeFileSync(optPath, JSON.stringify(optimizations, null, 2));
|
||||||
|
|
||||||
|
// Capture High-Res Video (Run separate pass if needed or extraction)
|
||||||
|
// We already have chrome running. Let's try to capture detailed video.
|
||||||
|
// Note: Lighthouse has finished. We can use the browser instance for a quick video pass.
|
||||||
|
// But ideally we want the video of the FIRST load.
|
||||||
|
// Since we can't easily hook into Lighthouse's run, we accept that we record a "Second Load"
|
||||||
|
// OR we rely on this separate pass for visual record.
|
||||||
|
// Alternatively, if this was the only run, we use it.
|
||||||
|
// For now, let's run a dedicated video capture pass to guarantee quality.
|
||||||
|
let highResFrames = [];
|
||||||
|
try {
|
||||||
|
console.log('Starting High-Res Video Capture pass...');
|
||||||
|
const videoCapture = require('./video-capture');
|
||||||
|
// We reuse the running chrome instance
|
||||||
|
highResFrames = await videoCapture.captureVideo(url, chrome.port);
|
||||||
|
} catch (vidErr) {
|
||||||
|
console.error('High-res video capture failed, falling back to thumbnails:', vidErr);
|
||||||
|
}
|
||||||
|
|
||||||
await chrome.kill();
|
await chrome.kill();
|
||||||
|
|
||||||
// Cleanup User Data Dir
|
// Cleanup User Data Dir
|
||||||
@@ -195,26 +213,32 @@ async function _executeTest(url, options) {
|
|||||||
console.error('Failed to cleanup temp profile:', e);
|
console.error('Failed to cleanup temp profile:', e);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Insert into Database
|
// Use High-Res frames if available, otherwise fallback to Lighthouse thumbnails
|
||||||
// We expect user_uuid and user_ip to be passed in options, or handle gracefully if not
|
const filmstripData = (highResFrames && highResFrames.length > 5) ? highResFrames : (lhr.audits['screenshot-thumbnails']?.details?.items || []);
|
||||||
const userUuid = options.userUuid || 'anonymous';
|
|
||||||
const userIp = options.userIp || '0.0.0.0';
|
|
||||||
|
|
||||||
const insertQuery = `
|
const summary = {
|
||||||
INSERT INTO test_results (id, url, timestamp, is_mobile, scores, metrics, user_uuid, user_ip, filmstrip)
|
id: testId,
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
url: lhr.finalUrl,
|
||||||
`;
|
timestamp: lhr.fetchTime,
|
||||||
const values = [
|
scores: {
|
||||||
testId,
|
performance: lhr.categories.performance.score * 100,
|
||||||
summary.url,
|
accessibility: lhr.categories.accessibility.score * 100,
|
||||||
summary.timestamp,
|
bestPractices: lhr.categories['best-practices'].score * 100,
|
||||||
isMobile,
|
seo: lhr.categories.seo.score * 100,
|
||||||
summary.scores,
|
},
|
||||||
summary.metrics,
|
metrics: {
|
||||||
userUuid,
|
lcp: lhr.audits['largest-contentful-paint'].numericValue,
|
||||||
userIp,
|
cls: lhr.audits['cumulative-layout-shift'].numericValue,
|
||||||
JSON.stringify(summary.filmstrip) // Ensure it's a JSON string
|
tbt: lhr.audits['total-blocking-time'].numericValue,
|
||||||
];
|
},
|
||||||
|
userAgent: lhr.userAgent,
|
||||||
|
isMobile: isMobile,
|
||||||
|
filmstrip: filmstripData
|
||||||
|
};
|
||||||
|
|
||||||
|
// Update summary file with new filmstrip
|
||||||
|
const jsonPath = path.join(reportDir, `${testId}.json`);
|
||||||
|
fs.writeFileSync(jsonPath, JSON.stringify(summary, null, 2));
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const db = require('../lib/db');
|
const db = require('../lib/db');
|
||||||
|
|||||||
96
lib/video-capture.js
Normal file
96
lib/video-capture.js
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
const puppeteer = require('puppeteer-core');

/**
 * Captures a high-resolution filmstrip of the page load.
 *
 * Connects to an ALREADY-RUNNING Chrome instance (launched elsewhere, e.g.
 * by chrome-launcher) via its DevTools debugging port, opens a fresh tab,
 * and screenshots it at ~10 FPS while the target URL loads.
 *
 * @param {string} url - The URL to capture.
 * @param {number} port - Debugging port of the existing Chrome instance.
 * @returns {Promise<Array<{data: string, timing: number}>>} Frames as
 *   base64 JPEG data-URIs with ms offsets from capture start. Returns an
 *   empty array on any failure so the caller's test run is not aborted.
 */
async function captureVideo(url, port) {
  let browser;
  let page;
  const frames = [];
  // Declared at function scope (not inside `try`) so the cleanup path can
  // always stop the loop — the original block-scoped flag left the capture
  // loop running as a floating promise when page.goto() threw.
  let isCapturing = false;
  let capturePromise = null;

  try {
    // Discover the WebSocket debugger URL of the running Chrome instance.
    const versionUrl = `http://127.0.0.1:${port}/json/version`;
    const resp = await fetch(versionUrl);
    if (!resp.ok) {
      throw new Error(`DevTools /json/version returned HTTP ${resp.status}`);
    }
    const versionData = await resp.json();
    const browserWSEndpoint = versionData.webSocketDebuggerUrl;

    browser = await puppeteer.connect({
      browserWSEndpoint,
      defaultViewport: { width: 1920, height: 1080 }
    });

    // Dedicated tab for the capture pass; optimized for screenshots.
    page = await browser.newPage();
    await page.setViewport({ width: 1920, height: 1080, deviceScaleFactor: 1 });

    isCapturing = true;
    const startTime = Date.now();

    // Screenshot loop — runs concurrently with the navigation below.
    const captureLoop = async () => {
      while (isCapturing) {
        try {
          if (page.isClosed()) break;

          const screenshot = await page.screenshot({
            encoding: 'base64',
            type: 'jpeg',
            quality: 60, // Good balance for video, reduces size (1080p is large)
            optimizeForSpeed: true
          });

          frames.push({
            data: 'data:image/jpeg;base64,' + screenshot,
            timing: Date.now() - startTime
          });

          // Cap at 100 frames (~10 seconds at 10fps) to prevent DB explosion.
          if (frames.length >= 100) break;

          // Aim for 10 FPS (100ms between frames).
          await new Promise((r) => setTimeout(r, 100));
        } catch (e) {
          console.error('Frame capture error:', e);
          break;
        }
      }
    };

    // Deliberately NOT awaited here — it records while the page loads.
    capturePromise = captureLoop();

    console.log(`[Video] Navigating to ${url}...`);
    await page.goto(url, { waitUntil: 'networkidle0', timeout: 30000 });

    // Brief settle time for visual stability after network idle.
    await new Promise((r) => setTimeout(r, 1000));

    // Stop and drain the capture loop before reporting results.
    isCapturing = false;
    await capturePromise;
    capturePromise = null;

    console.log(`[Video] Captured ${frames.length} frames.`);
    return frames;
  } catch (error) {
    console.error('Video capture failed:', error);
    return []; // Best-effort: never fail the surrounding test run.
  } finally {
    // BUGFIX: centralized cleanup runs on EVERY exit path. Previously the
    // error path never cleared isCapturing (floating capture loop), and a
    // throwing page.close() on the success path skipped browser.disconnect().
    isCapturing = false;
    if (capturePromise) {
      await capturePromise.catch(() => {});
    }
    if (page && !page.isClosed()) {
      await page.close().catch(() => {});
    }
    // Only disconnect — never close the shared browser owned by the caller.
    if (browser) {
      browser.disconnect();
    }
  }
}

module.exports = { captureVideo };
|
||||||
@@ -20,6 +20,7 @@
|
|||||||
"express": "^4.18.2",
|
"express": "^4.18.2",
|
||||||
"lighthouse": "^13.0.1",
|
"lighthouse": "^13.0.1",
|
||||||
"pg": "^8.16.3",
|
"pg": "^8.16.3",
|
||||||
|
"puppeteer-core": "^21.0.0",
|
||||||
"uuid": "^13.0.0"
|
"uuid": "^13.0.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
|||||||
Reference in New Issue
Block a user