diff --git a/lib/runner.js b/lib/runner.js index 550dd93..884e707 100644 --- a/lib/runner.js +++ b/lib/runner.js @@ -186,6 +186,24 @@ async function _executeTest(url, options) { const optPath = path.join(reportDir, `${testId}.optimizations.json`); fs.writeFileSync(optPath, JSON.stringify(optimizations, null, 2)); + // Capture High-Res Video (Run separate pass if needed or extraction) + // We already have chrome running. Let's try to capture detailed video. + // Note: Lighthouse has finished. We can use the browser instance for a quick video pass. + // But ideally we want the video of the FIRST load. + // Since we can't easily hook into Lighthouse's run, we accept that we record a "Second Load" + // OR we rely on this separate pass for visual record. + // Alternatively, if this was the only run, we use it. + // For now, let's run a dedicated video capture pass to guarantee quality. + let highResFrames = []; + try { + console.log('Starting High-Res Video Capture pass...'); + const videoCapture = require('./video-capture'); + // We reuse the running chrome instance + highResFrames = await videoCapture.captureVideo(url, chrome.port); + } catch (vidErr) { + console.error('High-res video capture failed, falling back to thumbnails:', vidErr); + } + await chrome.kill(); // Cleanup User Data Dir @@ -195,26 +213,32 @@ async function _executeTest(url, options) { console.error('Failed to cleanup temp profile:', e); } - // Insert into Database - // We expect user_uuid and user_ip to be passed in options, or handle gracefully if not - const userUuid = options.userUuid || 'anonymous'; - const userIp = options.userIp || '0.0.0.0'; + // Use High-Res frames if available, otherwise fallback to Lighthouse thumbnails + const filmstripData = (highResFrames && highResFrames.length > 5) ? 
const puppeteer = require('puppeteer-core');

/**
 * Captures a high-resolution filmstrip of a page load by polling
 * screenshots (~10 fps) while the page navigates.
 *
 * Connects to an ALREADY RUNNING Chrome instance (launched elsewhere,
 * e.g. by chrome-launcher) via its remote-debugging port; opens a new
 * tab, records frames during navigation, then closes only that tab and
 * disconnects — it never kills the shared browser.
 *
 * @param {string} url - The URL to capture.
 * @param {number} port - Remote-debugging port of the running Chrome instance.
 * @returns {Promise<Array<{data: string, timing: number}>>} Frames as JPEG
 *   data URIs plus their capture offset in ms from the start of recording.
 *   Resolves to [] on total failure so callers can fall back to thumbnails.
 */
async function captureVideo(url, port) {
  const MAX_FRAMES = 100; // cap (~10 s at 10 fps) to keep payload/DB size bounded
  const FRAME_INTERVAL_MS = 100; // target ~10 fps
  const frames = [];
  let browser;
  let page;
  let isCapturing = true;

  try {
    // chrome-launcher only exposes the port; resolve the WebSocket
    // debugger URL via the DevTools /json/version endpoint.
    const resp = await fetch(`http://127.0.0.1:${port}/json/version`);
    const { webSocketDebuggerUrl } = await resp.json();

    browser = await puppeteer.connect({
      browserWSEndpoint: webSocketDebuggerUrl,
      defaultViewport: { width: 1920, height: 1080 },
    });

    // Dedicated tab so we don't disturb whatever page the caller has open.
    page = await browser.newPage();
    await page.setViewport({ width: 1920, height: 1080, deviceScaleFactor: 1 });

    const startTime = Date.now();

    // Screenshot loop; runs concurrently with the navigation below.
    const captureLoop = async () => {
      while (isCapturing && frames.length < MAX_FRAMES) {
        try {
          if (page.isClosed()) break;

          const screenshot = await page.screenshot({
            encoding: 'base64',
            type: 'jpeg',
            quality: 60, // balances visual quality vs payload size at 1080p
            optimizeForSpeed: true,
          });

          frames.push({
            data: `data:image/jpeg;base64,${screenshot}`,
            timing: Date.now() - startTime,
          });

          await new Promise((r) => setTimeout(r, FRAME_INTERVAL_MS));
        } catch (e) {
          console.error('Frame capture error:', e);
          break;
        }
      }
    };

    // Intentionally NOT awaited yet — it must capture while the page loads.
    const capturePromise = captureLoop();

    console.log(`[Video] Navigating to ${url}...`);
    try {
      await page.goto(url, { waitUntil: 'networkidle0', timeout: 30000 });
      // Short settle time for late visual changes (fonts, animations).
      await new Promise((r) => setTimeout(r, 1000));
    } catch (navErr) {
      // A page that never goes network-idle (original code threw here and
      // discarded everything) is not fatal: the frames captured so far
      // still form a useful filmstrip.
      console.error(
        '[Video] Navigation did not settle, keeping captured frames:',
        navErr.message
      );
    }

    // Stop the loop and wait for the in-flight screenshot to finish.
    isCapturing = false;
    await capturePromise;

    console.log(`[Video] Captured ${frames.length} frames.`);
    return frames;
  } catch (error) {
    console.error('Video capture failed:', error);
    return []; // Empty result lets the caller fall back to Lighthouse thumbnails.
  } finally {
    // Always stop the loop and release the tab, even on early failure;
    // disconnect only — never close the shared browser instance.
    isCapturing = false;
    if (page && !page.isClosed()) {
      await page.close().catch(() => {});
    }
    if (browser) {
      browser.disconnect();
    }
  }
}

module.exports = { captureVideo };