From 19923f43697229ea949cf563c62ef96676d58f7f Mon Sep 17 00:00:00 2001
From: Malin
Date: Tue, 7 Apr 2026 11:05:05 +0200
Subject: [PATCH] debug: log outputFolder + JSON files to docker logs; fix
 walker to use stat

- runner.js: console.log outputFolder, sitespeed INFO lines, and find output
  after test so everything appears in docker logs
- parser.js: switch walker from withFileTypes to stat (more reliable on
  Docker volumes); log every directory visited

Co-Authored-By: Claude Sonnet 4.6
---
 parser.js | 21 ++++++++++++---------
 runner.js | 43 +++++++++++++++++++++++++++++--------------
 2 files changed, 41 insertions(+), 23 deletions(-)

diff --git a/parser.js b/parser.js
index a0bfcfc..0930892 100644
--- a/parser.js
+++ b/parser.js
@@ -1,4 +1,4 @@
-import { readdir, readFile } from 'fs/promises';
+import { readdir, readFile, stat } from 'fs/promises';
 import { join } from 'path';
 
 /**
@@ -11,19 +11,22 @@ async function findPageSummaries(outputFolder) {
   const errors = [];
 
   async function walk(dir) {
-    let entries;
+    let names;
     try {
-      entries = await readdir(dir, { withFileTypes: true });
+      names = await readdir(dir);
     } catch (err) {
-      errors.push(`readdir failed on ${dir}: ${err.message}`);
+      errors.push(`readdir(${dir}): ${err.message}`);
       return;
     }
-    for (const e of entries) {
-      const full = join(dir, e.name);
-      if (e.isDirectory()) {
+    console.log(`[parser] walk ${dir} → [${names.join(', ')}]`);
+    for (const name of names) {
+      const full = join(dir, name);
+      let s;
+      try { s = await stat(full); } catch { continue; }
+      if (s.isDirectory()) {
         await walk(full);
-      } else if (e.name.endsWith('.pageSummary.json')) {
-        const plugin = e.name.replace('.pageSummary.json', '');
+      } else if (name.endsWith('.pageSummary.json')) {
+        const plugin = name.replace('.pageSummary.json', '');
         if (!summaries[plugin]) summaries[plugin] = [];
         summaries[plugin].push(full);
         console.log(`[parser] found: ${full}`);
diff --git a/runner.js b/runner.js
index 08f3756..7f55154 100644
--- a/runner.js
+++ b/runner.js
@@ -1,4 +1,4 @@
-import { spawn } from 'child_process';
+import { spawn, execSync } from 'child_process';
 import { join, dirname } from 'path';
 import { fileURLToPath } from 'url';
 import { existsSync } from 'fs';
@@ -12,6 +12,10 @@ export function runTest(job, onLine) {
   const outputFolder = join(REPORTS_DIR, job.id);
   const isDocker = !!process.env.IN_DOCKER;
 
+  // Log to docker logs (stdout of main process) so it appears in `docker logs`
+  console.log(`[runner] REPORTS_DIR=${REPORTS_DIR}`);
+  console.log(`[runner] outputFolder=${outputFolder}`);
+
   const sitespeedArgs = [
     job.url,
     '--browser', job.browser,
@@ -32,31 +36,23 @@
     sitespeedArgs.push('--browsertime.chrome.args', 'disable-gpu');
   }
 
-  // Do not force DISPLAY — sitespeed.io starts and manages its own Xvfb
   const env = { ...process.env };
 
   let child;
   if (isDocker) {
-    // SITESPEED_BIN is set by start.sh from the build-time path discovery
     const bin = process.env.SITESPEED_BIN;
     if (!bin) {
       return reject(new Error(
-        'SITESPEED_BIN is not set. The Docker build may not have found sitespeed.js.\n' +
-        'Check build logs for "Build-time sitespeed.js found at:"'
+        'SITESPEED_BIN is not set. Check build logs for "Build-time sitespeed.js found at:"'
       ));
     }
 
-    onLine(`[runner] node ${bin}`);
-    onLine(`[runner] DISPLAY=${env.DISPLAY}`);
+    console.log(`[runner] spawning: node ${bin} ${sitespeedArgs.slice(0,3).join(' ')} ...`);
     child = spawn('node', [bin, ...sitespeedArgs], { cwd: __dirname, env });
   } else {
     if (!existsSync(LOCAL_BIN)) {
-      return reject(new Error(
-        `Local sitespeed.io not found at ${LOCAL_BIN}\n` +
-        `Run: cd /home/malin/c0ding/sitespeed.io && npm install`
-      ));
+      return reject(new Error(`Local sitespeed.io not found at ${LOCAL_BIN}`));
     }
 
-    onLine(`[runner] node ${LOCAL_BIN.slice(-40)}...`);
     child = spawn('node', [LOCAL_BIN, ...sitespeedArgs], { cwd: __dirname, env });
   }
@@ -64,16 +60,35 @@
 
   child.stdout.on('data', (data) => {
     const lines = data.toString().split('\n').filter(Boolean);
-    for (const line of lines) { allLines.push(line); onLine(line); }
+    for (const line of lines) {
+      allLines.push(line);
+      onLine(line);
+      // Mirror INFO/ERROR lines to docker logs too
+      if (line.includes('INFO:') || line.includes('ERROR:') || line.includes('stored in')) {
+        console.log('[sitespeed]', line);
+      }
+    }
   });
 
   child.stderr.on('data', (data) => {
     const lines = data.toString().split('\n').filter(Boolean);
-    for (const line of lines) { allLines.push('[stderr] ' + line); onLine('[stderr] ' + line); }
+    for (const line of lines) {
+      allLines.push('[stderr] ' + line);
+      onLine('[stderr] ' + line);
+      if (line.includes('ERROR:')) console.error('[sitespeed stderr]', line);
+    }
   });
 
   child.on('close', (code) => {
     if (code === 0) {
+      // Log what was actually written so we can debug the parser
+      try {
+        const found = execSync(
+          `find "${outputFolder}" -name "*.json" 2>/dev/null | head -30`,
+          { encoding: 'utf8' }
+        ).trim();
+        console.log(`[runner] JSON files written:\n${found || '(none found)'}`);
+      } catch {}
       resolve(outputFolder);
     } else {
       const tail = allLines.slice(-20).join('\n');