core(config): remove config.artifacts; always use auditMode (#4986)

This commit is contained in:
Brendan Kenny 2018-04-18 09:45:22 -07:00 committed by GitHub
Parent 3ce82a1930
Commit 84debe279f
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
16 changed files: 99 additions and 287 deletions

2
.gitignore vendored

@@ -26,6 +26,8 @@ last-run-results.html
!lighthouse-core/test/results/artifacts/*.trace.json
!lighthouse-core/test/results/artifacts/*.devtoolslog.json
!lighthouse-core/test/fixtures/artifacts/**/*.trace.json
!lighthouse-core/test/fixtures/artifacts/**/*.devtoolslog.json
latest-run


@@ -93,28 +93,22 @@ $ lighthouse --port=9222 --disable-device-emulation --disable-cpu-throttling htt
## Lighthouse as trace processor
Lighthouse can be used to analyze trace and performance data collected from other tools (like WebPageTest and ChromeDriver). The `traces` and `devtoolsLogs` artifact items can be provided using a string for the absolute path on disk. The `devtoolsLogs` array is captured from the `Network` and `Page` domains (a la ChromeDriver's [enableNetwork and enablePage options](https://sites.google.com/a/chromium.org/chromedriver/capabilities#TOC-perfLoggingPrefs-object)).
Lighthouse can be used to analyze trace and performance data collected from other tools (like WebPageTest and ChromeDriver). The `traces` and `devtoolsLogs` artifact items can be provided using a string for the absolute path on disk if they're saved with `.trace.json` and `.devtoolslog.json` file extensions, respectively. The `devtoolsLogs` array is captured from the `Network` and `Page` domains (a la ChromeDriver's [enableNetwork and enablePage options](https://sites.google.com/a/chromium.org/chromedriver/capabilities#TOC-perfLoggingPrefs-object)).
As an example, here's a trace-only run that's reporting on user timings and critical request chains:
As an example, here's a trace-only run that reports on user timings and critical request chains:
### `config.json`
```json
{
"settings": {
"auditMode": "/User/me/lighthouse/lighthouse-core/test/fixtures/artifacts/perflog/",
},
"audits": [
"user-timings",
"critical-request-chains"
],
"artifacts": {
"traces": {
"defaultPass": "/User/me/lighthouse/lighthouse-core/test/fixtures/traces/trace-user-timings.json"
},
"devtoolsLogs": {
"defaultPass": "/User/me/lighthouse/lighthouse-core/test/fixtures/traces/perflog.json"
}
},
"categories": {
"performance": {
"name": "Performance Metrics",

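To make the new flow concrete, here is a hedged sketch of the same trace-only run driven through the Node module rather than the CLI. The `lighthouse(url, flags, config)` signature mirrors the module tests updated later in this commit; the artifacts path is a placeholder for wherever a saved run actually lives.

```js
// Sketch only: a programmatic equivalent of the config.json example above.
// Assumes the directory named by `auditMode` holds artifacts.json plus
// defaultPass.trace.json / defaultPass.devtoolslog.json (the layout used by
// the fixtures added in this commit).
const lighthouse = require('lighthouse');

lighthouse('https://www.reddit.com/r/nba', {output: 'json'}, {
  settings: {
    // Placeholder path; point this at your own saved-artifacts directory.
    auditMode: '/User/me/lighthouse/lighthouse-core/test/fixtures/artifacts/perflog/',
  },
  audits: [
    'user-timings',
    'critical-request-chains',
  ],
}).then(results => {
  console.log(results.audits['user-timings'].displayValue);
});
```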

@@ -17,89 +17,6 @@ const path = require('path');
const Audit = require('../audits/audit');
const Runner = require('../runner');
// cleanTrace is run to remove duplicate TracingStartedInPage events,
// and to change TracingStartedInBrowser events into TracingStartedInPage.
// This is done by searching for most occurring threads and basing new events
// off of those.
function cleanTrace(trace) {
const traceEvents = trace.traceEvents;
// Keep track of most occurring threads
const threads = [];
const countsByThread = {};
const traceStartEvents = [];
const makeMockEvent = (evt, ts) => {
return {
pid: evt.pid,
tid: evt.tid,
ts: ts || 0, // default to 0 for now
ph: 'I',
cat: 'disabled-by-default-devtools.timeline',
name: 'TracingStartedInPage',
args: {
data: {
page: evt.frame,
},
},
s: 't',
};
};
let frame;
let data;
let name;
let counter;
traceEvents.forEach((evt, idx) => {
if (evt.name.startsWith('TracingStartedIn')) {
traceStartEvents.push(idx);
}
// find the event's frame
data = evt.args && (evt.args.data || evt.args.beginData || evt.args.counters);
frame = (evt.args && evt.args.frame) || data && (data.frame || data.page);
if (!frame) {
return;
}
// Increase occurrence count of the frame
name = `pid${evt.pid}-tid${evt.tid}-frame${frame}`;
counter = countsByThread[name];
if (!counter) {
counter = {
pid: evt.pid,
tid: evt.tid,
frame: frame,
count: 0,
};
countsByThread[name] = counter;
threads.push(counter);
}
counter.count++;
});
// find most active thread (and frame)
threads.sort((a, b) => b.count - a.count);
const mostActiveFrame = threads[0];
// Remove all current TracingStartedIn* events, storing
// the first event's ts.
const ts = traceEvents[traceStartEvents[0]] && traceEvents[traceStartEvents[0]].ts;
// account for offset after removing items
let i = 0;
for (const dup of traceStartEvents) {
traceEvents.splice(dup - i, 1);
i++;
}
// Add a new TracingStartedInPage event based on most active thread
// and using TS of first found TracingStartedIn* event
traceEvents.unshift(makeMockEvent(mostActiveFrame, ts));
return trace;
}
function validatePasses(passes, audits) {
if (!Array.isArray(passes)) {
return;
@@ -214,38 +131,6 @@ function assertValidGatherer(gathererInstance, gathererName) {
}
}
function expandArtifacts(artifacts) {
if (!artifacts) {
return null;
}
// currently only trace logs and performance logs should be imported
if (artifacts.traces) {
Object.keys(artifacts.traces).forEach(key => {
log.log('info', 'Normalizing trace contents into expected state...');
let trace = require(artifacts.traces[key]);
// Before Chrome 54.0.2816 (codereview.chromium.org/2161583004), trace was
// an array of trace events. After this point, trace is an object with a
// traceEvents property. Normalize to new format.
if (Array.isArray(trace)) {
trace = {
traceEvents: trace,
};
}
trace = cleanTrace(trace);
artifacts.traces[key] = trace;
});
}
if (artifacts.devtoolsLogs) {
Object.keys(artifacts.devtoolsLogs).forEach(key => {
artifacts.devtoolsLogs[key] = require(artifacts.devtoolsLogs[key]);
});
}
return artifacts;
}
/**
* Creates a settings object from potential flags object by dropping all the properties
* that don't exist on Config.Settings.
@@ -367,7 +252,6 @@ class Config {
this._passes = Config.requireGatherers(configJSON.passes, this._configDir);
this._audits = Config.requireAudits(configJSON.audits, this._configDir);
this._artifacts = expandArtifacts(configJSON.artifacts);
this._categories = configJSON.categories;
this._groups = configJSON.groups;
this._settings = configJSON.settings || {};
@@ -782,11 +666,6 @@ class Config {
return this._audits;
}
/** @type {Array<!Artifacts>} */
get artifacts() {
return this._artifacts;
}
/** @type {Object<{audits: !Array<{id: string, weight: number}>}>} */
get categories() {
return this._categories;


@@ -80,13 +80,16 @@ img {
async function loadArtifacts(basePath) {
log.log('Reading artifacts from disk:', basePath);
if (!fs.existsSync(basePath)) return Promise.reject(new Error('No saved artifacts found'));
if (!fs.existsSync(basePath)) {
throw new Error('No saved artifacts found at ' + basePath);
}
// load artifacts.json
const filenames = fs.readdirSync(basePath);
/** @type {LH.Artifacts} */
const artifacts = JSON.parse(fs.readFileSync(path.join(basePath, artifactsFilename), 'utf8'));
const filenames = fs.readdirSync(basePath);
// load devtoolsLogs
artifacts.devtoolsLogs = {};
filenames.filter(f => f.endsWith(devtoolsLogSuffix)).map(filename => {

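For context on the hunk above: `loadArtifacts` now reads `artifacts.json` and then folds sibling `*.devtoolslog.json` (and `*.trace.json`) files back into the artifacts object, keyed by pass name. A hedged usage sketch follows, with the require path and directory layout assumed from the fixtures and `.gitignore` entries in this commit:

```js
// Assumed on-disk layout of a saved-artifacts directory:
//   <basePath>/artifacts.json                (everything except traces/devtools logs)
//   <basePath>/defaultPass.devtoolslog.json  (becomes artifacts.devtoolsLogs.defaultPass)
//   <basePath>/defaultPass.trace.json        (becomes artifacts.traces.defaultPass)
const assetSaver = require('./lighthouse-core/lib/asset-saver.js'); // path assumed, relative to the repo root

async function inspectSavedRun(basePath) {
  // Throws if basePath doesn't exist, per the change above.
  const artifacts = await assetSaver.loadArtifacts(basePath);
  console.log(artifacts.LighthouseRunWarnings, Object.keys(artifacts.devtoolsLogs));
}

inspectSavedRun('./lighthouse-core/test/fixtures/artifacts/perflog/').catch(console.error);
```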

@@ -70,18 +70,16 @@ class Runner {
opts.url = parsedURL.href;
// User can run -G solo, -A solo, or -GA together
// -G and -A will do run partial lighthouse pipelines,
// -G and -A will run partial lighthouse pipelines,
// and -GA will run everything plus save artifacts to disk
// Gather phase
// Either load saved artifacts off disk, from config, or get from the browser
// Either load saved artifacts off disk or from the browser
let artifacts;
if (settings.auditMode && !settings.gatherMode) {
// No browser required, just load the artifacts from disk.
const path = Runner._getArtifactsPath(settings);
artifacts = await assetSaver.loadArtifacts(path);
} else if (opts.config.artifacts) {
artifacts = opts.config.artifacts;
} else {
artifacts = await Runner._gatherArtifactsFromBrowser(opts, connection);
// -G means save these to ./latest-run, etc.
@@ -115,7 +113,8 @@
const resultsById = {};
for (const audit of auditResults) resultsById[audit.name] = audit;
let reportCategories;
/** @type {Array<LH.Result.Category>} */
let reportCategories = [];
if (opts.config.categories) {
reportCategories = ReportScoring.scoreAllCategories(opts.config.categories, resultsById);
}

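With `config.artifacts` gone, artifacts now come either from the browser or from a saved-artifacts directory, as the Runner hunk above shows. Below is a hedged sketch of the -G/-A roundtrip described in its comments; it assumes boolean `gatherMode`/`auditMode` settings fall back to the `./latest-run` directory mentioned there.

```js
const lighthouse = require('lighthouse');

async function gatherThenAudit(url) {
  // -G equivalent (assumed): gather artifacts from the browser and save them
  // to ./latest-run without running any audits.
  await lighthouse(url, {}, {settings: {gatherMode: true}, audits: []});

  // -A equivalent (assumed): skip the browser entirely and replay the saved
  // artifacts through the audits.
  const results = await lighthouse(url, {}, {
    settings: {auditMode: true},
    audits: ['user-timings'],
  });
  return results.audits['user-timings'];
}

gatherThenAudit('https://example.com/').then(console.log, console.error);
```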

@@ -12,7 +12,6 @@ const defaultConfig = require('../../config/default-config.js');
const log = require('lighthouse-logger');
const Gatherer = require('../../gather/gatherers/gatherer');
const Audit = require('../../audits/audit');
const Runner = require('../../runner');
/* eslint-env mocha */
@@ -473,68 +472,6 @@ describe('Config', () => {
assert.ok(config.settings.disableDeviceEmulation, 'missing setting from flags');
});
describe('artifact loading', () => {
it('expands artifacts', () => {
const config = new Config({
artifacts: {
traces: {
defaultPass: path.resolve(__dirname, '../fixtures/traces/trace-user-timings.json'),
},
devtoolsLogs: {
defaultPass: path.resolve(__dirname, '../fixtures/perflog.json'),
},
},
});
const computed = Runner.instantiateComputedArtifacts();
const traceUserTimings = require('../fixtures/traces/trace-user-timings.json');
assert.deepStrictEqual(config.artifacts.traces.defaultPass.traceEvents, traceUserTimings);
const devtoolsLogs = config.artifacts.devtoolsLogs.defaultPass;
assert.equal(devtoolsLogs.length, 555);
return computed.requestNetworkRecords(devtoolsLogs).then(records => {
assert.equal(records.length, 76);
});
});
it('expands artifacts with multiple named passes', () => {
const config = new Config({
artifacts: {
traces: {
defaultPass: path.resolve(__dirname, '../fixtures/traces/trace-user-timings.json'),
otherPass: path.resolve(__dirname, '../fixtures/traces/trace-user-timings.json'),
},
devtoolsLogs: {
defaultPass: path.resolve(__dirname, '../fixtures/perflog.json'),
otherPass: path.resolve(__dirname, '../fixtures/perflog.json'),
},
},
});
const traceUserTimings = require('../fixtures/traces/trace-user-timings.json');
assert.deepStrictEqual(config.artifacts.traces.defaultPass.traceEvents, traceUserTimings);
assert.deepStrictEqual(config.artifacts.traces.otherPass.traceEvents, traceUserTimings);
assert.equal(config.artifacts.devtoolsLogs.defaultPass.length, 555);
assert.equal(config.artifacts.devtoolsLogs.otherPass.length, 555);
});
it('handles traces with no TracingStartedInPage events', () => {
const config = new Config({
artifacts: {
traces: {
defaultPass: path.resolve(__dirname,
'../fixtures/traces/trace-user-timings-no-tracingstartedinpage.json'),
},
devtoolsLogs: {
defaultPass: path.resolve(__dirname, '../fixtures/perflog.json'),
},
},
});
assert.ok(config.artifacts.traces.defaultPass.traceEvents.find(
e => e.name === 'TracingStartedInPage' && e.args.data.page === '0xhad00p'));
});
});
describe('#extendConfigJSON', () => {
it('should merge passes', () => {
const configA = {


@@ -0,0 +1 @@
{}

20
lighthouse-core/test/fixtures/artifacts/perflog/artifacts.json vendored Normal file

@@ -0,0 +1,20 @@
{
"LighthouseRunWarnings": [
"I'm a warning!",
"Also a warning"
],
"UserAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3358.0 Safari/537.36",
"fetchedAt": "2018-03-13T00:55:45.840Z",
"URL": {
"initialUrl": "https://www.reddit.com/r/nba",
"finalUrl": "https://www.reddit.com/r/nba"
},
"Viewport": null,
"ViewportDimensions": {
"innerWidth": 412,
"innerHeight": 732,
"outerWidth": 412,
"outerHeight": 732,
"devicePixelRatio": 2.625
}
}


@@ -0,0 +1,14 @@
[
{"pid":41904,"tid":1295,"ts":1676836141,"ph":"I","cat":"disabled-by-default-devtools.timeline","name":"TracingStartedInPage","args":{"data":{"page":"0xf5fc2501e08","sessionId":"9331.8"}},"tts":314881,"s":"t"},
{"pid":41904,"tid":1295,"ts":506085991145,"ph":"R","cat":"blink.user_timing","name":"navigationStart","args":{"frame": "0xf5fc2501e08"},"tts":314882},
{"pid":41904,"tid":1295,"ts":506085991146,"ph":"R","cat":"blink.user_timing","name":"firstPaint","args":{"frame": "0xf5fc2501e08"},"tts":314883},
{"pid":41904,"tid":1295,"ts":506085991146,"ph":"R","cat":"blink.user_timing","name":"firstContentfulPaint","args":{"frame": "0xf5fc2501e08"},"tts":314883},
{"pid":41904,"tid":1295,"ts":506085991146,"ph":"R","cat":"blink.user_timing","name":"paintNonDefaultBackgroundColor","args":{},"tts":314883},
{"pid":41904,"tid":1295,"ts":506086992099,"ph":"R","cat":"blink.user_timing","name":"mark_test","args":{},"tts":331149},
{"pid":41904,"tid":1295,"ts":506086992100,"ph":"R","cat":"blink.user_timing","name":"goog_123_3_1_start","args":{},"tts":331150},
{"pid":41904,"tid":1295,"ts":506086992101,"ph":"R","cat":"blink.user_timing","name":"goog_123_3_1_end","args":{},"tts":331151},
{"pid":41904,"tid":1295,"ts":506085991147,"ph":"b","cat":"blink.user_timing","name":"measure_test","args":{},"tts":331184,"id":"0x73b016"},
{"pid":41904,"tid":1295,"ts":506086992112,"ph":"e","cat":"blink.user_timing","name":"measure_test","args":{},"tts":331186,"id":"0x73b016"},
{"pid":41904,"tid":1295,"ts":506085991148,"ph":"b","cat":"blink.user_timing","name":"goog_123_3_1","args":{},"tts":331184,"id":"0x73b016"},
{"pid":41904,"tid":1295,"ts":506086992113,"ph":"e","cat":"blink.user_timing","name":"goog_123_3_1","args":{},"tts":331186,"id":"0x73b016"}
]


@@ -60,7 +60,7 @@ module.exports = {
},
beginDevtoolsLog() {},
endDevtoolsLog() {
return require('../fixtures/perflog.json');
return require('../fixtures/artifacts/perflog/defaultPass.devtoolslog.json');
},
blockUrlPatterns() {
return Promise.resolve();


@@ -7,7 +7,7 @@
const NetworkRecorder = require('../../lib/network-recorder');
const assert = require('assert');
const devtoolsLogItems = require('../fixtures/perflog.json');
const devtoolsLogItems = require('../fixtures/artifacts/perflog/defaultPass.devtoolslog.json');
/* eslint-env mocha */
describe('network recorder', function() {


@@ -87,18 +87,17 @@ describe('Module Tests', function() {
});
});
it.skip('should return formatted audit results when given no categories', function() {
it('should return formatted LHR when given no categories', function() {
const exampleUrl = 'https://example.com/';
return lighthouse(exampleUrl, {
output: 'json',
}, {
auditResults: [{
score: 0,
displayValue: '',
rawValue: true,
name: 'viewport',
description: 'HTML has a viewport <meta>',
}],
settings: {
auditMode: __dirname + '/fixtures/artifacts/perflog/',
},
audits: [
'viewport',
],
}).then(results => {
assert.ok(results.lighthouseVersion);
assert.ok(results.fetchedAt);
@@ -107,6 +106,8 @@ describe('Module Tests', function() {
assert.ok(Array.isArray(results.reportCategories));
assert.equal(results.reportCategories.length, 0);
assert.ok(results.audits.viewport);
assert.strictEqual(results.audits.viewport.score, 0);
assert.ok(results.audits.viewport.debugString);
assert.ok(results.timing);
assert.equal(typeof results.timing.total, 'number');
});


@@ -148,25 +148,6 @@ describe('Runner', () => {
});
});
it('accepts existing artifacts', () => {
const url = 'https://example.com';
const config = new Config({
audits: [
'content-width',
],
artifacts: {
ViewportDimensions: {},
},
});
return Runner.run({}, {url, config}).then(results => {
// Mostly checking that this did not throw, but check representative values.
assert.equal(results.initialUrl, url);
assert.strictEqual(results.audits['content-width'].rawValue, true);
});
});
it('accepts audit options', () => {
const url = 'https://example.com';
@@ -188,11 +169,13 @@
}
const config = new Config({
settings: {
auditMode: __dirname + '/fixtures/artifacts/empty-artifacts/',
},
audits: [
{implementation: EavesdropAudit, options: {x: 1}},
{implementation: EavesdropAudit, options: {x: 2}},
],
artifacts: {},
});
return Runner.run({}, {url, config}).then(results => {
@@ -207,15 +190,12 @@
const url = 'https://example.com';
const config = new Config({
settings: {
auditMode: __dirname + '/fixtures/artifacts/perflog/',
},
audits: [
'user-timings',
],
artifacts: {
traces: {
[Audit.DEFAULT_PASS]: path.join(__dirname, '/fixtures/traces/trace-user-timings.json'),
},
},
});
return Runner.run({}, {url, config}).then(results => {
@@ -255,13 +235,13 @@
it('outputs an error audit result when trace required but not provided', () => {
const url = 'https://example.com';
const config = new Config({
settings: {
auditMode: __dirname + '/fixtures/artifacts/empty-artifacts/',
},
audits: [
// requires traces[Audit.DEFAULT_PASS]
'user-timings',
],
artifacts: {
traces: {},
},
});
return Runner.run({}, {url, config}).then(results => {
@@ -275,12 +255,13 @@
it('outputs an error audit result when missing a required artifact', () => {
const url = 'https://example.com';
const config = new Config({
settings: {
auditMode: __dirname + '/fixtures/artifacts/empty-artifacts/',
},
audits: [
// requires the ViewportDimensions artifact
'content-width',
],
artifacts: {},
});
return Runner.run({}, {url, config}).then(results => {
@@ -291,7 +272,9 @@
});
});
it('outputs an error audit result when required artifact was a non-fatal Error', () => {
// TODO: need to support save/load of artifact errors.
// See https://github.com/GoogleChrome/lighthouse/issues/4984
it.skip('outputs an error audit result when required artifact was a non-fatal Error', () => {
const errorMessage = 'blurst of times';
const artifactError = new Error(errorMessage);
@@ -331,6 +314,9 @@
const errorMessage = 'Audit yourself';
const url = 'https://example.com';
const config = new Config({
settings: {
auditMode: __dirname + '/fixtures/artifacts/empty-artifacts/',
},
audits: [
class ThrowyAudit extends Audit {
static get meta() {
@@ -341,8 +327,6 @@
}
},
],
artifacts: {},
});
return Runner.run({}, {url, config}).then(results => {
@@ -357,6 +341,9 @@
const errorMessage = 'Uh oh';
const url = 'https://example.com';
const config = new Config({
settings: {
auditMode: __dirname + '/fixtures/artifacts/empty-artifacts/',
},
audits: [
class FatalThrowyAudit extends Audit {
static get meta() {
@@ -369,8 +356,6 @@
}
},
],
artifacts: {},
});
return Runner.run({}, {url, config}).then(
@@ -379,21 +364,15 @@
});
});
it('accepts performance logs as an artifact', () => {
it('accepts devtoolsLog in artifacts', () => {
const url = 'https://example.com';
const config = new Config({
settings: {
auditMode: __dirname + '/fixtures/artifacts/perflog/',
},
audits: [
'critical-request-chains',
],
artifacts: {
URL: {
finalUrl: 'https://www.reddit.com/r/nba',
},
devtoolsLogs: {
defaultPass: path.join(__dirname, '/fixtures/perflog.json'),
},
},
});
return Runner.run({}, {url, config}).then(results => {
@@ -509,17 +488,18 @@
it('results include artifacts when given artifacts and audits', () => {
const url = 'https://example.com';
const ViewportDimensions = {innerHeight: 10, innerWidth: 10};
const config = new Config({
settings: {
auditMode: __dirname + '/fixtures/artifacts/perflog/',
},
audits: [
'content-width',
],
artifacts: {ViewportDimensions},
});
return Runner.run({}, {url, config}).then(results => {
assert.deepEqual(results.artifacts.ViewportDimensions, ViewportDimensions);
assert.strictEqual(results.artifacts.ViewportDimensions.innerWidth, 412);
assert.strictEqual(results.artifacts.ViewportDimensions.innerHeight, 732);
});
});
@@ -549,19 +529,18 @@
it('includes any LighthouseRunWarnings from artifacts in output', () => {
const url = 'https://example.com';
const LighthouseRunWarnings = [
'warning0',
'warning1',
];
const config = new Config({
artifacts: {
LighthouseRunWarnings,
settings: {
auditMode: __dirname + '/fixtures/artifacts/perflog/',
},
audits: [],
});
return Runner.run(null, {url, config, driverMock}).then(results => {
assert.deepStrictEqual(results.runWarnings, LighthouseRunWarnings);
assert.deepStrictEqual(results.runWarnings, [
'I\'m a warning!',
'Also a warning',
]);
});
});
});

17
typings/config.d.ts vendored

@@ -7,9 +7,6 @@
import * as Gatherer from '../lighthouse-core/gather/gatherers/gatherer.js';
import * as Audit from '../lighthouse-core/audits/audit.js';
/** From type T, drop set of properties K */
type Omit<T, K extends keyof T> = Pick<T, Exclude<keyof T, K>>
declare global {
module LH {
/**
@@ -21,8 +18,6 @@ declare global {
audits?: Config.AuditDefn[];
categories?: Record<string, Config.Category>;
groups?: Record<string, Config.Group>;
// TODO(bckenny): should be Partial<> maybe? don't want to fail imported json
artifacts?: Artifacts;
}
module Config {
@@ -34,7 +29,6 @@
passes?: PassJson[];
categories?: Record<string, CategoryJson>;
groups?: GroupJson[];
artifacts?: ArtifactsJson;
}
export interface SettingsJson extends SharedFlagsSettings {
@@ -76,17 +70,6 @@
description: string;
}
/**
* A LH Artifacts object, but the traces and devtoolsLogs are replaced
* with file paths of json files to import as those artifacts.
* TODO(bckenny): this is to support legacy config.artifacts. Migrate to
* -A instead.
*/
export interface ArtifactsJson extends Omit<Artifacts, 'traces'|'devtoolsLogs'> {
traces: Record<string, string>;
devtoolsLogs: Record<string, string>;
}
/**
* Reference to an audit member of a category and how its score should be
* weighted and how its results grouped with other members.

2
typings/lhr.d.ts vendored

@@ -21,7 +21,7 @@ declare global {
/** An object containing the results of the audits, keyed by the audits' `id` identifier. */
audits: Record<string, Audit.Result>;
/** The top-level categories, their overall scores, and member audits. */
reportCategories?: Result.Category[];
reportCategories: Result.Category[];
/** Descriptions of the groups referenced by CategoryMembers. */
reportGroups?: Record<string, Result.Group>;