core(predictive-perf): refactor simulation logic (#3489)
* refactor(predictive-perf): use WebInspector.resourceTypes
* refactor(predictive-perf): renames and move logic to lib/
* Moves gather/computed/dependency-graph to lib/dependency-graph
* Renames estimator -> simulator
* Eliminates estimateGraph method from PageDependencyGraph artifact class
* Renames the node states in Simulator
* Refactors signature of simulateDownloadUntil
* destructuring ftw
* feedback
* switch to nodes array
Parent: 6f9d59363a
Commit: fc88101818
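At a high level, the change replaces the static PageDependencyGraph.estimateGraph() helper with a Simulator class that callers construct directly from lib/dependency-graph. A minimal before/after sketch of the call pattern, pieced together from the hunks below (the constructor options shown are the ones used in the updated tests, not an exhaustive list):

    // Before: the computed-artifact class owned the estimation entry point.
    const PageDependencyGraph = require('../gather/computed/page-dependency-graph.js');
    const estimate = PageDependencyGraph.estimateGraph(graph); // {timeInMs, nodeTiming}

    // After: construct a simulator and call simulate() directly.
    const LoadSimulator = require('../lib/dependency-graph/simulator/simulator.js');
    const simulator = new LoadSimulator(graph, {fallbackTTFB: 500}); // options as used in the tests
    const {timeInMs, nodeTiming} = simulator.simulate(); // "destructuring ftw", per the commit message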
@@ -7,8 +7,9 @@

const Audit = require('./audit');
const Util = require('../report/v2/renderer/util.js');
-const PageDependencyGraph = require('../gather/computed/page-dependency-graph.js');
-const Node = require('../gather/computed/dependency-graph/node.js');
+const LoadSimulator = require('../lib/dependency-graph/simulator/simulator.js');
+const Node = require('../lib/dependency-graph/node.js');
+const WebInspector = require('../lib/web-inspector');

// Parameters (in ms) for log-normal CDF scoring. To see the curve:
// https://www.desmos.com/calculator/rjp0lbit8y
@@ -75,8 +76,10 @@ class PredictivePerf extends Audit {
return dependencyGraph.cloneWithRelationships(node => {
// Include everything that might be a long task
if (node.type === Node.TYPES.CPU) return node.event.dur > minimumCpuTaskDuration;
-// Include all scripts and high priority requests
-return node.resourceType !== 'image' && (node.resourceType === 'script' ||
+// Include all scripts and high priority requests, exclude all images
+const isImage = node.record._resourceType === WebInspector.resourceTypes.Image;
+const isScript = node.record._resourceType === WebInspector.resourceTypes.Script;
+return !isImage && (isScript ||
node.record.priority() === 'High' ||
node.record.priority() === 'VeryHigh');
});
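For the resource-type checks above (and the XHR check in the graph artifact below), requests are now compared against the WebInspector.resourceTypes constants instead of lower-cased type strings. A small sketch of the pattern, using only identifiers that appear in this diff (the helper name itself is illustrative):

    const WebInspector = require('../lib/web-inspector');

    // Mirrors the audit's filter: drop images, keep scripts and high-priority requests.
    function isLikelyLongTaskRequest(record) {
      const isImage = record._resourceType === WebInspector.resourceTypes.Image;
      const isScript = record._resourceType === WebInspector.resourceTypes.Script;
      return !isImage &&
          (isScript || record.priority() === 'High' || record.priority() === 'VeryHigh');
    }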
@@ -123,7 +126,7 @@ class PredictivePerf extends Audit {
let sum = 0;
const values = {};
Object.keys(graphs).forEach(key => {
-const estimate = PageDependencyGraph.estimateGraph(graphs[key]);
+const estimate = new LoadSimulator(graphs[key]).simulate();
const lastLongTaskEnd = PredictivePerf.getLastLongTaskEndTime(estimate.nodeTiming);

switch (key) {
@@ -6,10 +6,10 @@
'use strict';

const ComputedArtifact = require('./computed-artifact');
-const NetworkNode = require('./dependency-graph/network-node');
-const CPUNode = require('./dependency-graph/cpu-node');
-const GraphEstimator = require('./dependency-graph/estimator/estimator');
+const NetworkNode = require('../../lib/dependency-graph/network-node');
+const CPUNode = require('../../lib/dependency-graph/cpu-node');
const TracingProcessor = require('../../lib/traces/tracing-processor');
+const WebInspector = require('../../lib/web-inspector');

// Tasks smaller than 10 ms have minimal impact on simulation
const MINIMUM_TASK_DURATION_OF_INTEREST = 10;
@@ -132,7 +132,8 @@ class PageDependencyGraphArtifact extends ComputedArtifact {
static linkCPUNodes(rootNode, networkNodeOutput, cpuNodes) {
function addDependentNetworkRequest(cpuNode, reqId) {
const networkNode = networkNodeOutput.idToNodeMap.get(reqId);
-if (!networkNode || networkNode.resourceType !== 'xhr') return;
+if (!networkNode ||
+networkNode.record._resourceType !== WebInspector.resourceTypes.XHR) return;
cpuNode.addDependent(networkNode);
}

@@ -233,15 +234,6 @@ class PageDependencyGraphArtifact extends ComputedArtifact {
return rootNode;
}

-/**
-* Estimates the duration of the graph and returns individual node timing information.
-* @param {!Node} rootNode
-* @return {{timeInMs: number, nodeTiming: !Map<!Node, !NodeTimingData>}}
-*/
-static estimateGraph(rootNode) {
-return new GraphEstimator(rootNode).estimateWithDetails();
-}
-
/**
*
* @param {!Node} rootNode
@@ -6,6 +6,7 @@
'use strict';

const Node = require('./node');
+const WebInspector = require('../web-inspector');

class NetworkNode extends Node {
/**
@@ -44,13 +45,6 @@ class NetworkNode extends Node {
return this._record;
}

-/**
-* @return {?string}
-*/
-get resourceType() {
-return this._record._resourceType && this._record._resourceType._name;
-}
-
/**
* @return {?string}
*/
@@ -63,7 +57,8 @@ class NetworkNode extends Node {
*/
hasRenderBlockingPriority() {
const priority = this._record.priority();
-return priority === 'VeryHigh' || (priority === 'High' && this.resourceType === 'script');
+const isScript = this._record._resourceType === WebInspector.resourceTypes.Script;
+return priority === 'VeryHigh' || (priority === 'High' && isScript);
}

/**
@@ -7,7 +7,7 @@

const Node = require('../node');
const TcpConnection = require('./tcp-connection');
-const emulation = require('../../../../lib/emulation').settings;
+const emulation = require('../../emulation').settings;

// see https://cs.chromium.org/search/?q=kDefaultMaxNumDelayableRequestsPerClient&sq=package:chromium&type=cs
const DEFAULT_MAXIMUM_CONCURRENT_REQUESTS = 10;
@@ -38,7 +38,14 @@ function groupBy(items, keyFunc) {
return grouped;
}

-class Estimator {
+const NodeState = {
+NotReadyToStart: 0,
+ReadyToStart: 1,
+InProgress: 2,
+Complete: 3,
+};
+
+class Simulator {
/**
* @param {!Node} graph
* @param {{rtt: number, throughput: number, fallbackTTFB: number,
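The "switch to nodes array" item in the commit message refers to this bookkeeping: the NodeState values double as indices into a this._nodes array of Sets, so moving a node between states is a delete from one Set and an add to another. A standalone sketch of that structure (simplified; the real Simulator also records per-node timing data and per-type counts):

    const NodeState = {
      NotReadyToStart: 0,
      ReadyToStart: 1,
      InProgress: 2,
      Complete: 3,
    };

    // One Set per state, indexed by the numeric NodeState value.
    const nodes = [];
    for (const key of Object.keys(NodeState)) {
      nodes[NodeState[key]] = new Set();
    }

    // Mirrors _markNodeAsInProgress below: enter InProgress, leave ReadyToStart.
    function markNodeAsInProgress(node) {
      nodes[NodeState.InProgress].add(node);
      nodes[NodeState.ReadyToStart].delete(node);
    }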
@@ -109,7 +116,7 @@ class Estimator {
// Even though TTFB is greater than server response time, the RTT is underaccounted for by
// not varying per-server and so the difference roughly evens out.
// TODO(patrickhulce): investigate a way to identify per-server RTT
-let estimatedResponseTime = Math.min(...records.map(Estimator.getTTFB));
+let estimatedResponseTime = Math.min(...records.map(Simulator.getTTFB));

// If we couldn't find a TTFB for the requests, use the fallback TTFB instead.
if (!Number.isFinite(estimatedResponseTime)) {
@@ -132,16 +139,17 @@ class Estimator {
}

/**
-* Initializes the various state data structures such as _nodesInQueue and _nodesCompleted.
+* Initializes the various state data structures such as _nodesReadyToStart and _nodesCompleted.
*/
_initializeAuxiliaryData() {
this._nodeTiming = new Map();
-this._nodesUnprocessed = new Set();
-this._nodesCompleted = new Set();
-this._nodesInProgress = new Set();
-this._nodesInQueue = new Set(); // TODO: replace this with priority queue
this._connectionsInUse = new Set();
this._numberInProgressByType = new Map();
+
+this._nodes = [];
+for (const key of Object.keys(NodeState)) {
+this._nodes[NodeState[key]] = new Set();
+}
}

/**
@@ -166,9 +174,9 @@ class Estimator {
* @param {!Node} node
* @param {number} queuedTime
*/
-_markNodeAsInQueue(node, queuedTime) {
-this._nodesInQueue.add(node);
-this._nodesUnprocessed.delete(node);
+_markNodeAsReadyToStart(node, queuedTime) {
+this._nodes[NodeState.ReadyToStart].add(node);
+this._nodes[NodeState.NotReadyToStart].delete(node);
this._setTimingData(node, {queuedTime});
}

@@ -177,8 +185,8 @@ class Estimator {
* @param {number} startTime
*/
_markNodeAsInProgress(node, startTime) {
-this._nodesInQueue.delete(node);
-this._nodesInProgress.add(node);
+this._nodes[NodeState.InProgress].add(node);
+this._nodes[NodeState.ReadyToStart].delete(node);
this._numberInProgressByType.set(node.type, this._numberInProgress(node.type) + 1);
this._setTimingData(node, {startTime});
}
@@ -188,8 +196,8 @@ class Estimator {
* @param {number} endTime
*/
_markNodeAsComplete(node, endTime) {
-this._nodesCompleted.add(node);
-this._nodesInProgress.delete(node);
+this._nodes[NodeState.Complete].add(node);
+this._nodes[NodeState.InProgress].delete(node);
this._numberInProgressByType.set(node.type, this._numberInProgress(node.type) - 1);
this._setTimingData(node, {endTime});

@@ -197,10 +205,10 @@ class Estimator {
for (const dependent of node.getDependents()) {
// Skip dependent node if one of its dependencies hasn't finished yet
const dependencies = dependent.getDependencies();
-if (dependencies.some(dependency => !this._nodesCompleted.has(dependency))) continue;
+if (dependencies.some(dep => !this._nodes[NodeState.Complete].has(dep))) continue;

// Otherwise add it to the queue
-this._markNodeAsInQueue(dependent, endTime);
+this._markNodeAsReadyToStart(dependent, endTime);
}
}

@@ -248,7 +256,7 @@ class Estimator {
*/
_updateNetworkCapacity() {
for (const connection of this._connectionsInUse) {
-connection.setThroughput(this._throughput / this._nodesInProgress.size);
+connection.setThroughput(this._throughput / this._nodes[NodeState.InProgress].size);
}
}

@@ -278,7 +286,7 @@ class Estimator {
const connection = this._connections.get(node.record.connectionId);
const calculation = connection.simulateDownloadUntil(
node.record.transferSize - timingData.bytesDownloaded,
-timingData.timeElapsed
+{timeAlreadyElapsed: timingData.timeElapsed}
);

const estimatedTimeElapsed = calculation.timeElapsed + timingData.timeElapsedOvershoot;
@@ -292,7 +300,7 @@ class Estimator {
*/
_findNextNodeCompletionTime() {
let minimumTime = Infinity;
-for (const node of this._nodesInProgress) {
+for (const node of this._nodes[NodeState.InProgress]) {
minimumTime = Math.min(minimumTime, this._estimateTimeRemaining(node));
}

@@ -320,8 +328,10 @@ class Estimator {
const connection = this._connections.get(node.record.connectionId);
const calculation = connection.simulateDownloadUntil(
node.record.transferSize - timingData.bytesDownloaded,
-timingData.timeElapsed,
-timePeriodLength - timingData.timeElapsedOvershoot
+{
+timeAlreadyElapsed: timingData.timeElapsed,
+maximumTimeToElapse: timePeriodLength - timingData.timeElapsedOvershoot,
+}
);

connection.setCongestionWindow(calculation.congestionWindow);
@@ -342,31 +352,31 @@ class Estimator {
* Estimates the time taken to process all of the graph's nodes.
* @return {{timeInMs: number, nodeTiming: !Map<!Node, !NodeTimingData>}}
*/
-estimateWithDetails() {
+simulate() {
// initialize all the necessary data containers
this._initializeNetworkRecords();
this._initializeNetworkConnections();
this._initializeAuxiliaryData();

-const nodesUnprocessed = this._nodesUnprocessed;
-const nodesInQueue = this._nodesInQueue;
-const nodesInProgress = this._nodesInProgress;
+const nodesNotReadyToStart = this._nodes[NodeState.NotReadyToStart];
+const nodesReadyToStart = this._nodes[NodeState.ReadyToStart];
+const nodesInProgress = this._nodes[NodeState.InProgress];

const rootNode = this._graph.getRootNode();
-rootNode.traverse(node => nodesUnprocessed.add(node));
+rootNode.traverse(node => nodesNotReadyToStart.add(node));

let depth = 0;
let totalElapsedTime = 0;

-// add root node to queue
-this._markNodeAsInQueue(rootNode, totalElapsedTime);
+// root node is always ready to start
+this._markNodeAsReadyToStart(rootNode, totalElapsedTime);

// loop as long as we have nodes in the queue or currently in progress
-while (nodesInQueue.size || nodesInProgress.size) {
+while (nodesReadyToStart.size || nodesInProgress.size) {
depth++;

// move all possible queued nodes to in progress
-for (const node of nodesInQueue) {
+for (const node of nodesReadyToStart) {
this._startNodeIfPossible(node, totalElapsedTime);
}

@@ -387,8 +397,8 @@ class Estimator {
}
}

-if (nodesUnprocessed.size !== 0) {
-throw new Error(`Cycle detected: ${nodesUnprocessed.size} unused nodes`);
+if (nodesNotReadyToStart.size !== 0) {
+throw new Error(`Cycle detected: ${nodesNotReadyToStart.size} unused nodes`);
}

return {
@@ -396,16 +406,9 @@
nodeTiming: this._nodeTiming,
};
}
-
-/**
-* @return {number}
-*/
-estimate() {
-return this.estimateWithDetails().timeInMs;
-}
}

-module.exports = Estimator;
+module.exports = Simulator;

/**
* @typedef {{
@@ -415,4 +418,4 @@ module.exports = Estimator;
* bytesDownloaded: number|undefined,
* }}
*/
-Estimator.NodeTimingData; // eslint-disable-line no-unused-expressions
+Simulator.NodeTimingData; // eslint-disable-line no-unused-expressions
@@ -89,11 +89,12 @@
* https://hpbn.co/transport-layer-security-tls/#tls-handshake for details.
*
* @param {number} bytesToDownload
-* @param {number=} timeAlreadyElapsed
-* @param {number=} maximumTimeToElapse
+* @param {{timeAlreadyElapsed: number, maximumTimeToElapse}=} options
* @return {{timeElapsed: number, roundTrips: number, bytesDownloaded: number, congestionWindow: number}}
*/
-simulateDownloadUntil(bytesToDownload, timeAlreadyElapsed = 0, maximumTimeToElapse = Infinity) {
+simulateDownloadUntil(bytesToDownload, options) {
+const {timeAlreadyElapsed = 0, maximumTimeToElapse = Infinity} = options || {};
+
if (this._warmed && this._h2) {
bytesToDownload -= this._h2OverflowBytesDownloaded;
}
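simulateDownloadUntil now takes its optional parameters as a single options object, with defaults applied via destructuring. A usage sketch following the updated tests below (the require path is illustrative, and the constructor arguments are simply copied from the test file; this diff does not document their meaning):

    const TcpConnection = require('./lib/dependency-graph/simulator/tcp-connection'); // assumed path

    const connection = new TcpConnection(100, Infinity, 0, false); // arguments as used in the tests
    // Old call shape: connection.simulateDownloadUntil(7300, 0, 250);
    const result = connection.simulateDownloadUntil(7300, {
      timeAlreadyElapsed: 0,     // defaults to 0 when omitted
      maximumTimeToElapse: 250,  // defaults to Infinity when omitted
    });
    // Return shape per the JSDoc above: timeElapsed, roundTrips, bytesDownloaded, congestionWindow.
    console.log(result.timeElapsed, result.bytesDownloaded);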
@@ -260,7 +260,6 @@ class Runner {
const computedArtifacts = {};
const filenamesToSkip = [
'computed-artifact.js', // the base class which other artifacts inherit
-'dependency-graph', // a folder containing dependencies, not an artifact
];

require('fs').readdirSync(__dirname + '/gather/computed').forEach(function(filename) {
@@ -6,18 +6,19 @@
'use strict';

const PageDependencyGraph = require('../../../gather/computed/page-dependency-graph');
-const Node = require('../../../gather/computed/dependency-graph/node');
+const Node = require('../../../lib/dependency-graph/node');
const Runner = require('../../../runner.js');
+const WebInspector = require('../../../lib/web-inspector');

const sampleTrace = require('../../fixtures/traces/progressive-app-m60.json');
const sampleDevtoolsLog = require('../../fixtures/traces/progressive-app-m60.devtools.log.json');

const assert = require('assert');

-function createRequest(requestId, url, startTime = 0, _initiator = null, resourceType = '') {
+function createRequest(requestId, url, startTime = 0, _initiator = null, _resourceType = null) {
startTime = startTime / 1000;
const endTime = startTime + .1;
-return {requestId, url, startTime, endTime, _initiator, _resourceType: {_name: resourceType}};
+return {requestId, url, startTime, endTime, _initiator, _resourceType};
}

/* eslint-env mocha */
@@ -158,7 +159,7 @@ describe('PageDependencyGraph computed artifact:', () => {
const request1 = createRequest(1, '1', 0);
const request2 = createRequest(2, '2', 50);
const request3 = createRequest(3, '3', 50);
-const request4 = createRequest(4, '4', 300, null, 'xhr');
+const request4 = createRequest(4, '4', 300, null, WebInspector.resourceTypes.XHR);
const networkRecords = [request1, request2, request3, request4];

addTaskEvents(200, 200, [
@@ -5,7 +5,7 @@
*/
'use strict';

-const Node = require('../../../../gather/computed/dependency-graph/node');
+const Node = require('../../../lib/dependency-graph/node');

const assert = require('assert');

@@ -5,9 +5,9 @@
*/
'use strict';

-const NetworkNode = require('../../../../../gather/computed/dependency-graph/network-node');
-const CpuNode = require('../../../../../gather/computed/dependency-graph/cpu-node');
-const Estimator = require('../../../../../gather/computed/dependency-graph/estimator/estimator');
+const NetworkNode = require('../../../../lib/dependency-graph/network-node');
+const CpuNode = require('../../../../lib/dependency-graph/cpu-node');
+const Simulator = require('../../../../lib/dependency-graph/simulator/simulator');

const assert = require('assert');
let nextRequestId = 1;
@@ -36,28 +36,39 @@ function cpuTask({tid, ts, duration}) {
}

/* eslint-env mocha */
-describe('DependencyGraph/Estimator', () => {
-describe('.estimate', () => {
-it('should estimate basic network graphs', () => {
+describe('DependencyGraph/Simulator', () => {
+describe('.simulate', () => {
+function assertNodeTiming(result, node, assertions) {
+const timing = result.nodeTiming.get(node);
+assert.ok(timing, 'missing node timing information');
+Object.keys(assertions).forEach(key => {
+assert.equal(timing[key], assertions[key]);
+});
+}
+
+it('should simulate basic network graphs', () => {
const rootNode = new NetworkNode(request({}));
-const estimator = new Estimator(rootNode, {fallbackTTFB: 500});
-const result = estimator.estimate();
+const simulator = new Simulator(rootNode, {fallbackTTFB: 500});
+const result = simulator.simulate();
// should be 2 RTTs and 500ms for the server response time
-assert.equal(result, 300 + 500);
+assert.equal(result.timeInMs, 300 + 500);
+assertNodeTiming(result, rootNode, {startTime: 0, endTime: 800});
});

-it('should estimate basic mixed graphs', () => {
+it('should simulate basic mixed graphs', () => {
const rootNode = new NetworkNode(request({}));
const cpuNode = new CpuNode(cpuTask({duration: 200}));
cpuNode.addDependency(rootNode);

-const estimator = new Estimator(rootNode, {fallbackTTFB: 500, cpuTaskMultiplier: 5});
-const result = estimator.estimate();
+const simulator = new Simulator(rootNode, {fallbackTTFB: 500, cpuTaskMultiplier: 5});
+const result = simulator.simulate();
// should be 2 RTTs and 500ms for the server response time + 200 CPU
-assert.equal(result, 300 + 500 + 200);
+assert.equal(result.timeInMs, 300 + 500 + 200);
+assertNodeTiming(result, rootNode, {startTime: 0, endTime: 800});
+assertNodeTiming(result, cpuNode, {startTime: 800, endTime: 1000});
});

-it('should estimate basic network waterfall graphs', () => {
+it('should simulate basic network waterfall graphs', () => {
const nodeA = new NetworkNode(request({connectionId: 1}));
const nodeB = new NetworkNode(request({connectionId: 2}));
const nodeC = new NetworkNode(request({connectionId: 3}));
@@ -67,13 +78,17 @@ describe('DependencyGraph/Estimator', () => {
nodeB.addDependent(nodeC);
nodeC.addDependent(nodeD);

-const estimator = new Estimator(nodeA, {fallbackTTFB: 500});
-const result = estimator.estimate();
+const simulator = new Simulator(nodeA, {fallbackTTFB: 500});
+const result = simulator.simulate();
// should be 800ms each for A, B, C, D
-assert.equal(result, 3200);
+assert.equal(result.timeInMs, 3200);
+assertNodeTiming(result, nodeA, {startTime: 0, endTime: 800});
+assertNodeTiming(result, nodeB, {startTime: 800, endTime: 1600});
+assertNodeTiming(result, nodeC, {startTime: 1600, endTime: 2400});
+assertNodeTiming(result, nodeD, {startTime: 2400, endTime: 3200});
});

-it('should estimate basic CPU queue graphs', () => {
+it('should simulate basic CPU queue graphs', () => {
const nodeA = new NetworkNode(request({connectionId: 1}));
const nodeB = new CpuNode(cpuTask({duration: 100}));
const nodeC = new CpuNode(cpuTask({duration: 600}));
@@ -83,13 +98,17 @@ describe('DependencyGraph/Estimator', () => {
nodeA.addDependent(nodeC);
nodeA.addDependent(nodeD);

-const estimator = new Estimator(nodeA, {fallbackTTFB: 500, cpuTaskMultiplier: 5});
-const result = estimator.estimate();
+const simulator = new Simulator(nodeA, {fallbackTTFB: 500, cpuTaskMultiplier: 5});
+const result = simulator.simulate();
// should be 800ms A, then 1000 ms total for B, C, D in serial
-assert.equal(result, 1800);
+assert.equal(result.timeInMs, 1800);
+assertNodeTiming(result, nodeA, {startTime: 0, endTime: 800});
+assertNodeTiming(result, nodeB, {startTime: 800, endTime: 900});
+assertNodeTiming(result, nodeC, {startTime: 900, endTime: 1500});
+assertNodeTiming(result, nodeD, {startTime: 1500, endTime: 1800});
});

-it('should estimate basic network waterfall graphs with CPU', () => {
+it('should simulate basic network waterfall graphs with CPU', () => {
const nodeA = new NetworkNode(request({connectionId: 1}));
const nodeB = new NetworkNode(request({connectionId: 2}));
const nodeC = new NetworkNode(request({connectionId: 3}));
@@ -103,13 +122,13 @@ describe('DependencyGraph/Estimator', () => {
nodeC.addDependent(nodeD);
nodeC.addDependent(nodeF); // finishes 400 ms after D

-const estimator = new Estimator(nodeA, {fallbackTTFB: 500, cpuTaskMultiplier: 5});
-const result = estimator.estimate();
+const simulator = new Simulator(nodeA, {fallbackTTFB: 500, cpuTaskMultiplier: 5});
+const result = simulator.simulate();
// should be 800ms each for A, B, C, D, with F finishing 400 ms after D
-assert.equal(result, 3600);
+assert.equal(result.timeInMs, 3600);
});

-it('should estimate basic parallel requests', () => {
+it('should simulate basic parallel requests', () => {
const nodeA = new NetworkNode(request({connectionId: 1}));
const nodeB = new NetworkNode(request({connectionId: 2}));
const nodeC = new NetworkNode(request({connectionId: 3, transferSize: 15000}));
@@ -119,10 +138,10 @@ describe('DependencyGraph/Estimator', () => {
nodeA.addDependent(nodeC);
nodeA.addDependent(nodeD);

-const estimator = new Estimator(nodeA, {fallbackTTFB: 500});
-const result = estimator.estimate();
+const simulator = new Simulator(nodeA, {fallbackTTFB: 500});
+const result = simulator.simulate();
// should be 800ms for A and 950ms for C (2 round trips of downloading)
-assert.equal(result, 800 + 950);
+assert.equal(result.timeInMs, 800 + 950);
});

it('should not reuse connections', () => {
@@ -135,10 +154,10 @@ describe('DependencyGraph/Estimator', () => {
nodeA.addDependent(nodeC);
nodeA.addDependent(nodeD);

-const estimator = new Estimator(nodeA, {fallbackTTFB: 500});
-const result = estimator.estimate();
+const simulator = new Simulator(nodeA, {fallbackTTFB: 500});
+const result = simulator.simulate();
// should be 800ms for A and 650ms for the next 3
-assert.equal(result, 800 + 650 * 3);
+assert.equal(result.timeInMs, 800 + 650 * 3);
});

it('should adjust throughput based on number of requests', () => {
@@ -151,10 +170,10 @@ describe('DependencyGraph/Estimator', () => {
nodeA.addDependent(nodeC);
nodeA.addDependent(nodeD);

-const estimator = new Estimator(nodeA, {fallbackTTFB: 500});
-const result = estimator.estimate();
+const simulator = new Simulator(nodeA, {fallbackTTFB: 500});
+const result = simulator.simulate();
// should be 800ms for A and 950ms for C (2 round trips of downloading)
-assert.equal(result, 800 + 950);
+assert.equal(result.timeInMs, 800 + 950);
});
});
});
@@ -6,12 +6,12 @@
'use strict';

// eslint-disable-next-line
-const TcpConnection = require('../../../../../gather/computed/dependency-graph/estimator/tcp-connection');
+const TcpConnection = require('../../../../lib/dependency-graph/simulator/tcp-connection');

const assert = require('assert');

/* eslint-env mocha */
-describe('DependencyGraph/Estimator/TcpConnection', () => {
+describe('DependencyGraph/Simulator/TcpConnection', () => {
describe('#constructor', () => {
it('should create the connection', () => {
const rtt = 150;
@@ -144,7 +144,7 @@ describe('DependencyGraph/Estimator/TcpConnection', () => {

it('should provide the correct values resumed small payload', () => {
const connection = new TcpConnection(100, Infinity, 0, true);
-assert.deepEqual(connection.simulateDownloadUntil(7300, 250), {
+assert.deepEqual(connection.simulateDownloadUntil(7300, {timeAlreadyElapsed: 250}), {
bytesDownloaded: 7300,
extraBytesDownloaded: 0,
congestionWindow: 10,
@@ -170,60 +170,81 @@ describe('DependencyGraph/Estimator/TcpConnection', () => {
const connection = new TcpConnection(100, 8 * 1000 * 1000);
const bytesToDownload = 5 * 1000 * 1000; // 5 mb
connection.setCongestionWindow(68);
-assert.deepEqual(connection.simulateDownloadUntil(bytesToDownload, 5234), {
-bytesDownloaded: bytesToDownload,
-extraBytesDownloaded: 0,
-congestionWindow: 68,
-roundTrips: 51, // 5 mb / (1460 * 68)
-timeElapsed: 5100,
-});
+assert.deepEqual(
+connection.simulateDownloadUntil(bytesToDownload, {timeAlreadyElapsed: 5234}),
+{
+bytesDownloaded: bytesToDownload,
+extraBytesDownloaded: 0,
+congestionWindow: 68,
+roundTrips: 51, // 5 mb / (1460 * 68)
+timeElapsed: 5100,
+}
+);
});
});

context('when maximumTime is set', () => {
it('should provide the correct values less than TTFB', () => {
const connection = new TcpConnection(100, Infinity, 0, false);
-assert.deepEqual(connection.simulateDownloadUntil(7300, 0, 68), {
-bytesDownloaded: 7300,
-extraBytesDownloaded: 0,
-congestionWindow: 10,
-roundTrips: 2,
-timeElapsed: 200,
-});
+assert.deepEqual(
+connection.simulateDownloadUntil(7300, {timeAlreadyElapsed: 0, maximumTimeToElapse: 68}),
+{
+bytesDownloaded: 7300,
+extraBytesDownloaded: 0,
+congestionWindow: 10,
+roundTrips: 2,
+timeElapsed: 200,
+}
+);
});

it('should provide the correct values just over TTFB', () => {
const connection = new TcpConnection(100, Infinity, 0, false);
-assert.deepEqual(connection.simulateDownloadUntil(7300, 0, 250), {
-bytesDownloaded: 7300,
-extraBytesDownloaded: 0,
-congestionWindow: 10,
-roundTrips: 2,
-timeElapsed: 200,
-});
+assert.deepEqual(
+connection.simulateDownloadUntil(7300, {timeAlreadyElapsed: 0, maximumTimeToElapse: 250}),
+{
+bytesDownloaded: 7300,
+extraBytesDownloaded: 0,
+congestionWindow: 10,
+roundTrips: 2,
+timeElapsed: 200,
+}
+);
});

it('should provide the correct values with already elapsed', () => {
const connection = new TcpConnection(100, Infinity, 0, false);
-assert.deepEqual(connection.simulateDownloadUntil(7300, 75, 250), {
-bytesDownloaded: 7300,
-extraBytesDownloaded: 0,
-congestionWindow: 10,
-roundTrips: 2,
-timeElapsed: 125,
-});
+assert.deepEqual(
+connection.simulateDownloadUntil(7300, {
+timeAlreadyElapsed: 75,
+maximumTimeToElapse: 250,
+}),
+{
+bytesDownloaded: 7300,
+extraBytesDownloaded: 0,
+congestionWindow: 10,
+roundTrips: 2,
+timeElapsed: 125,
+}
+);
});

it('should provide the correct values large payloads', () => {
const connection = new TcpConnection(100, 8 * 1000 * 1000);
const bytesToDownload = 10 * 1000 * 1000; // 10 mb
-assert.deepEqual(connection.simulateDownloadUntil(bytesToDownload, 500, 740), {
-bytesDownloaded: 683280, // should be less than 68 * 1460 * 8
-extraBytesDownloaded: 0,
-congestionWindow: 68,
-roundTrips: 8,
-timeElapsed: 800, // skips the handshake because time already elapsed
-});
+assert.deepEqual(
+connection.simulateDownloadUntil(bytesToDownload, {
+timeAlreadyElapsed: 500,
+maximumTimeToElapse: 740,
+}),
+{
+bytesDownloaded: 683280, // should be less than 68 * 1460 * 8
+extraBytesDownloaded: 0,
+congestionWindow: 68,
+roundTrips: 8,
+timeElapsed: 800, // skips the handshake because time already elapsed
+}
+);
});

it('should all add up', () => {
@@ -233,25 +254,26 @@ describe('DependencyGraph/Estimator/TcpConnection', () => {
const secondStoppingPoint = 315;
const thirdStoppingPoint = 10500 - firstStoppingPoint - secondStoppingPoint;

-const firstSegment = connection.simulateDownloadUntil(
-bytesToDownload,
-0,
-firstStoppingPoint
-);
+const firstSegment = connection.simulateDownloadUntil(bytesToDownload, {
+timeAlreadyElapsed: 0,
+maximumTimeToElapse: firstStoppingPoint,
+});
const firstOvershoot = firstSegment.timeElapsed - firstStoppingPoint;

connection.setCongestionWindow(firstSegment.congestionWindow);
const secondSegment = connection.simulateDownloadUntil(
bytesToDownload - firstSegment.bytesDownloaded,
-firstSegment.timeElapsed,
-secondStoppingPoint - firstOvershoot
+{
+timeAlreadyElapsed: firstSegment.timeElapsed,
+maximumTimeToElapse: secondStoppingPoint - firstOvershoot,
+}
);
const secondOvershoot = firstOvershoot + secondSegment.timeElapsed - secondStoppingPoint;

connection.setCongestionWindow(secondSegment.congestionWindow);
const thirdSegment = connection.simulateDownloadUntil(
bytesToDownload - firstSegment.bytesDownloaded - secondSegment.bytesDownloaded,
-firstSegment.timeElapsed + secondSegment.timeElapsed
+{timeAlreadyElapsed: firstSegment.timeElapsed + secondSegment.timeElapsed}
);
const thirdOvershoot = secondOvershoot + thirdSegment.timeElapsed - thirdStoppingPoint;
