Bug 1105014 - Part 1: Sync actor times and overview selection with all the detail views in the new performance tool, r=jsantell

This commit is contained in:
Victor Porof 2015-01-22 12:20:55 -05:00
Родитель 6b6e649c8d
Коммит 1776473d4f
20 изменённых файлов: 700 добавлений и 463 удалений

Просмотреть файл

@ -31,6 +31,8 @@ let SharedPerformanceActors = new WeakMap();
*
* @param Target target
* The target owning this connection.
* @return PerformanceActorsConnection
* The shared connection for the specified target.
*/
SharedPerformanceActors.forTarget = function(target) {
if (this.has(target)) {
@ -140,12 +142,13 @@ PerformanceActorsConnection.prototype = {
this._timeline = new TimelineFront(this._target.client, this._target.form);
} else {
this._timeline = {
start: () => {},
stop: () => {},
start: () => 0,
stop: () => 0,
isRecording: () => false,
on: () => {},
off: () => {},
destroy: () => {}
on: () => null,
off: () => null,
once: () => promise.reject(),
destroy: () => null
};
}
},
@ -211,36 +214,41 @@ PerformanceFront.prototype = {
/**
* Manually begins a recording session.
*
* @param object options
* @param object timelineOptions
* An options object to pass to the timeline front. Supported
* properties are `withTicks` and `withMemory`.
* @return object
* A promise that is resolved once recording has started.
*/
startRecording: Task.async(function*(options = {}) {
let { isActive, currentTime } = yield this._request("profiler", "isActive");
startRecording: Task.async(function*(timelineOptions = {}) {
let profilerStatus = yield this._request("profiler", "isActive");
let profilerStartTime;
// Start the profiler only if it wasn't already active. The built-in
// nsIPerformance module will be kept recording, because it's the same instance
// for all targets and interacts with the whole platform, so we don't want
// to affect other clients by stopping (or restarting) it.
if (!isActive) {
if (!profilerStatus.isActive) {
// Extend the profiler options so that protocol.js doesn't modify the original.
let profilerOptions = extend({}, this._customProfilerOptions);
yield this._request("profiler", "startProfiler", profilerOptions);
this._profilingStartTime = 0;
profilerStartTime = 0;
this.emit("profiler-activated");
} else {
this._profilingStartTime = currentTime;
profilerStartTime = profilerStatus.currentTime;
this.emit("profiler-already-active");
}
// The timeline actor is target-dependent, so just make sure
// it's recording.
let startTime = yield this._request("timeline", "start", options);
// The timeline actor is target-dependent, so just make sure it's recording.
// It won't, however, be available in older Geckos (FF < 35).
let timelineStartTime = yield this._request("timeline", "start", timelineOptions);
// Return only the start time from the timeline actor.
return { startTime };
// Return the start times from the two actors. They will be used to
// synchronize the profiler and timeline data.
return {
profilerStartTime,
timelineStartTime
};
}),
/**
@ -251,19 +259,15 @@ PerformanceFront.prototype = {
* with the profiler and timeline data.
*/
stopRecording: Task.async(function*() {
// We'll need to filter out all samples that fall out of current profile's
// range. This is necessary because the profiler is continuously running.
let timelineEndTime = yield this._request("timeline", "stop");
let profilerData = yield this._request("profiler", "getProfile");
filterSamples(profilerData, this._profilingStartTime);
offsetSampleTimes(profilerData, this._profilingStartTime);
let endTime = yield this._request("timeline", "stop");
// Join all the acquired data and return it for outside consumers.
// Return the end times from the two actors. They will be used to
// synchronize the profiler and timeline data.
return {
recordingDuration: profilerData.currentTime - this._profilingStartTime,
profilerData: profilerData,
endTime: endTime
profile: profilerData.profile,
profilerEndTime: profilerData.currentTime,
timelineEndTime: timelineEndTime
};
}),
@ -280,39 +284,6 @@ PerformanceFront.prototype = {
}
};
/**
* Filters all the samples in the provided profiler data to be more recent
* than the specified start time.
*
* @param object profilerData
* The profiler data received from the backend.
* @param number profilingStartTime
* The earliest acceptable sample time (in milliseconds).
*/
function filterSamples(profilerData, profilingStartTime) {
let firstThread = profilerData.profile.threads[0];
firstThread.samples = firstThread.samples.filter(e => {
return e.time >= profilingStartTime;
});
}
/**
* Offsets all the samples in the provided profiler data by the specified time.
*
* @param object profilerData
* The profiler data received from the backend.
* @param number timeOffset
* The amount of time to offset by (in milliseconds).
*/
function offsetSampleTimes(profilerData, timeOffset) {
let firstThreadSamples = profilerData.profile.threads[0].samples;
for (let sample of firstThreadSamples) {
sample.time -= timeOffset;
}
}
/**
* A collection of small wrappers promisifying functions invoking callbacks.
*/

Просмотреть файл

@ -130,21 +130,16 @@ function isValidSerializerVersion (version) {
function convertLegacyData (legacyData) {
let { profilerData, ticksData, recordingDuration } = legacyData;
// The `profilerData` stays, and the previously unrecorded fields
// just are empty arrays.
// The `profilerData` and `ticksData` stay, but the previously unrecorded
// fields are just empty arrays.
let data = {
label: profilerData.profilerLabel,
duration: recordingDuration,
markers: [],
frames: [],
memory: [],
ticks: ticksData,
profilerData: profilerData,
// Data from the original profiler won't contain `interval` fields,
// but a recording duration, as well as the current time, which can be used
// to infer the interval startTime and endTime.
interval: {
startTime: profilerData.currentTime - recordingDuration,
endTime: profilerData.currentTime
}
profile: profilerData.profile
};
return data;

Просмотреть файл

@ -3,13 +3,17 @@
* You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const { PerformanceIO } = require("devtools/performance/io");
const { Cc, Ci, Cu, Cr } = require("chrome");
loader.lazyRequireGetter(this, "PerformanceIO",
"devtools/performance/io", true);
loader.lazyRequireGetter(this, "RecordingUtils",
"devtools/performance/recording-utils", true);
const RECORDING_IN_PROGRESS = exports.RECORDING_IN_PROGRESS = -1;
const RECORDING_UNAVAILABLE = exports.RECORDING_UNAVAILABLE = null;
/**
* Model for a holistic profile, containing start/stop times, profiling data, frames data,
* timeline (marker, tick, memory) data, and methods to start/stop recording.
* Model for a holistic profile, containing the duration, profiling data,
* frames data, timeline (marker, tick, memory) data, and methods to mark
* a recording as 'in progress' or 'finished'.
*/
const RecordingModel = function (options={}) {
@ -19,17 +23,20 @@ const RecordingModel = function (options={}) {
};
RecordingModel.prototype = {
_localStartTime: RECORDING_UNAVAILABLE,
_startTime: RECORDING_UNAVAILABLE,
_endTime: RECORDING_UNAVAILABLE,
_markers: [],
_frames: [],
_ticks: [],
_memory: [],
_profilerData: {},
_label: "",
// Private fields, only needed when a recording is started or stopped.
_imported: false,
_isRecording: false,
_recording: false,
_profilerStartTime: 0,
_timelineStartTime: 0,
// Serializable fields, necessary and sufficient for import and export.
_label: "",
_duration: 0,
_markers: null,
_frames: null,
_ticks: null,
_memory: null,
_profile: null,
/**
* Loads a recording from a file.
@ -41,16 +48,13 @@ RecordingModel.prototype = {
let recordingData = yield PerformanceIO.loadRecordingFromFile(file);
this._imported = true;
this._label = recordingData.profilerData.profilerLabel || "";
this._startTime = recordingData.interval.startTime;
this._endTime = recordingData.interval.endTime;
this._label = recordingData.label || "";
this._duration = recordingData.duration;
this._markers = recordingData.markers;
this._frames = recordingData.frames;
this._memory = recordingData.memory;
this._ticks = recordingData.ticks;
this._profilerData = recordingData.profilerData;
return recordingData;
this._profile = recordingData.profile;
}),
/**
@ -65,24 +69,24 @@ RecordingModel.prototype = {
}),
/**
* Starts recording with the PerformanceFront, storing the start times
* on the model.
* Starts recording with the PerformanceFront.
*
* @param object options
* An options object to pass to the timeline front. Supported
* properties are `withTicks` and `withMemory`.
*/
startRecording: Task.async(function *() {
startRecording: Task.async(function *(options = {}) {
// Times must come from the actor in order to be self-consistent.
// However, we also want to update the view with the elapsed time
// even when the actor is not generating data. To do this we get
// the local time and use it to compute a reasonable elapsed time.
this._localStartTime = this._performance.now();
let { startTime } = yield this._front.startRecording({
withTicks: true,
withMemory: true
});
this._isRecording = true;
let info = yield this._front.startRecording(options);
this._profilerStartTime = info.profilerStartTime;
this._timelineStartTime = info.timelineStartTime;
this._recording = true;
this._startTime = startTime;
this._endTime = RECORDING_IN_PROGRESS;
this._markers = [];
this._frames = [];
this._memory = [];
@ -90,63 +94,46 @@ RecordingModel.prototype = {
}),
/**
* Stops recording with the PerformanceFront, storing the end times
* on the model.
* Stops recording with the PerformanceFront.
*/
stopRecording: Task.async(function *() {
let results = yield this._front.stopRecording();
this._isRecording = false;
let info = yield this._front.stopRecording();
this._profile = info.profile;
this._duration = info.profilerEndTime - this._profilerStartTime;
this._recording = false;
// If `endTime` is not yielded from timeline actor (< Fx36), fake it.
if (!results.endTime) {
results.endTime = this._startTime + this.getLocalElapsedTime();
}
// We'll need to filter out all samples that fall out of current profile's
// range since the profiler is continuously running. Because of this, sample
// times are not guaranteed to have a zero epoch, so offset the timestamps.
RecordingUtils.filterSamples(this._profile, this._profilerStartTime);
RecordingUtils.offsetSampleTimes(this._profile, this._profilerStartTime);
this._endTime = results.endTime;
this._profilerData = results.profilerData;
this._markers = this._markers.sort((a,b) => (a.start > b.start));
return results;
// Markers need to be sorted ascending by time, to be properly displayed
// in a waterfall view.
this._markers = this._markers.sort((a, b) => (a.start > b.start));
}),
/**
* Returns the profile's label, from `console.profile(LABEL)`.
* Gets the profile's label, from `console.profile(LABEL)`.
* @return string
*/
getLabel: function () {
return this._label;
},
/**
* Gets the amount of time elapsed locally after starting a recording.
*/
getLocalElapsedTime: function () {
return this._performance.now() - this._localStartTime;
},
/**
* Returns duration of this recording, in milliseconds.
* Gets duration of this recording, in milliseconds.
* @return number
*/
getDuration: function () {
let { startTime, endTime } = this.getInterval();
return endTime - startTime;
},
/**
* Gets the time interval for the current recording.
* @return object
*/
getInterval: function() {
let startTime = this._startTime;
let endTime = this._endTime;
// Compute an approximate ending time for the current recording. This is
// needed to ensure that the view updates even when new data is
// not being generated.
if (endTime == RECORDING_IN_PROGRESS) {
endTime = startTime + this.getLocalElapsedTime();
// Compute an approximate ending time for the current recording if it is
// still in progress. This is needed to ensure that the view updates even
// when new data is not being generated.
if (this._recording) {
return this._performance.now() - this._localStartTime;
} else {
return this._duration;
}
return { startTime, endTime };
},
/**
@ -185,21 +172,22 @@ RecordingModel.prototype = {
* Gets the profiler data in this recording.
* @return array
*/
getProfilerData: function() {
return this._profilerData;
getProfile: function() {
return this._profile;
},
/**
* Gets all the data in this recording.
*/
getAllData: function() {
let interval = this.getInterval();
let label = this.getLabel();
let duration = this.getDuration();
let markers = this.getMarkers();
let frames = this.getFrames();
let memory = this.getMemory();
let ticks = this.getTicks();
let profilerData = this.getProfilerData();
return { interval, markers, frames, memory, ticks, profilerData };
let profile = this.getProfile();
return { label, duration, markers, frames, memory, ticks, profile };
},
/**
@ -207,7 +195,7 @@ RecordingModel.prototype = {
* is recording.
*/
isRecording: function () {
return this._isRecording;
return this._recording;
},
/**
@ -216,26 +204,35 @@ RecordingModel.prototype = {
addTimelineData: function (eventName, ...data) {
// If this model isn't currently recording,
// ignore the timeline data.
if (!this.isRecording()) {
if (!this._recording) {
return;
}
switch (eventName) {
// Accumulate markers into an array.
// Accumulate markers into an array. Furthermore, timestamps do not
// have a zero epoch, so offset all of them by the timeline's start time.
case "markers":
let [markers] = data;
RecordingUtils.offsetMarkerTimes(markers, this._timelineStartTime);
Array.prototype.push.apply(this._markers, markers);
break;
// Accumulate stack frames into an array.
case "frames":
let [, frames] = data;
Array.prototype.push.apply(this._frames, frames);
break;
// Accumulate memory measurements into an array.
// Accumulate memory measurements into an array. Furthermore, the
// timestamp does not have a zero epoch, so offset it.
case "memory":
let [delta, measurement] = data;
this._memory.push({ delta, value: measurement.total / 1024 / 1024 });
let [currentTime, measurement] = data;
this._memory.push({
delta: currentTime - this._timelineStartTime,
value: measurement.total / 1024 / 1024
});
break;
// Save the accumulated refresh driver ticks.
case "ticks":
let [, timestamps] = data;

Просмотреть файл

@ -0,0 +1,59 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
/**
 * Utility functions for managing recording models and their internal data,
 * such as filtering profile samples or offsetting timestamps.
 */
// Shared namespace object; the helpers below are attached to it so that
// consumers can `require` this module and call them as RecordingUtils.*.
exports.RecordingUtils = {};
/**
 * Filters all the samples in the provided profiler data to be more recent
 * than the specified start time. Only the first thread's samples are
 * considered; the profile object is mutated in place.
 *
 * @param object profile
 *        The profiler data received from the backend.
 * @param number profilerStartTime
 *        The earliest acceptable sample time (in milliseconds).
 */
exports.RecordingUtils.filterSamples = function(profile, profilerStartTime) {
  let firstThread = profile.threads[0];
  // The profiler may have been running before this recording session began,
  // so discard any samples taken prior to the session's start time.
  firstThread.samples = firstThread.samples.filter(e => {
    return e.time >= profilerStartTime;
  });
};
/**
 * Offsets all the samples in the provided profiler data by the specified
 * time. Only the first thread's samples are touched; the profile object is
 * mutated in place.
 *
 * @param object profile
 *        The profiler data received from the backend.
 * @param number timeOffset
 *        The amount of time to offset by (in milliseconds).
 */
exports.RecordingUtils.offsetSampleTimes = function(profile, timeOffset) {
  let firstThread = profile.threads[0];
  // Rebase every sample timestamp so the recording effectively starts at 0.
  for (let sample of firstThread.samples) {
    sample.time -= timeOffset;
  }
};
/**
 * Offsets all the markers in the provided timeline data by the specified
 * time. The markers array is mutated in place.
 *
 * @param array markers
 *        The markers array received from the backend.
 * @param number timeOffset
 *        The amount of time to offset by (in milliseconds).
 */
exports.RecordingUtils.offsetMarkerTimes = function(markers, timeOffset) {
  // Rebase both endpoints of every marker so timestamps have a zero epoch.
  for (let marker of markers) {
    marker.start -= timeOffset;
    marker.end -= timeOffset;
  }
};

Просмотреть файл

@ -7,6 +7,7 @@ EXTRA_JS_MODULES.devtools.performance += [
'modules/front.js',
'modules/io.js',
'modules/recording-model.js',
'modules/recording-utils.js',
'panel.js'
]

Просмотреть файл

@ -21,15 +21,8 @@ devtools.lazyRequireGetter(this, "TIMELINE_BLUEPRINT",
"devtools/timeline/global", true);
devtools.lazyRequireGetter(this, "L10N",
"devtools/profiler/global", true);
devtools.lazyRequireGetter(this, "PerformanceIO",
"devtools/performance/io", true);
devtools.lazyRequireGetter(this, "RecordingModel",
"devtools/performance/recording-model", true);
devtools.lazyRequireGetter(this, "RECORDING_IN_PROGRESS",
"devtools/performance/recording-model", true);
devtools.lazyRequireGetter(this, "RECORDING_UNAVAILABLE",
"devtools/performance/recording-model", true);
devtools.lazyRequireGetter(this, "MarkersOverview",
"devtools/timeline/markers-overview", true);
devtools.lazyRequireGetter(this, "MemoryOverview",
@ -81,6 +74,8 @@ const EVENTS = {
// When a recording is started or stopped via the PerformanceController
RECORDING_STARTED: "Performance:RecordingStarted",
RECORDING_STOPPED: "Performance:RecordingStopped",
RECORDING_WILL_START: "Performance:RecordingWillStart",
RECORDING_WILL_STOP: "Performance:RecordingWillStop",
// When a recording is imported or exported via the PerformanceController
RECORDING_IMPORTED: "Performance:RecordingImported",
@ -106,15 +101,15 @@ const EVENTS = {
// Emitted by the CallTreeView when a call tree has been rendered
CALL_TREE_RENDERED: "Performance:UI:CallTreeRendered",
// When a source is shown in the JavaScript Debugger at a specific location.
SOURCE_SHOWN_IN_JS_DEBUGGER: "Performance:UI:SourceShownInJsDebugger",
SOURCE_NOT_FOUND_IN_JS_DEBUGGER: "Performance:UI:SourceNotFoundInJsDebugger",
// Emitted by the WaterfallView when it has been rendered
WATERFALL_RENDERED: "Performance:UI:WaterfallRendered",
// Emitted by the FlameGraphView when it has been rendered
FLAMEGRAPH_RENDERED: "Performance:UI:FlameGraphRendered"
FLAMEGRAPH_RENDERED: "Performance:UI:FlameGraphRendered",
// When a source is shown in the JavaScript Debugger at a specific location.
SOURCE_SHOWN_IN_JS_DEBUGGER: "Performance:UI:SourceShownInJsDebugger",
SOURCE_NOT_FOUND_IN_JS_DEBUGGER: "Performance:UI:SourceNotFoundInJsDebugger"
};
/**
@ -192,7 +187,7 @@ let PerformanceController = {
gFront.on("ticks", this._onTimelineData); // framerate
gFront.on("markers", this._onTimelineData); // timeline markers
gFront.on("frames", this._onTimelineData); // stack frames
gFront.on("memory", this._onTimelineData); // timeline memory
gFront.on("memory", this._onTimelineData); // memory measurements
}),
/**
@ -225,11 +220,13 @@ let PerformanceController = {
* when the front has started to record.
*/
startRecording: Task.async(function *() {
let recording = this.createNewRecording();
this.setCurrentRecording(recording);
yield recording.startRecording();
let recording = this._createRecording();
this.emit(EVENTS.RECORDING_WILL_START, recording);
yield recording.startRecording({ withTicks: true, withMemory: true });
this.emit(EVENTS.RECORDING_STARTED, recording);
this.setCurrentRecording(recording);
}),
/**
@ -238,13 +235,15 @@ let PerformanceController = {
*/
stopRecording: Task.async(function *() {
let recording = this._getLatestRecording();
yield recording.stopRecording();
this.emit(EVENTS.RECORDING_WILL_STOP, recording);
yield recording.stopRecording();
this.emit(EVENTS.RECORDING_STOPPED, recording);
}),
/**
* Saves the current recording to a file.
* Saves the given recording to a file. Emits `EVENTS.RECORDING_EXPORTED`
* when the file was saved.
*
* @param RecordingModel recording
* The model that holds the recording data.
@ -252,20 +251,19 @@ let PerformanceController = {
* The file to stream the data into.
*/
exportRecording: Task.async(function*(_, recording, file) {
let recordingData = recording.getAllData();
yield PerformanceIO.saveRecordingToFile(recordingData, file);
this.emit(EVENTS.RECORDING_EXPORTED, recordingData);
yield recording.exportRecording(file);
this.emit(EVENTS.RECORDING_EXPORTED, recording);
}),
/**
* Loads a recording from a file, adding it to the recordings list.
* Loads a recording from a file, adding it to the recordings list. Emits
* `EVENTS.RECORDING_IMPORTED` when the file was loaded.
*
* @param nsILocalFile file
* The file to import the data from.
*/
importRecording: Task.async(function*(_, file) {
let recording = this.createNewRecording();
let recording = this._createRecording();
yield recording.importRecording(file);
this.emit(EVENTS.RECORDING_IMPORTED, recording);
@ -278,11 +276,8 @@ let PerformanceController = {
* @return RecordingModel
* The newly created recording model.
*/
createNewRecording: function () {
let recording = new RecordingModel({
front: gFront,
performance: performance
});
_createRecording: function () {
let recording = new RecordingModel({ front: gFront, performance });
this._recordings.push(recording);
this.emit(EVENTS.RECORDING_CREATED, recording);

Просмотреть файл

@ -19,10 +19,10 @@
<script type="application/javascript" src="performance/recording-model.js"/>
<script type="application/javascript" src="performance/views/overview.js"/>
<script type="application/javascript" src="performance/views/toolbar.js"/>
<script type="application/javascript" src="performance/views/details.js"/>
<script type="application/javascript" src="performance/views/details-call-tree.js"/>
<script type="application/javascript" src="performance/views/details-waterfall.js"/>
<script type="application/javascript" src="performance/views/details-flamegraph.js"/>
<script type="application/javascript" src="performance/views/details.js"/>
<script type="application/javascript" src="performance/views/recordings.js"/>
<popupset id="performance-options-popupset">
@ -34,6 +34,7 @@
tooltiptext="&profilerUI.invertTree.tooltiptext;"/>
</menupopup>
</popupset>
<hbox class="theme-body" flex="1">
<vbox id="recordings-pane">
<toolbar id="recordings-toolbar"
@ -53,7 +54,8 @@
</toolbar>
<vbox id="recordings-list" flex="1"/>
</vbox>
<vbox flex="1">
<vbox id="performance-pane" flex="1">
<toolbar id="performance-toolbar" class="devtools-toolbar">
<hbox id="performance-toolbar-controls-detail-views" class="devtools-toolbarbutton-group">
<toolbarbutton id="select-waterfall-view"
@ -80,6 +82,7 @@
<hbox id="memory-overview"/>
<hbox id="time-framerate"/>
</vbox>
<deck id="details-pane" flex="1">
<hbox id="waterfall-view" flex="1">
<vbox id="waterfall-breakdown" flex="1" />

Просмотреть файл

@ -3,6 +3,8 @@
* You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const CALLTREE_UPDATE_DEBOUNCE = 50; // ms
/**
* CallTree view containing profiler call tree, controlled by DetailsView.
*/
@ -11,9 +13,9 @@ let CallTreeView = {
* Sets up the view with event binding.
*/
initialize: function () {
this._callTree = $(".call-tree-cells-container");
this._onRecordingStoppedOrSelected = this._onRecordingStoppedOrSelected.bind(this);
this._onRangeChange = this._onRangeChange.bind(this);
this._onDetailsViewSelected = this._onDetailsViewSelected.bind(this);
this._onPrefChanged = this._onPrefChanged.bind(this);
this._onLink = this._onLink.bind(this);
@ -22,28 +24,35 @@ let CallTreeView = {
PerformanceController.on(EVENTS.PREF_CHANGED, this._onPrefChanged);
OverviewView.on(EVENTS.OVERVIEW_RANGE_SELECTED, this._onRangeChange);
OverviewView.on(EVENTS.OVERVIEW_RANGE_CLEARED, this._onRangeChange);
DetailsView.on(EVENTS.DETAILS_VIEW_SELECTED, this._onDetailsViewSelected);
},
/**
* Unbinds events.
*/
destroy: function () {
clearNamedTimeout("calltree-update");
PerformanceController.off(EVENTS.RECORDING_STOPPED, this._onRecordingStoppedOrSelected);
PerformanceController.off(EVENTS.RECORDING_SELECTED, this._onRecordingStoppedOrSelected);
PerformanceController.off(EVENTS.PREF_CHANGED, this._onPrefChanged);
OverviewView.off(EVENTS.OVERVIEW_RANGE_SELECTED, this._onRangeChange);
OverviewView.off(EVENTS.OVERVIEW_RANGE_CLEARED, this._onRangeChange);
DetailsView.off(EVENTS.DETAILS_VIEW_SELECTED, this._onDetailsViewSelected);
},
/**
* Method for handling all the set up for rendering a new call tree.
*
* @param object interval [optional]
* The { startTime, endTime }, in milliseconds.
* @param object options [optional]
* Additional options for the new call tree.
*/
render: function (profilerData, beginAt, endAt, options={}) {
// Empty recordings might yield no profiler data.
if (profilerData.profile == null) {
return;
}
let threadNode = this._prepareCallTree(profilerData, beginAt, endAt, options);
render: function (interval={}, options={}) {
let recording = PerformanceController.getCurrentRecording();
let profile = recording.getProfile();
let threadNode = this._prepareCallTree(profile, interval, options);
this._populateCallTree(threadNode, options);
this.emit(EVENTS.CALL_TREE_RENDERED);
},
@ -52,23 +61,32 @@ let CallTreeView = {
* Called when recording is stopped or has been selected.
*/
_onRecordingStoppedOrSelected: function (_, recording) {
// If not recording, then this recording is done and we can render all of it
if (!recording.isRecording()) {
let profilerData = recording.getProfilerData();
this.render(profilerData);
this.render();
}
},
/**
* Fired when a range is selected or cleared in the OverviewView.
*/
_onRangeChange: function (_, params) {
// When a range is cleared, we'll have no beginAt/endAt data,
// so the rebuild will just render all the data again.
let recording = PerformanceController.getCurrentRecording();
let profilerData = recording.getProfilerData();
let { beginAt, endAt } = params || {};
this.render(profilerData, beginAt, endAt);
_onRangeChange: function (_, interval) {
if (DetailsView.isViewSelected(this)) {
let debounced = () => this.render(interval);
setNamedTimeout("calltree-update", CALLTREE_UPDATE_DEBOUNCE, debounced);
} else {
this._dirty = true;
this._interval = interval;
}
},
/**
* Fired when a view is selected in the DetailsView.
*/
_onDetailsViewSelected: function() {
if (DetailsView.isViewSelected(this) && this._dirty) {
this.render(this._interval);
this._dirty = false;
}
},
/**
@ -85,8 +103,8 @@ let CallTreeView = {
* Called when the recording is stopped and prepares data to
* populate the call tree.
*/
_prepareCallTree: function (profilerData, startTime, endTime, options) {
let threadSamples = profilerData.profile.threads[0].samples;
_prepareCallTree: function (profile, { startTime, endTime }, options) {
let threadSamples = profile.threads[0].samples;
let contentOnly = !Prefs.showPlatformData;
let invertTree = PerformanceController.getPref("invert-call-tree");
@ -106,19 +124,25 @@ let CallTreeView = {
*/
_populateCallTree: function (frameNode, options={}) {
let root = new CallView({
autoExpandDepth: options.inverted ? 0 : undefined,
frame: frameNode,
inverted: options.inverted,
// Root nodes are hidden in inverted call trees.
hidden: options.inverted,
inverted: options.inverted
// Call trees should only auto-expand when not inverted. Passing undefined
// will default to the CALL_TREE_AUTO_EXPAND depth.
autoExpandDepth: options.inverted ? 0 : undefined,
});
// Bind events
// Bind events.
root.on("link", this._onLink);
// Clear out other call trees.
this._callTree.innerHTML = "";
root.attachTo(this._callTree);
let container = $(".call-tree-cells-container");
container.innerHTML = "";
root.attachTo(container);
// When platform data isn't shown, hide the category labels, since they're
// only available for C++ frames.
let contentOnly = !Prefs.showPlatformData;
root.toggleCategories(!contentOnly);
},
@ -128,9 +152,7 @@ let CallTreeView = {
*/
_onPrefChanged: function (_, prefName, value) {
if (prefName === "invert-call-tree") {
let { beginAt, endAt } = OverviewView.getRange();
let profilerData = PerformanceController.getCurrentRecording().getProfilerData();
this.render(profilerData, beginAt || void 0, endAt || void 0);
this.render(OverviewView.getTimeInterval());
}
}
};

Просмотреть файл

@ -14,42 +14,46 @@ let FlameGraphView = {
initialize: Task.async(function* () {
this._onRecordingStoppedOrSelected = this._onRecordingStoppedOrSelected.bind(this);
this._onRangeChange = this._onRangeChange.bind(this);
this._onRangeChangeInGraph = this._onRangeChangeInGraph.bind(this);
this._onDetailsViewSelected = this._onDetailsViewSelected.bind(this);
this.graph = new FlameGraph($("#flamegraph-view"));
this.graph.timelineTickUnits = L10N.getStr("graphs.ms");
yield this.graph.ready();
this.graph.on("selecting", this._onRangeChangeInGraph);
PerformanceController.on(EVENTS.RECORDING_STOPPED, this._onRecordingStoppedOrSelected);
PerformanceController.on(EVENTS.RECORDING_SELECTED, this._onRecordingStoppedOrSelected);
OverviewView.on(EVENTS.OVERVIEW_RANGE_SELECTED, this._onRangeChange);
OverviewView.on(EVENTS.OVERVIEW_RANGE_CLEARED, this._onRangeChange);
DetailsView.on(EVENTS.DETAILS_VIEW_SELECTED, this._onDetailsViewSelected);
}),
/**
* Unbinds events.
*/
destroy: function () {
this.graph.off("selecting", this._onRangeChangeInGraph);
PerformanceController.off(EVENTS.RECORDING_STOPPED, this._onRecordingStoppedOrSelected);
PerformanceController.off(EVENTS.RECORDING_SELECTED, this._onRecordingStoppedOrSelected);
OverviewView.off(EVENTS.OVERVIEW_RANGE_SELECTED, this._onRangeChange);
OverviewView.off(EVENTS.OVERVIEW_RANGE_CLEARED, this._onRangeChange);
DetailsView.off(EVENTS.DETAILS_VIEW_SELECTED, this._onDetailsViewSelected);
},
/**
* Method for handling all the set up for rendering a new flamegraph.
*
* @param object interval [optional]
* The { startTime, endTime }, in milliseconds.
*/
render: function (profilerData) {
// Empty recordings might yield no profiler data.
if (profilerData.profile == null) {
return;
}
let samples = profilerData.profile.threads[0].samples;
let dataSrc = FlameGraphUtils.createFlameGraphDataFromSamples(samples, {
flattenRecursion: Prefs.flattenTreeRecursion,
filterFrames: !Prefs.showPlatformData && FrameNode.isContent,
showIdleBlocks: Prefs.showIdleBlocks && L10N.getStr("table.idle")
});
this.graph.setData(dataSrc);
render: function (interval={}) {
let recording = PerformanceController.getCurrentRecording();
let startTime = interval.startTime || 0;
let endTime = interval.endTime || recording.getDuration();
this.graph.setViewRange({ startTime, endTime });
this.emit(EVENTS.FLAMEGRAPH_RENDERED);
},
@ -57,18 +61,50 @@ let FlameGraphView = {
* Called when recording is stopped or selected.
*/
_onRecordingStoppedOrSelected: function (_, recording) {
// If not recording, then this recording is done and we can render all of it
if (!recording.isRecording()) {
let profilerData = recording.getProfilerData();
this.render(profilerData);
if (recording.isRecording()) {
return;
}
let profile = recording.getProfile();
let samples = profile.threads[0].samples;
let data = FlameGraphUtils.createFlameGraphDataFromSamples(samples, {
flattenRecursion: Prefs.flattenTreeRecursion,
filterFrames: !Prefs.showPlatformData && FrameNode.isContent,
showIdleBlocks: Prefs.showIdleBlocks && L10N.getStr("table.idle")
});
let startTime = 0;
let endTime = recording.getDuration();
this.graph.setData({ data, bounds: { startTime, endTime } });
this.render();
},
/**
* Fired when a range is selected or cleared in the OverviewView.
*/
_onRangeChange: function (_, params) {
// TODO bug 1105014
_onRangeChange: function (_, interval) {
if (DetailsView.isViewSelected(this)) {
this.render(interval);
} else {
this._dirty = true;
this._interval = interval;
}
},
/**
* Fired when a range is selected or cleared in the FlameGraph.
*/
_onRangeChangeInGraph: function () {
let interval = this.graph.getViewRange();
OverviewView.setTimeInterval(interval, { stopPropagation: true });
},
/**
* Fired when a view is selected in the DetailsView.
*/
_onDetailsViewSelected: function() {
if (DetailsView.isViewSelected(this) && this._dirty) {
this.render(this._interval);
this._dirty = false;
}
}
};

Просмотреть файл

@ -3,6 +3,8 @@
* You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const WATERFALL_UPDATE_DEBOUNCE = 10; // ms
/**
* Waterfall view containing the timeline markers, controlled by DetailsView.
*/
@ -13,6 +15,8 @@ let WaterfallView = {
initialize: Task.async(function *() {
this._onRecordingStarted = this._onRecordingStarted.bind(this);
this._onRecordingStoppedOrSelected = this._onRecordingStoppedOrSelected.bind(this);
this._onRangeChange = this._onRangeChange.bind(this);
this._onDetailsViewSelected = this._onDetailsViewSelected.bind(this);
this._onMarkerSelected = this._onMarkerSelected.bind(this);
this._onResize = this._onResize.bind(this);
@ -26,6 +30,9 @@ let WaterfallView = {
PerformanceController.on(EVENTS.RECORDING_STARTED, this._onRecordingStarted);
PerformanceController.on(EVENTS.RECORDING_STOPPED, this._onRecordingStoppedOrSelected);
PerformanceController.on(EVENTS.RECORDING_SELECTED, this._onRecordingStoppedOrSelected);
OverviewView.on(EVENTS.OVERVIEW_RANGE_SELECTED, this._onRangeChange);
OverviewView.on(EVENTS.OVERVIEW_RANGE_CLEARED, this._onRangeChange);
DetailsView.on(EVENTS.DETAILS_VIEW_SELECTED, this._onDetailsViewSelected);
this.waterfall.recalculateBounds();
}),
@ -34,6 +41,8 @@ let WaterfallView = {
* Unbinds events.
*/
destroy: function () {
clearNamedTimeout("waterfall-update");
this.waterfall.off("selected", this._onMarkerSelected);
this.waterfall.off("unselected", this._onMarkerSelected);
this.details.off("resize", this._onResize);
@ -41,18 +50,23 @@ let WaterfallView = {
PerformanceController.off(EVENTS.RECORDING_STARTED, this._onRecordingStarted);
PerformanceController.off(EVENTS.RECORDING_STOPPED, this._onRecordingStoppedOrSelected);
PerformanceController.off(EVENTS.RECORDING_SELECTED, this._onRecordingStoppedOrSelected);
OverviewView.off(EVENTS.OVERVIEW_RANGE_SELECTED, this._onRangeChange);
OverviewView.off(EVENTS.OVERVIEW_RANGE_CLEARED, this._onRangeChange);
DetailsView.off(EVENTS.DETAILS_VIEW_SELECTED, this._onDetailsViewSelected);
},
/**
* Method for handling all the set up for rendering a new waterfall.
*
* @param object interval [optional]
* The { startTime, endTime }, in milliseconds.
*/
render: function() {
render: function(interval={}) {
let recording = PerformanceController.getCurrentRecording();
let { startTime, endTime } = recording.getInterval();
let startTime = interval.startTime || 0;
let endTime = interval.endTime || recording.getDuration();
let markers = recording.getMarkers();
this.waterfall.setData(markers, startTime, startTime, endTime);
this.waterfall.setData({ markers, interval: { startTime, endTime } });
this.emit(EVENTS.WATERFALL_RENDERED);
},
@ -72,6 +86,29 @@ let WaterfallView = {
}
},
/**
* Fired when a range is selected or cleared in the OverviewView.
*/
_onRangeChange: function (_, interval) {
if (DetailsView.isViewSelected(this)) {
let debounced = () => this.render(interval);
setNamedTimeout("waterfall-update", WATERFALL_UPDATE_DEBOUNCE, debounced);
} else {
this._dirty = true;
this._interval = interval;
}
},
/**
* Fired when a view is selected in the DetailsView.
*/
_onDetailsViewSelected: function() {
if (DetailsView.isViewSelected(this) && this._dirty) {
this.render(this._interval);
this._dirty = false;
}
},
/**
* Called when a marker is selected in the waterfall view,
* updating the markers detail view.

Просмотреть файл

@ -13,10 +13,10 @@ let DetailsView = {
/**
* Name to index mapping of subviews, used by selecting view.
*/
viewIndexes: {
waterfall: 0,
calltree: 1,
flamegraph: 2
components: {
waterfall: { index: 0, view: WaterfallView },
calltree: { index: 1, view: CallTreeView },
flamegraph: { index: 2, view: FlameGraphView }
},
/**
@ -32,8 +32,8 @@ let DetailsView = {
button.addEventListener("command", this._onViewToggle);
}
yield CallTreeView.initialize();
yield WaterfallView.initialize();
yield CallTreeView.initialize();
yield FlameGraphView.initialize();
this.selectView(DEFAULT_DETAILS_SUBVIEW);
@ -47,8 +47,8 @@ let DetailsView = {
button.removeEventListener("command", this._onViewToggle);
}
yield CallTreeView.destroy();
yield WaterfallView.destroy();
yield CallTreeView.destroy();
yield FlameGraphView.destroy();
}),
@ -56,23 +56,56 @@ let DetailsView = {
* Select one of the DetailView's subviews to be rendered,
* hiding the others.
*
* @params {String} selectedView
* Name of the view to be shown.
* @param String viewName
* Name of the view to be shown.
*/
selectView: function (selectedView) {
this.el.selectedIndex = this.viewIndexes[selectedView];
selectView: function (viewName) {
this.el.selectedIndex = this.components[viewName].index;
for (let button of $$("toolbarbutton[data-view]", this.toolbar)) {
if (button.getAttribute("data-view") === selectedView) {
if (button.getAttribute("data-view") === viewName) {
button.setAttribute("checked", true);
} else {
button.removeAttribute("checked");
}
}
this.emit(EVENTS.DETAILS_VIEW_SELECTED, selectedView);
this.emit(EVENTS.DETAILS_VIEW_SELECTED, viewName);
},
/**
* Checks if the provided view is currently selected.
*
* @param object viewObject
* @return boolean
*/
isViewSelected: function(viewObject) {
let selectedIndex = this.el.selectedIndex;
for (let [, { index, view }] of Iterator(this.components)) {
if (index == selectedIndex && view == viewObject) {
return true;
}
}
return false;
},
/**
* Resolves when the provided view is selected. If already selected,
* the returned promise resolves immediately.
*
* @param object viewObject
* @return object
*/
whenViewSelected: Task.async(function*(viewObject) {
if (this.isViewSelected(viewObject)) {
return promise.resolve();
}
yield this.once(EVENTS.DETAILS_VIEW_SELECTED);
return this.whenViewSelected(viewObject);
}),
/**
* Called when a view button is clicked.
*/

Просмотреть файл

@ -17,8 +17,6 @@ const MARKERS_GRAPH_ROW_HEIGHT = 10; // px
const MARKERS_GROUP_VERTICAL_PADDING = 4; // px
const MEMORY_GRAPH_HEIGHT = 30; // px
const GRAPH_SCROLL_EVENTS_DRAIN = 50; // ms
/**
* View handler for the overview panel's time view, displaying
* framerate, markers and memory over time.
@ -31,51 +29,79 @@ let OverviewView = {
* Sets up the view with event binding.
*/
initialize: Task.async(function *() {
this._onRecordingWillStart = this._onRecordingWillStart.bind(this);
this._onRecordingStarted = this._onRecordingStarted.bind(this);
this._onRecordingWillStop = this._onRecordingWillStop.bind(this);
this._onRecordingStopped = this._onRecordingStopped.bind(this);
this._onRecordingSelected = this._onRecordingSelected.bind(this);
this._onRecordingTick = this._onRecordingTick.bind(this);
this._onGraphMouseUp = this._onGraphMouseUp.bind(this);
this._onGraphScroll = this._onGraphScroll.bind(this);
this._onGraphSelecting = this._onGraphSelecting.bind(this);
yield this._showFramerateGraph();
yield this._showMarkersGraph();
yield this._showMemoryGraph();
yield this._showFramerateGraph();
this.framerateGraph.on("mouseup", this._onGraphMouseUp);
this.framerateGraph.on("scroll", this._onGraphScroll);
this.markersOverview.on("mouseup", this._onGraphMouseUp);
this.markersOverview.on("scroll", this._onGraphScroll);
this.memoryOverview.on("mouseup", this._onGraphMouseUp);
this.memoryOverview.on("scroll", this._onGraphScroll);
this.markersOverview.on("selecting", this._onGraphSelecting);
PerformanceController.on(EVENTS.RECORDING_WILL_START, this._onRecordingWillStart);
PerformanceController.on(EVENTS.RECORDING_STARTED, this._onRecordingStarted);
PerformanceController.on(EVENTS.RECORDING_WILL_STOP, this._onRecordingWillStop);
PerformanceController.on(EVENTS.RECORDING_STOPPED, this._onRecordingStopped);
PerformanceController.on(EVENTS.RECORDING_SELECTED, this._onRecordingSelected);
// Populate this overview with some dummy initial data.
this.markersOverview.setData({ duration: 1000, markers: [] });
this.memoryOverview.setData([]);
this.framerateGraph.setData([]);
}),
/**
* Unbinds events.
*/
destroy: function () {
this.framerateGraph.off("mouseup", this._onGraphMouseUp);
this.framerateGraph.off("scroll", this._onGraphScroll);
this.markersOverview.off("mouseup", this._onGraphMouseUp);
this.markersOverview.off("scroll", this._onGraphScroll);
this.memoryOverview.off("mouseup", this._onGraphMouseUp);
this.memoryOverview.off("scroll", this._onGraphScroll);
this.markersOverview.off("selecting", this._onGraphSelecting);
clearNamedTimeout("graph-scroll");
PerformanceController.off(EVENTS.RECORDING_WILL_START, this._onRecordingWillStart);
PerformanceController.off(EVENTS.RECORDING_STARTED, this._onRecordingStarted);
PerformanceController.off(EVENTS.RECORDING_WILL_STOP, this._onRecordingWillStop);
PerformanceController.off(EVENTS.RECORDING_STOPPED, this._onRecordingStopped);
PerformanceController.off(EVENTS.RECORDING_SELECTED, this._onRecordingSelected);
},
/**
* Gets currently selected range's beginAt and endAt values.
* Sets the time interval selection for all graphs in this overview.
*
* @param object interval
* The { starTime, endTime }, in milliseconds.
*/
getRange: function () {
return { beginAt: this._beginAt, endAt: this._endAt };
setTimeInterval: function(interval, options = {}) {
let recording = PerformanceController.getCurrentRecording();
if (recording == null) {
throw "A recording should be available in order to set the selection."
}
let mapStart = () => 0;
let mapEnd = () => recording.getDuration();
let selection = { start: interval.startTime, end: interval.endTime };
this._stopSelectionChangeEventPropagation = options.stopPropagation;
this.markersOverview.setMappedSelection(selection, { mapStart, mapEnd });
this._stopSelectionChangeEventPropagation = false;
},
/**
* Gets the time interval selection for all graphs in this overview.
*
* @return object
* The { starTime, endTime }, in milliseconds.
*/
getTimeInterval: function() {
let recording = PerformanceController.getCurrentRecording();
if (recording == null) {
throw "A recording should be available in order to get the selection."
}
let mapStart = () => 0;
let mapEnd = () => recording.getDuration();
let selection = this.markersOverview.getMappedSelection({ mapStart, mapEnd });
return { startTime: selection.min, endTime: selection.max };
},
/**
@ -98,9 +124,6 @@ let OverviewView = {
this.markersOverview.rowHeight = MARKERS_GRAPH_ROW_HEIGHT;
this.markersOverview.groupPadding = MARKERS_GROUP_VERTICAL_PADDING;
yield this.markersOverview.ready();
CanvasGraphUtils.linkAnimation(this.framerateGraph, this.markersOverview);
CanvasGraphUtils.linkSelection(this.framerateGraph, this.markersOverview);
}),
/**
@ -111,8 +134,21 @@ let OverviewView = {
this.memoryOverview.fixedHeight = MEMORY_GRAPH_HEIGHT;
yield this.memoryOverview.ready();
CanvasGraphUtils.linkAnimation(this.framerateGraph, this.memoryOverview);
CanvasGraphUtils.linkSelection(this.framerateGraph, this.memoryOverview);
CanvasGraphUtils.linkAnimation(this.markersOverview, this.memoryOverview);
CanvasGraphUtils.linkSelection(this.markersOverview, this.memoryOverview);
}),
/**
* Sets up the framerate graph.
*/
_showFramerateGraph: Task.async(function *() {
let metric = L10N.getStr("graphs.fps");
this.framerateGraph = new LineGraphWidget($("#time-framerate"), { metric });
this.framerateGraph.fixedHeight = FRAMERATE_GRAPH_HEIGHT;
yield this.framerateGraph.ready();
CanvasGraphUtils.linkAnimation(this.markersOverview, this.framerateGraph);
CanvasGraphUtils.linkSelection(this.markersOverview, this.framerateGraph);
}),
/**
@ -123,19 +159,26 @@ let OverviewView = {
*/
render: Task.async(function *(resolution) {
let recording = PerformanceController.getCurrentRecording();
let interval = recording.getInterval();
let duration = recording.getDuration();
let markers = recording.getMarkers();
let memory = recording.getMemory();
let timestamps = recording.getTicks();
this.markersOverview.setData({ interval, markers });
this.emit(EVENTS.MARKERS_GRAPH_RENDERED);
this.memoryOverview.setData({ interval, memory });
this.emit(EVENTS.MEMORY_GRAPH_RENDERED);
yield this.framerateGraph.setDataFromTimestamps(timestamps, resolution);
this.emit(EVENTS.FRAMERATE_GRAPH_RENDERED);
// Empty or older recordings might yield no markers, memory or timestamps.
if (markers) {
this.markersOverview.setData({ markers, duration });
this.emit(EVENTS.MARKERS_GRAPH_RENDERED);
}
if (memory) {
this.memoryOverview.dataDuration = duration;
this.memoryOverview.setData(memory);
this.emit(EVENTS.MEMORY_GRAPH_RENDERED);
}
if (timestamps) {
this.framerateGraph.dataDuration = duration;
yield this.framerateGraph.setDataFromTimestamps(timestamps, resolution);
this.emit(EVENTS.FRAMERATE_GRAPH_RENDERED);
}
// Finished rendering all graphs in this overview.
this.emit(EVENTS.OVERVIEW_RENDERED);
@ -155,40 +198,21 @@ let OverviewView = {
* Fired when the graph selection has changed. Called by
* mouseup and scroll events.
*/
_onSelectionChange: function () {
if (this.framerateGraph.hasSelection()) {
let { min: beginAt, max: endAt } = this.framerateGraph.getMappedSelection();
this._beginAt = beginAt;
this._endAt = endAt;
this.emit(EVENTS.OVERVIEW_RANGE_SELECTED, { beginAt, endAt });
} else {
this._beginAt = null;
this._endAt = null;
_onGraphSelecting: function () {
let recording = PerformanceController.getCurrentRecording();
if (recording == null || this._stopSelectionChangeEventPropagation) {
return;
}
// If the range is smaller than a pixel (which can happen when performing
// a click on the graphs), treat this as a cleared selection.
let interval = this.getTimeInterval();
if (interval.endTime - interval.startTime < 1) {
this.emit(EVENTS.OVERVIEW_RANGE_CLEARED);
} else {
this.emit(EVENTS.OVERVIEW_RANGE_SELECTED, interval);
}
},
/**
* Listener handling the "mouseup" event for the framerate graph.
* Fires an event to be handled elsewhere.
*/
_onGraphMouseUp: function () {
// Only fire a selection change event if the selection is actually enabled.
if (this.framerateGraph.selectionEnabled) {
this._onSelectionChange();
}
},
/**
* Listener handling the "scroll" event for the framerate graph.
* Fires a debounced event to be handled elsewhere.
*/
_onGraphScroll: function () {
setNamedTimeout("graph-scroll", GRAPH_SCROLL_EVENTS_DRAIN, () => {
this._onSelectionChange();
});
},
/**
* Called to refresh the timer to keep firing _onRecordingTick.
*/
@ -201,22 +225,33 @@ let OverviewView = {
},
/**
* Called when recording starts.
* Called when recording will start.
*/
_onRecordingStarted: function (_, recording) {
_onRecordingWillStart: function (_, recording) {
this._checkSelection(recording);
this._timeoutId = setTimeout(this._onRecordingTick, OVERVIEW_UPDATE_INTERVAL);
this.framerateGraph.dropSelection();
},
/**
* Called when recording stops.
* Called when recording actually starts.
*/
_onRecordingStarted: function (_, recording) {
this._timeoutId = setTimeout(this._onRecordingTick, OVERVIEW_UPDATE_INTERVAL);
},
/**
* Called when recording will stop.
*/
_onRecordingWillStop: function(_, recording) {
clearTimeout(this._timeoutId);
this._timeoutId = null;
},
/**
* Called when recording actually stops.
*/
_onRecordingStopped: function (_, recording) {
this._checkSelection(recording);
clearTimeout(this._timeoutId);
this._timeoutId = null;
this.render(FRAMERATE_GRAPH_HIGH_RES_INTERVAL);
},
@ -224,7 +259,7 @@ let OverviewView = {
* Called when a new recording is selected.
*/
_onRecordingSelected: function (_, recording) {
this.framerateGraph.dropSelection();
this.markersOverview.dropSelection();
this._checkSelection(recording);
// If timeout exists, we have something recording, so
@ -236,9 +271,9 @@ let OverviewView = {
_checkSelection: function (recording) {
let selectionEnabled = !recording.isRecording();
this.framerateGraph.selectionEnabled = selectionEnabled;
this.markersOverview.selectionEnabled = selectionEnabled;
this.memoryOverview.selectionEnabled = selectionEnabled;
this.framerateGraph.selectionEnabled = selectionEnabled;
}
};

Просмотреть файл

@ -56,20 +56,22 @@ exports.CallView = CallView;
* top-down). Defaults to false.
*/
function CallView({ autoExpandDepth, caller, frame, level, hidden, inverted }) {
// Assume no indentation if the this tree item's level is not specified.
level = level || 0;
// Don't increase indentation if this tree item is hidden.
if (hidden) {
level--;
}
AbstractTreeItem.call(this, {
parent: caller,
level
});
AbstractTreeItem.call(this, { parent: caller, level });
this.caller = caller;
this.autoExpandDepth = autoExpandDepth != null
? autoExpandDepth
: caller ? caller.autoExpandDepth : CALL_TREE_AUTO_EXPAND;
: caller ? caller.autoExpandDepth
: CALL_TREE_AUTO_EXPAND;
this.caller = caller;
this.frame = frame;
this.hidden = hidden;
this.inverted = inverted;
@ -93,16 +95,17 @@ CallView.prototype = Heritage.extend(AbstractTreeItem.prototype, {
let selfPercentage;
let selfDuration;
if (!this._getChildCalls().length) {
selfPercentage = framePercentage;
selfDuration = this.frame.duration;
} else {
let childrenPercentage = sum([this._getPercentage(c.samples)
for (c of this._getChildCalls())]);
selfPercentage = clamp(framePercentage - childrenPercentage, 0, 100);
let childrenPercentage = sum(
[this._getPercentage(c.samples) for (c of this._getChildCalls())]);
let childrenDuration = sum(
[c.duration for (c of this._getChildCalls())]);
let childrenDuration = sum([c.duration
for (c of this._getChildCalls())]);
selfPercentage = clamp(framePercentage - childrenPercentage, 0, 100);
selfDuration = this.frame.duration - childrenDuration;
if (this.inverted) {

Просмотреть файл

@ -24,13 +24,12 @@ const GRAPH_RESIZE_EVENTS_DRAIN = 100; // ms
const GRAPH_WHEEL_ZOOM_SENSITIVITY = 0.00035;
const GRAPH_WHEEL_SCROLL_SENSITIVITY = 0.5;
const GRAPH_MIN_SELECTION_WIDTH = 10; // ms
const GRAPH_MIN_SELECTION_WIDTH = 20; // ms
const TIMELINE_TICKS_MULTIPLE = 5; // ms
const TIMELINE_TICKS_SPACING_MIN = 75; // px
const OVERVIEW_HEADER_HEIGHT = 16; // px
const OVERVIEW_HEADER_SAFE_BOUNDS = 50; // px
const OVERVIEW_HEADER_TEXT_COLOR = "#18191a";
const OVERVIEW_HEADER_TEXT_FONT_SIZE = 9; // px
const OVERVIEW_HEADER_TEXT_FONT_FAMILY = "sans-serif";
@ -53,9 +52,10 @@ const FLAME_GRAPH_BLOCK_TEXT_PADDING_RIGHT = 3; // px
*
* Example usage:
* let graph = new FlameGraph(node);
* let src = FlameGraphUtils.createFlameGraphDataFromSamples(samples);
* graph.once("ready", () => {
* graph.setData(src);
* let data = FlameGraphUtils.createFlameGraphDataFromSamples(samples);
* let bounds = { startTime, endTime };
* graph.setData({ data, bounds });
* });
*
* Data source format:
@ -120,6 +120,7 @@ function FlameGraph(parent, sharpness) {
this._height = canvas.height = bounds.height * this._pixelRatio;
this._ctx = canvas.getContext("2d");
this._bounds = new GraphSelection();
this._selection = new GraphSelection();
this._selectionDragger = new GraphSelectionDragger();
@ -192,8 +193,10 @@ FlameGraph.prototype = {
this._window.cancelAnimationFrame(this._animationId);
this._iframe.remove();
this._bounds = null;
this._selection = null;
this._selectionDragger = null;
this._textWidthsCache = null;
this._data = null;
@ -230,12 +233,15 @@ FlameGraph.prototype = {
* Sets the data source for this graph.
*
* @param object data
* The data source. See the constructor for more information.
* An object containing the following properties:
* - data: the data source; see the constructor for more info
* - bounds: the minimum/maximum { start, end }, in ms or px
* - visible: optional, the shown { start, end }, in ms or px
*/
setData: function(data) {
setData: function({ data, bounds, visible }) {
this._data = data;
this._selection = { start: 0, end: this._width };
this._shouldRedraw = true;
this.setOuterBounds(bounds);
this.setViewRange(visible || bounds);
},
/**
@ -260,14 +266,45 @@ FlameGraph.prototype = {
},
/**
* Gets the start or end of this graph's selection, i.e. the 'data window'.
* Sets the maximum selection (i.e. the 'graph bounds').
* @param object { start, end }
*/
setOuterBounds: function({ startTime, endTime }) {
this._bounds.start = startTime * this._pixelRatio;
this._bounds.end = endTime * this._pixelRatio;
this._shouldRedraw = true;
},
/**
* Sets the selection (i.e. the 'view range') bounds.
* @return number
*/
getDataWindowStart: function() {
return this._selection.start;
setViewRange: function({ startTime, endTime }) {
this._selection.start = startTime * this._pixelRatio;
this._selection.end = endTime * this._pixelRatio;
this._shouldRedraw = true;
},
getDataWindowEnd: function() {
return this._selection.end;
/**
* Gets the maximum selection (i.e. the 'graph bounds').
* @return number
*/
getOuterBounds: function() {
return {
startTime: this._bounds.start / this._pixelRatio,
endTime: this._bounds.end / this._pixelRatio
};
},
/**
* Gets the current selection (i.e. the 'view range').
* @return number
*/
getViewRange: function() {
return {
startTime: this._selection.start / this._pixelRatio,
endTime: this._selection.end / this._pixelRatio
};
},
/**
@ -346,9 +383,6 @@ FlameGraph.prototype = {
let canvasHeight = this._height;
let scaledOffset = dataOffset * dataScale;
let safeBounds = OVERVIEW_HEADER_SAFE_BOUNDS * this._pixelRatio;
let availableWidth = canvasWidth - safeBounds;
let fontSize = OVERVIEW_HEADER_TEXT_FONT_SIZE * this._pixelRatio;
let fontFamily = OVERVIEW_HEADER_TEXT_FONT_FAMILY;
let textPaddingLeft = OVERVIEW_HEADER_TEXT_PADDING_LEFT * this._pixelRatio;
@ -361,7 +395,7 @@ FlameGraph.prototype = {
ctx.strokeStyle = this.overviewTimelineStrokes;
ctx.beginPath();
for (let x = 0; x < availableWidth + scaledOffset; x += tickInterval) {
for (let x = 0; x < canvasWidth + scaledOffset; x += tickInterval) {
let lineLeft = x - scaledOffset;
let textLeft = lineLeft + textPaddingLeft;
let time = Math.round(x / dataScale / this._pixelRatio);
@ -650,6 +684,7 @@ FlameGraph.prototype = {
selection.end = dragger.anchor.end + (dragger.origin - mouseX) / selectionScale;
this._normalizeSelectionBounds();
this._shouldRedraw = true;
this.emit("selecting");
}
},
@ -707,6 +742,7 @@ FlameGraph.prototype = {
this._normalizeSelectionBounds();
this._shouldRedraw = true;
this.emit("selecting");
},
/**
@ -715,34 +751,33 @@ FlameGraph.prototype = {
* wider than the allowed minimum width.
*/
_normalizeSelectionBounds: function() {
let canvasWidth = this._width;
let canvasHeight = this._height;
let boundsStart = this._bounds.start;
let boundsEnd = this._bounds.end;
let selectionStart = this._selection.start;
let selectionEnd = this._selection.end;
let { start, end } = this._selection;
let minSelectionWidth = GRAPH_MIN_SELECTION_WIDTH * this._pixelRatio;
if (start < 0) {
start = 0;
if (selectionStart < boundsStart) {
selectionStart = boundsStart;
}
if (end < 0) {
start = 0;
end = minSelectionWidth;
if (selectionEnd < boundsStart) {
selectionStart = boundsStart;
selectionEnd = GRAPH_MIN_SELECTION_WIDTH;
}
if (end > canvasWidth) {
end = canvasWidth;
if (selectionEnd > boundsEnd) {
selectionEnd = boundsEnd;
}
if (start > canvasWidth) {
end = canvasWidth;
start = canvasWidth - minSelectionWidth;
if (selectionStart > boundsEnd) {
selectionEnd = boundsEnd;
selectionStart = boundsEnd - GRAPH_MIN_SELECTION_WIDTH;
}
if (end - start < minSelectionWidth) {
let midPoint = (start + end) / 2;
start = midPoint - minSelectionWidth / 2;
end = midPoint + minSelectionWidth / 2;
if (selectionEnd - selectionStart < GRAPH_MIN_SELECTION_WIDTH) {
let midPoint = (selectionStart + selectionEnd) / 2;
selectionStart = midPoint - GRAPH_MIN_SELECTION_WIDTH / 2;
selectionEnd = midPoint + GRAPH_MIN_SELECTION_WIDTH / 2;
}
this._selection.start = start;
this._selection.end = end;
this._selection.start = selectionStart;
this._selection.end = selectionEnd;
},
/**

Просмотреть файл

@ -443,35 +443,68 @@ AbstractCanvasGraph.prototype = {
return { start: null, end: null };
},
/**
* Sets the selection bounds, scaled to correlate with the data source ranges,
* such that a [0, max width] selection maps to [first value, last value].
*
* @param object selection
* The selection's { start, end } values.
* @param object { mapStart, mapEnd } mapping [optional]
* Invoked when retrieving the numbers in the data source representing
* the first and last values, on the X axis.
*/
setMappedSelection: function(selection, mapping = {}) {
if (!this.hasData()) {
throw "A data source is necessary for retrieving a mapped selection.";
}
if (!selection || selection.start == null || selection.end == null) {
throw "Invalid selection coordinates";
}
let { mapStart, mapEnd } = mapping;
let startTime = (mapStart || (e => e.delta))(this._data[0]);
let endTime = (mapEnd || (e => e.delta))(this._data[this._data.length - 1]);
// The selection's start and end values are not guaranteed to be ascending.
// Also make sure that the selection bounds fit inside the data bounds.
let min = Math.max(Math.min(selection.start, selection.end), startTime);
let max = Math.min(Math.max(selection.start, selection.end), endTime);
min = map(min, startTime, endTime, 0, this._width);
max = map(max, startTime, endTime, 0, this._width);
this.setSelection({ start: min, end: max });
},
/**
* Gets the selection bounds, scaled to correlate with the data source ranges,
* such that a [0, max width] selection maps to [first value, last value].
*
* @param function unpack [optional]
* @param object { mapStart, mapEnd } mapping [optional]
* Invoked when retrieving the numbers in the data source representing
* the first and last values, on the X axis. Currently, all graphs
* store this in a "delta" property for all entries, but in the future
* this may change as new graphs with different data source format
* requirements are implemented.
* the first and last values, on the X axis.
* @return object
* The mapped selection's { min, max } values.
*/
getMappedSelection: function(unpack = e => e.delta) {
if (!this.hasData() || !this.hasSelection()) {
getMappedSelection: function(mapping = {}) {
if (!this.hasData()) {
throw "A data source is necessary for retrieving a mapped selection.";
}
if (!this.hasSelection() && !this.hasSelectionInProgress()) {
return { min: null, max: null };
}
let selection = this.getSelection();
let totalTicks = this._data.length;
let firstTick = unpack(this._data[0]);
let lastTick = unpack(this._data[totalTicks - 1]);
let { mapStart, mapEnd } = mapping;
let startTime = (mapStart || (e => e.delta))(this._data[0]);
let endTime = (mapEnd || (e => e.delta))(this._data[this._data.length - 1]);
// The selection's start and end values are not guaranteed to be ascending.
// This can happen, for example, when click & dragging from right to left.
// Also make sure that the selection bounds fit inside the canvas bounds.
let selection = this.getSelection();
let min = Math.max(Math.min(selection.start, selection.end), 0);
let max = Math.min(Math.max(selection.start, selection.end), this._width);
min = map(min, 0, this._width, firstTick, lastTick);
max = map(max, 0, this._width, firstTick, lastTick);
min = map(min, 0, this._width, startTime, endTime);
max = map(max, 0, this._width, startTime, endTime);
return { min: min, max: max };
},
@ -1226,8 +1259,13 @@ LineGraphWidget.prototype = Heritage.extend(AbstractCanvasGraph.prototype, {
dataOffsetX: 0,
/**
* The scalar used to multiply the graph values to leave some headroom
* on the top.
* Optionally uses this value instead of the last tick in the data source
* to compute the horizontal scaling.
*/
dataDuration: 0,
/**
* The scalar used to multiply the graph values to leave some headroom.
*/
dampenValuesFactor: LINE_GRAPH_DAMPEN_VALUES,
@ -1297,7 +1335,8 @@ LineGraphWidget.prototype = Heritage.extend(AbstractCanvasGraph.prototype, {
avgValue = sumValues / totalTicks;
}
let dataScaleX = this.dataScaleX = width / (lastTick - this.dataOffsetX);
let duration = this.dataDuration || lastTick;
let dataScaleX = this.dataScaleX = width / (duration - this.dataOffsetX);
let dataScaleY = this.dataScaleY = height / maxValue * this.dampenValuesFactor;
// Draw the background.

Просмотреть файл

@ -1,7 +1,6 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
let { Ci } = require("chrome");

Просмотреть файл

@ -28,13 +28,12 @@ const OVERVIEW_CLIPHEAD_LINE_COLOR = "#555";
const OVERVIEW_HEADER_TICKS_MULTIPLE = 100; // ms
const OVERVIEW_HEADER_TICKS_SPACING_MIN = 75; // px
const OVERVIEW_HEADER_SAFE_BOUNDS = 50; // px
const OVERVIEW_HEADER_TEXT_FONT_SIZE = 9; // px
const OVERVIEW_HEADER_TEXT_FONT_FAMILY = "sans-serif";
const OVERVIEW_HEADER_TEXT_PADDING_LEFT = 6; // px
const OVERVIEW_HEADER_TEXT_PADDING_TOP = 1; // px
const OVERVIEW_MARKERS_COLOR_STOPS = [0, 0.1, 0.75, 1];
const OVERVIEW_MARKER_DURATION_MIN = 4; // ms
const OVERVIEW_MARKER_WIDTH_MIN = 4; // px
const OVERVIEW_GROUP_VERTICAL_PADDING = 5; // px
/**
@ -47,16 +46,8 @@ const OVERVIEW_GROUP_VERTICAL_PADDING = 5; // px
*/
function MarkersOverview(parent, blueprint, ...args) {
AbstractCanvasGraph.apply(this, [parent, "markers-overview", ...args]);
this.setTheme();
// Set the list of names, properties and colors used to paint this overview.
this.setBlueprint(blueprint);
this.once("ready", () => {
// Populate this overview with some dummy initial data.
this.setData({ interval: { startTime: 0, endTime: 1000 }, markers: [] });
});
}
MarkersOverview.prototype = Heritage.extend(AbstractCanvasGraph.prototype, {
@ -93,7 +84,7 @@ MarkersOverview.prototype = Heritage.extend(AbstractCanvasGraph.prototype, {
clearView: function() {
this.selectionEnabled = false;
this.dropSelection();
this.setData({ interval: { startTime: 0, endTime: 0 }, markers: [] });
this.setData({ duration: 0, markers: [] });
},
/**
@ -101,14 +92,11 @@ MarkersOverview.prototype = Heritage.extend(AbstractCanvasGraph.prototype, {
* @see AbstractCanvasGraph.prototype.buildGraphImage
*/
buildGraphImage: function() {
let { interval, markers } = this._data;
let { startTime, endTime } = interval;
let { markers, duration } = this._data;
let { canvas, ctx } = this._getNamedCanvas("markers-overview-data");
let canvasWidth = this._width;
let canvasHeight = this._height;
let safeBounds = OVERVIEW_HEADER_SAFE_BOUNDS * this._pixelRatio;
let availableWidth = canvasWidth - safeBounds;
// Group markers into separate paint batches. This is necessary to
// draw all markers sharing the same style at once.
@ -120,15 +108,13 @@ MarkersOverview.prototype = Heritage.extend(AbstractCanvasGraph.prototype, {
}
}
// Calculate each group's height, and the time-based scaling.
// Calculate each row's height, and the time-based scaling.
let totalGroups = this._lastGroup + 1;
let headerHeight = this.headerHeight * this._pixelRatio;
let groupHeight = this.rowHeight * this._pixelRatio;
let groupPadding = this.groupPadding * this._pixelRatio;
let totalTime = (endTime - startTime) || 0;
let dataScale = this.dataScaleX = availableWidth / totalTime;
let headerHeight = this.headerHeight * this._pixelRatio;
let dataScale = this.dataScaleX = canvasWidth / duration;
// Draw the header and overview background.
@ -164,7 +150,7 @@ MarkersOverview.prototype = Heritage.extend(AbstractCanvasGraph.prototype, {
ctx.strokeStyle = this.headerTimelineStrokeColor;
ctx.beginPath();
for (let x = 0; x < availableWidth; x += tickInterval) {
for (let x = 0; x < canvasWidth; x += tickInterval) {
let lineLeft = x;
let textLeft = lineLeft + textPaddingLeft;
let time = Math.round(x / dataScale);
@ -191,12 +177,8 @@ MarkersOverview.prototype = Heritage.extend(AbstractCanvasGraph.prototype, {
ctx.beginPath();
for (let { start, end } of batch) {
start -= interval.startTime;
end -= interval.startTime;
let left = start * dataScale;
let duration = Math.max(end - start, OVERVIEW_MARKER_DURATION_MIN);
let width = Math.max(duration * dataScale, this._pixelRatio);
let width = Math.max((end - start) * dataScale, OVERVIEW_MARKER_WIDTH_MIN);
ctx.rect(left, top, width, height);
}

Просмотреть файл

@ -19,7 +19,7 @@ const { getColor } = require("devtools/shared/theme");
loader.lazyRequireGetter(this, "L10N",
"devtools/timeline/global", true);
const HTML_NS = "http://www.w3.org/1999/xhtml";
const OVERVIEW_DAMPEN_VALUES = 0.95;
const OVERVIEW_HEIGHT = 30; // px
const OVERVIEW_STROKE_WIDTH = 1; // px
@ -38,14 +38,10 @@ const OVERVIEW_SELECTION_LINE_COLOR = "#555";
function MemoryOverview(parent) {
LineGraphWidget.call(this, parent, { metric: L10N.getStr("graphs.memory") });
this.setTheme();
this.once("ready", () => {
// Populate this overview with some dummy initial data.
this.setData({ interval: { startTime: 0, endTime: 1000 }, memory: [] });
});
}
MemoryOverview.prototype = Heritage.extend(LineGraphWidget.prototype, {
dampenValuesFactor: 0.95,
dampenValuesFactor: OVERVIEW_DAMPEN_VALUES,
fixedHeight: OVERVIEW_HEIGHT,
strokeWidth: OVERVIEW_STROKE_WIDTH,
maximumLineColor: OVERVIEW_MAXIMUM_LINE_COLOR,
@ -62,15 +58,7 @@ MemoryOverview.prototype = Heritage.extend(LineGraphWidget.prototype, {
clearView: function() {
this.selectionEnabled = false;
this.dropSelection();
this.setData({ interval: { startTime: 0, endTime: 0 }, memory: [] });
},
/**
* Sets the data source for this graph.
*/
setData: function({ interval, memory }) {
this.dataOffsetX = interval.startTime;
LineGraphWidget.prototype.setData.call(this, memory);
this.setData([]);
},
/**

Просмотреть файл

@ -98,24 +98,21 @@ Waterfall.prototype = {
/**
* Populates this view with the provided data source.
*
* @param array markers
* A list of markers received from the controller.
* @param number timeEpoch
* The absolute time (in milliseconds) when the recording started.
* @param number startTime
* The time (in milliseconds) to start drawing from.
* @param number endTime
* The time (in milliseconds) to end drawing at.
* @param object data
* An object containing the following properties:
* - markers: a list of markers received from the controller
* - interval: the { startTime, endTime }, in milliseconds
*/
setData: function(markers, timeEpoch, startTime, endTime) {
setData: function({ markers, interval }) {
this.clearView();
this._markers = markers;
this._interval = interval;
let { startTime, endTime } = interval;
let dataScale = this._waterfallWidth / (endTime - startTime);
this._drawWaterfallBackground(dataScale);
// Label the header as if the first possible marker was at T=0.
this._buildHeader(this._headerContents, startTime - timeEpoch, dataScale);
this._buildHeader(this._headerContents, startTime, dataScale);
this._buildMarkers(this._listContents, markers, startTime, endTime, dataScale);
this.selectRow(this._selectedRowIdx);
},
@ -226,13 +223,13 @@ Waterfall.prototype = {
});
for (let x = 0; x < this._waterfallWidth; x += tickInterval) {
let start = x + direction * WATERFALL_HEADER_TEXT_PADDING;
let time = Math.round(startTime + x / dataScale);
let left = x + direction * WATERFALL_HEADER_TEXT_PADDING;
let time = Math.round(x / dataScale + startTime);
let label = this._l10n.getFormatStr("timeline.tick", time);
let node = this._document.createElement("label");
node.className = "plain waterfall-header-tick";
node.style.transform = "translateX(" + (start - offset) + "px)";
node.style.transform = "translateX(" + (left - offset) + "px)";
node.setAttribute("value", label);
ticks.appendChild(node);
}
@ -256,6 +253,7 @@ Waterfall.prototype = {
for (let marker of markers) {
markerIdx++;
if (!isMarkerInRange(marker, startTime, endTime)) {
continue;
}
@ -281,9 +279,9 @@ Waterfall.prototype = {
}
// Otherwise prepare flushing the outstanding markers after a small delay.
else {
this._setNamedTimeout("flush-outstanding-markers",
WATERFALL_FLUSH_OUTSTANDING_MARKERS_DELAY,
() => this._buildOutstandingMarkers(parent));
let delay = WATERFALL_FLUSH_OUTSTANDING_MARKERS_DELAY;
let func = () => this._buildOutstandingMarkers(parent);
this._setNamedTimeout("flush-outstanding-markers", delay, func);
}
parent.appendChild(this._fragment);
@ -353,7 +351,6 @@ Waterfall.prototype = {
* Index of the row to select. -1 clears the selection.
*/
selectRow: function(idx) {
// Unselect
let prev = this._listContents.children[this._selectedRowIdx];
if (prev) {
prev.classList.remove("selected");
@ -365,6 +362,7 @@ Waterfall.prototype = {
if (row && !row.hasAttribute("is-spacer")) {
row.focus();
row.classList.add("selected");
let markerIdx = row.getAttribute("markerIdx");
this.emit("selected", this._markers[markerIdx]);
this.ensureRowIsVisible(row);
@ -487,9 +485,9 @@ Waterfall.prototype = {
bar.className = "waterfall-marker-bar";
bar.style.backgroundColor = blueprint.fill;
bar.style.borderColor = blueprint.stroke;
bar.style.transform = "translateX(" + (start - offset) + "px)";
// Save border color. It will change when marker is selected.
bar.setAttribute("borderColor", blueprint.stroke);
bar.style.transform = "translateX(" + (start - offset) + "px)";
bar.setAttribute("type", marker.name);
bar.setAttribute("width", Math.max(width, WATERFALL_MARKER_BAR_WIDTH_MIN));
waterfall.appendChild(bar);

Просмотреть файл

@ -1,13 +1,12 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
/**
* Many Gecko operations (painting, reflows, restyle, ...) can be tracked
* in real time. A marker is a representation of one operation. A marker
* has a name, and start and end timestamps. Markers are stored in docShells.
* has a name, start and end timestamps. Markers are stored in docShells.
*
* This actor exposes this tracking mechanism to the devtools protocol.
*
@ -18,7 +17,6 @@
*
* When markers are available, an event is emitted:
* TimelineFront.on("markers", function(markers) {...})
*
*/
const {Ci, Cu} = require("chrome");
@ -26,6 +24,7 @@ const protocol = require("devtools/server/protocol");
const {method, Arg, RetVal, Option} = protocol;
const events = require("sdk/event/core");
const {setTimeout, clearTimeout} = require("sdk/timers");
const {MemoryActor} = require("devtools/server/actors/memory");
const {FramerateActor} = require("devtools/server/actors/framerate");
const {StackFrameCache} = require("devtools/server/actors/utils/stack");
@ -56,22 +55,20 @@ let TimelineActor = exports.TimelineActor = protocol.ActorClass({
events: {
/**
* "markers" events are emitted every DEFAULT_TIMELINE_DATA_PULL_TIMEOUT ms
* at most, when profile markers are found. A marker has the following
* properties:
* - start {Number} ms
* - end {Number} ms
* - name {String}
* The "markers" events emitted every DEFAULT_TIMELINE_DATA_PULL_TIMEOUT ms
* at most, when profile markers are found. The timestamps on each marker
* are relative to when recording was started.
*/
"markers" : {
type: "markers",
markers: Arg(0, "array:json"),
markers: Arg(0, "json"),
endTime: Arg(1, "number")
},
/**
* "memory" events emitted in tandem with "markers", if this was enabled
* when the recording started.
* The "memory" events emitted in tandem with "markers", if this was enabled
* when the recording started. The `delta` timestamp on this measurement is
* relative to when recording was started.
*/
"memory" : {
type: "memory",
@ -80,8 +77,9 @@ let TimelineActor = exports.TimelineActor = protocol.ActorClass({
},
/**
* "ticks" events (from the refresh driver) emitted in tandem with "markers",
* if this was enabled when the recording started.
* The "ticks" events (from the refresh driver) emitted in tandem with
* "markers", if this was enabled when the recording started. All ticks
* are timestamps with a zero epoch.
*/
"ticks" : {
type: "ticks",
@ -89,6 +87,11 @@ let TimelineActor = exports.TimelineActor = protocol.ActorClass({
timestamps: Arg(1, "array-of-numbers-as-strings")
},
/**
* The "frames" events emitted in tandem with "markers", containing
* JS stack frames. The `delta` timestamp on this frames packet is
* relative to when recording was started.
*/
"frames" : {
type: "frames",
delta: Arg(0, "number"),
@ -96,6 +99,9 @@ let TimelineActor = exports.TimelineActor = protocol.ActorClass({
}
},
/**
* Initializes this actor with the provided connection and tab actor.
*/
initialize: function(conn, tabActor) {
protocol.Actor.prototype.initialize.call(this, conn);
this.tabActor = tabActor;
@ -118,6 +124,9 @@ let TimelineActor = exports.TimelineActor = protocol.ActorClass({
this.destroy();
},
/**
* Destroys this actor, stopping recording first.
*/
destroy: function() {
this.stop();
@ -161,7 +170,7 @@ let TimelineActor = exports.TimelineActor = protocol.ActorClass({
/**
* At regular intervals, pop the markers from the docshell, and forward
* markers if any.
* markers, memory, tick and frames events, if any.
*/
_pullTimelineData: function() {
if (!this._isRecording) {
@ -241,10 +250,10 @@ let TimelineActor = exports.TimelineActor = protocol.ActorClass({
}
if (withMemory) {
this._memoryActor = new MemoryActor(this.conn, this.tabActor,
this._stackFrames);
this._memoryActor = new MemoryActor(this.conn, this.tabActor, this._stackFrames);
events.emit(this, "memory", this._startTime, this._memoryActor.measure());
}
if (withTicks) {
this._framerateActor = new FramerateActor(this.conn, this.tabActor);
this._framerateActor.startRecording();
@ -275,6 +284,7 @@ let TimelineActor = exports.TimelineActor = protocol.ActorClass({
if (this._memoryActor) {
this._memoryActor = null;
}
if (this._framerateActor) {
this._framerateActor.stopRecording();
this._framerateActor = null;
@ -313,7 +323,6 @@ exports.TimelineFront = protocol.FrontClass(TimelineActor, {
protocol.Front.prototype.initialize.call(this, client, {actor: timelineActor});
this.manage(this);
},
destroy: function() {
protocol.Front.prototype.destroy.call(this);
},