Bug 1465589 - Convert Perfherder services to functions (#4243)

Sarah Clements 2018-11-09 08:02:07 -08:00 committed by Ed Morley
Parent 4a6095784c
Commit c6502e5bff
10 changed files with 378 additions and 431 deletions

View file

@@ -128,7 +128,7 @@ module.exports = {
neutrino.config.performance
.hints('error')
.maxAssetSize(1.20 * 1024 * 1024)
- .maxEntrypointSize(1.63 * 1024 * 1024);
+ .maxEntrypointSize(1.64 * 1024 * 1024);
}
},
],

View file

@@ -27,8 +27,6 @@ import './js/filters';
import './js/models/perf/issue_tracker';
import './js/models/perf/performance_framework';
import './js/models/perf/alerts';
- import './js/services/perf/math';
- import './js/services/perf/compare';
import './js/controllers/perf/compare';
import './js/controllers/perf/graphs';
import './js/controllers/perf/alerts';

View file

@@ -34,3 +34,19 @@ export const destroy = function deleteRecord(uri) {
credentials: 'same-origin',
});
};
+ export const getData = async function getData(url) {
+ let failureStatus = null;
+ const response = await fetch(url);
+ if (!response.ok) {
+ failureStatus = response.status;
+ }
+ if (response.headers.get('content-type') === 'text/html' && failureStatus) {
+ return { data: { [failureStatus]: response.statusText }, failureStatus };
+ }
+ const data = await response.json();
+ return { data, failureStatus };
+ };

View file

@@ -1,9 +1,10 @@
import React from 'react';
import PropTypes from 'prop-types';
- import { updateQueryParams, validateQueryParams, getData, mergeData, formatBugs } from './helpers';
+ import { updateQueryParams, validateQueryParams, mergeData, formatBugs } from './helpers';
import { graphsEndpoint, parseQueryParams, createQueryParams, createApiUrl,
bugzillaBugsApi } from '../helpers/url';
+ import { getData } from '../helpers/http';
const withView = defaultState => WrappedComponent =>
class View extends React.Component {

View file

@@ -119,22 +119,6 @@ export const validateQueryParams = function validateQueryParams(params, bugRequi
return messages;
};
- export const getData = async function getData(url) {
- let failureStatus = null;
- const response = await fetch(url);
- if (!response.ok) {
- failureStatus = response.status;
- }
- if (response.headers.get('content-type') === 'text/html' && failureStatus) {
- return { data: { [failureStatus]: response.statusText }, failureStatus };
- }
- const data = await response.json();
- return { data, failureStatus };
- };
export const tableRowStyling = function tableRowStyling(state, bug) {
if (bug) {
const style = { color: '#aaa' };

View file

@@ -14,6 +14,8 @@ import {
import PushModel from '../../../models/push';
import RepositoryModel from '../../../models/repository';
import PerfSeriesModel from '../../../models/perfSeries';
+ import { getCounterMap, getInterval, validateQueryParams, getResultsMap,
+ getGraphsLink } from '../../../perfherder/helpers';
perf.controller('CompareChooserCtrl', [
'$state', '$stateParams', '$scope', '$q',
@@ -139,11 +141,9 @@ perf.controller('CompareChooserCtrl', [
perf.controller('CompareResultsCtrl', [
'$state', '$stateParams', '$scope',
'$httpParamSerializer', '$q', 'PhFramework',
- 'PhCompare',
function CompareResultsCtrl($state, $stateParams, $scope,
$httpParamSerializer,
- $q, PhFramework,
- PhCompare) {
+ $q, PhFramework) {
function displayResults(rawResultsMap, newRawResultsMap) {
$scope.compareResults = {};
$scope.titles = {};
@@ -174,7 +174,7 @@ perf.controller('CompareResultsCtrl', [
newRawResultsMap[sig].name === testName && newRawResultsMap[sig].platform === platform,
);
- const cmap = PhCompare.getCounterMap(testName, rawResultsMap[oldSig], newRawResultsMap[newSig]);
+ const cmap = getCounterMap(testName, rawResultsMap[oldSig], newRawResultsMap[newSig]);
if (cmap.isEmpty) {
return;
}
@@ -214,7 +214,7 @@ perf.controller('CompareResultsCtrl', [
cmap.links.push({
title: 'graph',
- href: PhCompare.getGraphsLink([...new Set(
+ href: getGraphsLink([...new Set(
[$scope.originalProject, $scope.newProject])].map(project => ({
projectName: project.name,
signature: oldSig,
@@ -242,7 +242,7 @@ perf.controller('CompareResultsCtrl', [
cmap.links.push({
title: 'graph',
- href: PhCompare.getGraphsLink([...new Set(
+ href: getGraphsLink([...new Set(
[$scope.originalProject, $scope.newProject])].map(project => ({
projectName: project.name,
signature: oldSig,
@@ -263,7 +263,7 @@ perf.controller('CompareResultsCtrl', [
const noiseMetricTestName = 'Noise Metric';
$scope.compareResults[noiseMetricTestName] = [];
$scope.platformList.forEach(function (platform) {
- const cmap = PhCompare.getCounterMap(noiseMetricTestName, $scope.oldStddevVariance[platform], $scope.newStddevVariance[platform]);
+ const cmap = getCounterMap(noiseMetricTestName, $scope.oldStddevVariance[platform], $scope.newStddevVariance[platform]);
if (cmap.isEmpty) {
return;
}
@@ -288,7 +288,7 @@ perf.controller('CompareResultsCtrl', [
$scope.platformList = [];
if ($scope.originalRevision) {
- const timeRange = PhCompare.getInterval($scope.originalResultSet.push_timestamp, $scope.newResultSet.push_timestamp);
+ const timeRange = getInterval($scope.originalResultSet.push_timestamp, $scope.newResultSet.push_timestamp);
// Optimization - if old/new branches are the same collect data in one pass
const resultSetIds = (isEqual($scope.originalProject, $scope.newProject)) ?
[$scope.originalResultSet.id, $scope.newResultSet.id] : [$scope.originalResultSet.id];
@@ -302,7 +302,7 @@ perf.controller('CompareResultsCtrl', [
originalSeriesList.map(series => series.platform))];
$scope.testList = [...new Set(
originalSeriesList.map(series => series.name))];
- return PhCompare.getResultsMap($scope.originalProject.name,
+ return getResultsMap($scope.originalProject.name,
originalSeriesList,
{ push_id: resultSetIds });
}).then((resultMaps) => {
@@ -329,7 +329,7 @@ perf.controller('CompareResultsCtrl', [
...$scope.testList,
...new Set(newSeriesList.map(series => series.name)),
])];
- return PhCompare.getResultsMap($scope.newProject.name,
+ return getResultsMap($scope.newProject.name,
newSeriesList,
{ push_id: [$scope.newResultSet.id] });
}).then((resultMaps) => {
@@ -349,7 +349,7 @@ perf.controller('CompareResultsCtrl', [
const startDateMs = ($scope.newResultSet.push_timestamp -
$scope.selectedTimeRange.value) * 1000;
const endDateMs = $scope.newResultSet.push_timestamp * 1000;
- return PhCompare.getResultsMap(
+ return getResultsMap(
$scope.originalProject.name, originalSeriesList, {
start_date: new Date(startDateMs).toISOString().slice(0, -5),
end_date: new Date(endDateMs).toISOString().slice(0, -5),
@@ -368,7 +368,7 @@ perf.controller('CompareResultsCtrl', [
...$scope.testList,
...new Set(newSeriesList.map(series => series.name)),
])];
- return PhCompare.getResultsMap($scope.newProject.name,
+ return getResultsMap($scope.newProject.name,
newSeriesList,
{ push_id: [$scope.newResultSet.id] });
}).then((resultMaps) => {
@@ -447,10 +447,7 @@ perf.controller('CompareResultsCtrl', [
$scope.errors = [];
// validation works only for revision to revision comparison
if ($stateParams.originalRevision) {
- $scope.errors = PhCompare.validateInput($stateParams.originalProject,
- $stateParams.newProject,
- $stateParams.originalRevision,
- $stateParams.newRevision);
+ $scope.errors = validateQueryParams($stateParams);
if ($scope.errors.length > 0) {
$scope.dataLoading = false;
@@ -515,9 +512,8 @@ perf.controller('CompareResultsCtrl', [
perf.controller('CompareSubtestResultsCtrl', [
'$state', '$stateParams', '$scope', '$q',
- 'PhCompare', '$httpParamSerializer',
+ '$httpParamSerializer',
function CompareSubtestResultsCtrl($state, $stateParams, $scope, $q,
- PhCompare,
$httpParamSerializer) {
// TODO: duplicated from comparectrl
function verifyRevision(project, revision, rsid) {
@@ -573,7 +569,7 @@ perf.controller('CompareSubtestResultsCtrl', [
const oldSig = mapsigs[0];
const newSig = mapsigs[1];
- const cmap = PhCompare.getCounterMap(testName, rawResultsMap[oldSig], newRawResultsMap[newSig]);
+ const cmap = getCounterMap(testName, rawResultsMap[oldSig], newRawResultsMap[newSig]);
if (oldSig === $scope.originalSignature ||
oldSig === $scope.newSignature ||
newSig === $scope.originalSignature ||
@@ -596,7 +592,7 @@ perf.controller('CompareSubtestResultsCtrl', [
if ($scope.originalRevision) {
cmap.links = [{
title: 'graph',
- href: PhCompare.getGraphsLink([...new Set([
+ href: getGraphsLink([...new Set([
$scope.originalProject,
$scope.newProject,
])].map(project => ({
@@ -622,7 +618,7 @@ perf.controller('CompareSubtestResultsCtrl', [
} else {
cmap.links = [{
title: 'graph',
- href: PhCompare.getGraphsLink([...new Set([
+ href: getGraphsLink([...new Set([
$scope.originalProject,
$scope.newProject,
])].map(project => ({
@@ -637,7 +633,7 @@ perf.controller('CompareSubtestResultsCtrl', [
const noiseMetricTestName = 'Noise Metric';
$scope.compareResults[noiseMetricTestName] = [];
- const cmap = PhCompare.getCounterMap(noiseMetricTestName, $scope.oldStddevVariance, $scope.newStddevVariance);
+ const cmap = getCounterMap(noiseMetricTestName, $scope.oldStddevVariance, $scope.newStddevVariance);
if (!cmap.isEmpty) {
cmap.name = testName;
cmap.isNoiseMetric = true;
@@ -655,12 +651,7 @@ perf.controller('CompareSubtestResultsCtrl', [
RepositoryModel.getList().then((repos) => {
$scope.errors = [];
if ($stateParams.originalRevision) {
- $scope.errors = PhCompare.validateInput($stateParams.originalProject,
- $stateParams.newProject,
- $stateParams.originalRevision,
- $stateParams.newRevision,
- $stateParams.originalSignature,
- $stateParams.newSignature);
+ $scope.errors = validateQueryParams($stateParams);
if ($scope.errors.length > 0) {
$scope.dataLoading = false;
@@ -772,7 +763,7 @@ perf.controller('CompareSubtestResultsCtrl', [
}).then(function (originalSubtestList) {
$scope.pageList = originalSubtestList.map(subtest => subtest.name);
$scope.platformList = [...new Set(originalSubtestList.map(subtest => subtest.platform))];
- return PhCompare.getResultsMap($scope.originalProject.name,
+ return getResultsMap($scope.originalProject.name,
originalSubtestList,
{ push_id: resultSetIds });
}),
@@ -813,7 +804,7 @@ perf.controller('CompareSubtestResultsCtrl', [
...newSeriesList.map(series => series.name),
])];
- return PhCompare.getResultsMap($scope.newProject.name,
+ return getResultsMap($scope.newProject.name,
newSeriesList,
{ push_id: [$scope.newResultSet.id] });
}).then(function (newSeriesMaps) {
@@ -857,7 +848,7 @@ perf.controller('CompareSubtestResultsCtrl', [
const startDateMs = ($scope.newResultSet.push_timestamp -
$scope.selectedTimeRange.value) * 1000;
const endDateMs = $scope.newResultSet.push_timestamp * 1000;
- return PhCompare.getResultsMap(
+ return getResultsMap(
$scope.originalProject.name,
originalSubtestList, {
start_date: new Date(startDateMs).toISOString().slice(0, -5),
@@ -882,7 +873,7 @@ perf.controller('CompareSubtestResultsCtrl', [
...newSeriesList.map(series => series.name),
])];
- return PhCompare.getResultsMap($scope.newProject.name,
+ return getResultsMap($scope.newProject.name,
newSeriesList,
{ push_id: [$scope.newResultSet.id] });
}).then(function (newSeriesMaps) {

View file

@@ -1,301 +0,0 @@
// Remove the eslint-disable when rewriting this file during the React conversion.
/* eslint-disable func-names, no-nested-ternary, object-shorthand, prefer-arrow-callback, prefer-template */
import forIn from 'lodash/forIn';
import chunk from 'lodash/chunk';
import treeherder from '../../treeherder';
import { getApiUrl } from '../../../helpers/url';
import { phTimeRanges } from '../../../helpers/constants';
import PerfSeriesModel from '../../../models/perfSeries';
treeherder.factory('PhCompare', [
'$q', '$http', '$httpParamSerializer', 'math',
function ($q, $http, $httpParamSerializer, math) {
// Used for t_test: default stddev if both sets have only a single value - 15%.
// Should be rare case and it's unreliable, but at least have something.
const STDDEV_DEFAULT_FACTOR = 0.15;
const RATIO_CARE_MIN = 1.02; // We don't care about less than ~2% diff
const T_VALUE_CARE_MIN = 3; // Anything below this is "low" in confidence
const T_VALUE_CONFIDENT = 5; // Anything above this is "high" in confidence
function getClassName(newIsBetter, oldVal, newVal, abs_t_value) {
// NOTE: we care about general ratio rather than how much is new compared
// to old - this could end up with slightly higher or lower threshold
// in practice than indicated by DIFF_CARE_MIN. E.g.:
// - If old is 10 and new is 5, then new = old -50%
// - If old is 5 and new is 10, then new = old + 100%
// And if the threshold was 75% then one would matter and the other wouldn't.
// Instead, we treat both cases as 2.0 (general ratio), and both would matter
// if our threshold was 75% (i.e. DIFF_CARE_MIN = 1.75).
if (!oldVal || !newVal) {
// handle null case
return '';
}
let ratio = newVal / oldVal;
if (ratio < 1) {
ratio = 1 / ratio; // Direction agnostic and always >= 1.
}
if (ratio < RATIO_CARE_MIN || abs_t_value < T_VALUE_CARE_MIN) {
return '';
}
if (abs_t_value < T_VALUE_CONFIDENT) {
// Since we (currently) have only one return value to indicate uncertainty,
// let's use it for regressions only. (Improvement would just not be marked).
return newIsBetter ? '' : 'compare-notsure';
}
return newIsBetter ? 'compare-improvement' : 'compare-regression';
}
return {
// Aggregates two sets of values into a "comparison object" which is later used
// to display a single line of comparison.
// The result object has the following properties:
// - .isEmpty: true if no data for either side.
// If !isEmpty, for originalData/newData (if the data exists)
// - .[original|new]Value // Average of the values
// - .[original|new]Stddev // stddev
// - .[original|new]StddevPct // stddev as percentage of the average
// - .[original|new]Runs // Display data: number of runs and their values
// If both originalData/newData exist, comparison data:
// - .newIsBetter // is new result better or worse (even if unsure)
// - .isImprovement // is new result better + we're confident about it
// - .isRegression // is new result worse + we're confident about it
// - .delta
// - .deltaPercentage
// - .confidence // t-test value
// - .confidenceText // 'low'/'med'/'high'
// - .confidenceTextLong // more explanation on what confidenceText means
// - .isMeaningful // for highlighting - bool over t-test threshold
// And some data to help formatting of the comparison:
// - .className
// - .magnitude
// - .marginDirection
getCounterMap: function getDisplayLineData(testName, originalData, newData) {
function numericCompare(a, b) {
return a < b ? -1 : a > b ? 1 : 0;
}
// Some statistics for a single set of values
function analyzeSet(values, testName) {
let average;
let stddev;
if (testName === 'Noise Metric') {
average = Math.sqrt(values.map(x => x ** 2).reduce((a, b) => a + b, 0));
stddev = 1;
} else {
average = math.average(values);
stddev = math.stddev(values, average);
}
return {
average: average,
stddev: stddev,
stddevPct: Math.round(math.percentOf(stddev, average) * 100) / 100,
// We use slice to keep the original values at their original order
// in case the order is important elsewhere.
runs: values.slice().sort(numericCompare),
};
}
// Eventually the result object, after setting properties as required.
const cmap = { isEmpty: true };
// It's possible to get an object with empty values, so check for that too.
const hasOrig = originalData && originalData.values.length;
const hasNew = newData && newData.values.length;
if (!hasOrig && !hasNew) {
return cmap; // No data for either side
}
cmap.isEmpty = false;
if (hasOrig) {
const orig = analyzeSet(originalData.values, testName);
cmap.originalValue = orig.average;
cmap.originalRuns = orig.runs;
cmap.originalStddev = orig.stddev;
cmap.originalStddevPct = orig.stddevPct;
} else {
cmap.originalRuns = [];
}
if (hasNew) {
const newd = analyzeSet(newData.values, testName);
cmap.newValue = newd.average;
cmap.newRuns = newd.runs;
cmap.newStddev = newd.stddev;
cmap.newStddevPct = newd.stddevPct;
} else {
cmap.newRuns = [];
}
if (!hasOrig || !hasNew) {
return cmap; // No comparison, just display for one side.
}
// keep the framework id so we can filter by that later, if necessary
cmap.frameworkId = originalData.frameworkId;
// Compare the sides.
// Normally tests are "lower is better", can be over-ridden with a series option
cmap.delta = (cmap.newValue - cmap.originalValue);
cmap.newIsBetter = (originalData.lowerIsBetter && cmap.delta < 0) ||
(!originalData.lowerIsBetter && cmap.delta > 0);
// delta percentage (for display)
cmap.deltaPercentage = math.percentOf(cmap.delta, cmap.originalValue);
// arbitrary scale from 0-20% multiplied by 5, capped
// at 100 (so 20% regression === 100% bad)
cmap.magnitude = Math.min(Math.abs(cmap.deltaPercentage) * 5, 100);
const abs_t_value = Math.abs(math.t_test(originalData.values, newData.values, STDDEV_DEFAULT_FACTOR));
cmap.className = getClassName(cmap.newIsBetter, cmap.originalValue, cmap.newValue, abs_t_value);
cmap.confidence = abs_t_value;
cmap.confidenceTextLong = 'Result of running t-test on base versus new result distribution: ';
if (abs_t_value < T_VALUE_CARE_MIN) {
cmap.confidenceText = 'low';
cmap.confidenceTextLong += "A value of 'low' suggests less confidence that there is a sustained, significant change between the two revisions.";
} else if (abs_t_value < T_VALUE_CONFIDENT) {
cmap.confidenceText = 'med';
cmap.confidenceTextLong += "A value of 'med' indicates uncertainty that there is a significant change. If you haven't already, consider retriggering the job to be more sure.";
} else {
cmap.confidenceText = 'high';
cmap.confidenceTextLong += "A value of 'high' indicates more confidence that there is a significant change, however you should check the historical record for the test by looking at the graph to be more sure (some noisy tests can provide inconsistent results).";
}
cmap.isRegression = (cmap.className === 'compare-regression');
cmap.isImprovement = (cmap.className === 'compare-improvement');
cmap.isMeaningful = (cmap.className !== '');
cmap.isComplete = (cmap.originalRuns.length &&
cmap.newRuns.length);
cmap.isConfident = ((cmap.originalRuns.length > 1 &&
cmap.newRuns.length > 1 &&
abs_t_value >= T_VALUE_CONFIDENT) ||
(cmap.originalRuns.length >= 6 &&
cmap.newRuns.length >= 6 &&
abs_t_value >= T_VALUE_CARE_MIN));
cmap.needsMoreRuns = (cmap.isComplete && !cmap.isConfident &&
cmap.originalRuns.length < 6);
cmap.isNoiseMetric = false;
return cmap;
},
getInterval: function (oldTimestamp, newTimestamp) {
const now = (new Date()).getTime() / 1000;
let timeRange = Math.min(oldTimestamp, newTimestamp);
timeRange = Math.round(now - timeRange);
// now figure out which predefined set of data we can query from
const phTimeRange = phTimeRanges.find(i => timeRange <= i.value);
return phTimeRange.value;
},
validateInput: function (originalProject, newProject,
originalRevision, newRevision,
originalSignature, newSignature) {
const errors = [];
if (!originalProject) errors.push('Missing input: originalProject');
if (!newProject) errors.push('Missing input: newProject');
if (!originalRevision) errors.push('Missing input: originalRevision');
if (!newRevision) errors.push('Missing input: newRevision');
if (originalSignature && newSignature) {
if (!originalSignature) errors.push('Missing input: originalSignature');
if (!newSignature) errors.push('Missing input: newSignature');
}
$http.get(getApiUrl('/repository/')).then(function (response) {
if (!response.data.find(project => project.name === originalProject)) {
errors.push("Invalid project, doesn't exist: " + originalProject);
}
if (!response.data.find(project => project.name === newProject)) {
errors.push("Invalid project, doesn't exist: " + newProject);
}
});
return errors;
},
getResultsMap: (projectName, seriesList, params) => {
const resultsMap = {};
return $q.all(chunk(seriesList, 40).map(
seriesChunk => PerfSeriesModel.getSeriesData(
projectName, {
signature_id: seriesChunk.map(series => series.id),
framework: [...new Set(seriesChunk.map(series => series.frameworkId))],
...params,
}).then((seriesData) => {
// Aggregates data from the server on a single group of values which
// will be compared later to another group. Ends up with an object
// with description (name/platform) and values.
// The values are later processed at getCounterMap as the data arguments.
forIn(seriesData, (data, signatureHash) => {
const signature = seriesList.find(series =>
series.signature === signatureHash);
if (signature) {
// helper method to either return the push
// index (if getting per-push results) or
// just the main results map object otherwise
const _getResultMapEntry = (datum) => {
if (params.push_id) {
if (!resultsMap[datum.push_id]) {
resultsMap[datum.push_id] = {};
}
return resultsMap[datum.push_id];
}
return resultsMap;
};
data.forEach((datum) => {
const entry = _getResultMapEntry(datum);
if (!entry[signatureHash]) {
entry[signatureHash] = {
...signature,
values: [datum.value],
};
} else {
entry[signatureHash].values.push(datum.value);
}
});
}
});
}),
)).then(() => resultsMap);
},
getGraphsLink: function (seriesList, resultSets, timeRange) {
let graphsLink = 'perf.html#/graphs?' + $httpParamSerializer({
series: seriesList.map(series => ([
series.projectName,
series.signature, 1,
series.frameworkId,
])),
highlightedRevisions: resultSets.map(resultSet => (
resultSet.revision.slice(0, 12)
)),
});
if (resultSets) {
if (!timeRange) {
graphsLink += '&timerange=' + Math.max(
...resultSets.map(resultSet =>
phTimeRanges.map(range => range.value).find(t =>
((Date.now() / 1000.0) -
resultSet.push_timestamp) < t),
));
} else {
graphsLink += '&timerange=' + timeRange;
}
}
return graphsLink;
},
};
}]);

View file

@@ -1,78 +0,0 @@
// Remove the eslint-disable when rewriting this file during the React conversion.
/* eslint-disable func-names, object-shorthand, prefer-arrow-callback */
import treeherder from '../../treeherder';
treeherder.factory('math', [
function () {
function percentOf(a, b) {
return b ? 100 * a / b : 0;
}
function average(values) {
if (values.length < 1) {
return 0;
}
return values.reduce((a, b) => a + b, 0) / values.length;
}
function stddev(values, avg) {
if (values.length < 2) {
return undefined;
}
if (!avg) avg = average(values);
return Math.sqrt(
values.map(v => (v - avg) ** 2)
.reduce(function (a, b) { return a + b; }) / (values.length - 1));
}
// If a set has only one value, assume average-ish-plus stddev, which
// will manifest as smaller t-value the less items there are at the group
// (so quite small for 1 value). This default value is a parameter.
// C/T mean control/test group (in our case original/new data).
function t_test(valuesC, valuesT, stddev_default_factor) {
const lenC = valuesC.length;
const lenT = valuesT.length;
// We must have at least one value at each set
if (lenC < 1 || lenT < 1) {
return 0;
}
const avgC = average(valuesC);
const avgT = average(valuesT);
// Use actual stddev if possible, or stddev_default_factor if one sample
let stddevC = (lenC > 1 ? stddev(valuesC, avgC) : stddev_default_factor * avgC);
let stddevT = (lenT > 1 ? stddev(valuesT, avgT) : stddev_default_factor * avgT);
// If one of the sets has only a single sample, assume its stddev is
// the same as that of the other set (in percentage). If both sets
// have only one sample, both will use stddev_default_factor.
if (lenC === 1) {
stddevC = valuesC[0] * stddevT / avgT;
} else if (lenT === 1) {
stddevT = valuesT[0] * stddevC / avgC;
}
const delta = avgT - avgC;
const stdDiffErr = (
Math.sqrt(
stddevC * stddevC / lenC // control-variance / control-size
+
stddevT * stddevT / lenT, // ...
)
);
return delta / stdDiffErr;
}
return {
percentOf: percentOf,
average: average,
stddev: stddev,
t_test: t_test,
};
}]);

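The removed t_test (reimplemented below as getTTest) is a Welch-style two-sample statistic: the difference of the means over the combined standard error. A small numeric check with illustrative values:

// t = (avgT - avgC) / sqrt(stddevC ** 2 / lenC + stddevT ** 2 / lenT)
// control [10, 10.2, 10.4]: avg 10.2, stddev 0.2
// test    [11, 11.2, 11.4]: avg 11.2, stddev 0.2
// t = 1.0 / sqrt(0.04 / 3 + 0.04 / 3) ≈ 6.1, i.e. above the "high" confidence threshold of 5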
View file

@@ -0,0 +1,2 @@
export const tValueCareMin = 3; // Anything below this is "low" in confidence
export const tValueConfidence = 5; // Anything above this is "high" in confidence
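
These thresholds drive the confidence labels computed by getCounterMap in the new helpers module below; roughly (a sketch, not code from this commit):

import { tValueCareMin, tValueConfidence } from './constants';

// Map an absolute t-value to the label getCounterMap reports
const confidenceLabel = absTValue =>
  absTValue < tValueCareMin ? 'low'
    : absTValue < tValueConfidence ? 'med'
      : 'high';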

ui/perfherder/helpers.js Normal file
View file

@@ -0,0 +1,334 @@
import chunk from 'lodash/chunk';
import { tValueCareMin, tValueConfidence } from './constants';
import { getApiUrl, createQueryParams } from '../helpers/url';
import { getData } from '../helpers/http';
import PerfSeriesModel from '../models/perfSeries';
import { phTimeRanges } from '../helpers/constants';
export const calcPercentOf = function calcPercentOf(a, b) {
return b ? 100 * a / b : 0;
};
export const calcAverage = function calcAverage(values) {
if (!values.length) {
return 0;
}
return values.reduce((a, b) => a + b, 0) / values.length;
};
export const getStdDev = function getStandardDeviation(values, avg) {
if (values.length < 2) {
return undefined;
}
if (!avg) avg = calcAverage(values);
return Math.sqrt(
values.map(v => (v - avg) ** 2).reduce((a, b) => a + b) / (values.length - 1));
};
// If a set has only one value, assume average-ish-plus standard deviation, which
// will manifest as a smaller t-value the fewer items there are in the group
// (so quite small for a single value). This default value is a parameter.
// C/T mean control/test group (in our case original/new data).
export const getTTest = function getTTest(valuesC, valuesT, stddev_default_factor) {
const lenC = valuesC.length;
const lenT = valuesT.length;
if (!lenC || !lenT) {
return 0;
}
const avgC = calcAverage(valuesC);
const avgT = calcAverage(valuesT);
let stddevC = (lenC > 1 ? getStdDev(valuesC, avgC) : stddev_default_factor * avgC);
let stddevT = (lenT > 1 ? getStdDev(valuesT, avgT) : stddev_default_factor * avgT);
if (lenC === 1) {
stddevC = valuesC[0] * stddevT / avgT;
} else if (lenT === 1) {
stddevT = valuesT[0] * stddevC / avgC;
}
const delta = avgT - avgC;
const stdDiffErr = (
Math.sqrt(
(stddevC * stddevC / lenC) // control-variance / control-size
+
(stddevT * stddevT / lenT),
)
);
return delta / stdDiffErr;
};
// TODO: many of these are only used in one controller, so they can likely be moved
// into the appropriate React component
const numericCompare = (a, b) => {
if (a < b) {
return -1;
}
if (a > b) {
return 1;
}
return 0;
};
const analyzeSet = (values, testName) => {
let average;
let stddev = 1;
if (testName === 'Noise Metric') {
average = Math.sqrt(values.map(x => x ** 2).reduce((a, b) => a + b, 0));
} else {
average = calcAverage(values);
stddev = getStdDev(values, average);
}
return {
average,
stddev,
stddevPct: Math.round(calcPercentOf(stddev, average) * 100) / 100,
// TODO verify this is needed
// We use slice to keep the original values at their original order
// in case the order is important elsewhere.
runs: values.slice().sort(numericCompare),
};
};
const getClassName = (newIsBetter, oldVal, newVal, absTValue) => {
// Returns a class name, if any, based on a relative change in the absolute value
if (!oldVal || !newVal) {
return '';
}
let ratio = newVal / oldVal;
if (ratio < 1) {
ratio = 1 / ratio; // Direction agnostic and always >= 1.
}
if (ratio < 1.02 || absTValue < tValueCareMin) {
return '';
}
if (absTValue < tValueConfidence) {
return newIsBetter ? '' : 'compare-notsure';
}
return newIsBetter ? 'compare-improvement' : 'compare-regression';
};
// Aggregates two sets of values into a "comparison object" which is later used
// to display a single line of comparison.
// The result object has the following properties:
// - .isEmpty: true if no data for either side.
// If !isEmpty, for originalData/newData (if the data exists)
// - .[original|new]Value // Average of the values
// - .[original|new]Stddev // stddev
// - .[original|new]StddevPct // stddev as percentage of the average
// - .[original|new]Runs // Display data: number of runs and their values
// If both originalData/newData exist, comparison data:
// - .newIsBetter // is new result better or worse (even if unsure)
// - .isImprovement // is new result better + we're confident about it
// - .isRegression // is new result worse + we're confident about it
// - .delta
// - .deltaPercentage
// - .confidence // t-test value
// - .confidenceText // 'low'/'med'/'high'
// - .confidenceTextLong // more explanation on what confidenceText means
// - .isMeaningful // for highlighting - bool over t-test threshold
// And some data to help formatting of the comparison:
// - .className
// - .magnitude
// - .marginDirection
export const getCounterMap = function getCounterMap(testName, originalData, newData) {
// TODO: setting this value seems a bit odd; look into how it's being used
const cmap = { isEmpty: false };
const hasOrig = originalData && originalData.values.length;
const hasNew = newData && newData.values.length;
if (!hasOrig && !hasNew) {
cmap.isEmpty = true;
return cmap;
}
if (hasOrig) {
const orig = analyzeSet(originalData.values, testName);
cmap.originalValue = orig.average;
cmap.originalRuns = orig.runs;
cmap.originalStddev = orig.stddev;
cmap.originalStddevPct = orig.stddevPct;
} else {
cmap.originalRuns = [];
}
if (hasNew) {
const newd = analyzeSet(newData.values, testName);
cmap.newValue = newd.average;
cmap.newRuns = newd.runs;
cmap.newStddev = newd.stddev;
cmap.newStddevPct = newd.stddevPct;
} else {
cmap.newRuns = [];
}
if (!hasOrig || !hasNew) {
return cmap; // No comparison, just display for one side.
}
cmap.frameworkId = originalData.frameworkId;
// Normally tests are "lower is better"; this can be overridden with a series option
cmap.delta = (cmap.newValue - cmap.originalValue);
cmap.newIsBetter = (originalData.lowerIsBetter && cmap.delta < 0) ||
(!originalData.lowerIsBetter && cmap.delta > 0);
cmap.deltaPercentage = calcPercentOf(cmap.delta, cmap.originalValue);
// arbitrary scale from 0-20% multiplied by 5, capped
// at 100 (so 20% regression === 100% bad)
cmap.magnitude = Math.min(Math.abs(cmap.deltaPercentage) * 5, 100);
// 0.15 is the default stddev factor passed to getTTest when both sets have only a
// single value (15%). Should be a rare case and it's unreliable, but at least have something.
const absTValue = Math.abs(getTTest(originalData.values, newData.values, 0.15));
cmap.className = getClassName(cmap.newIsBetter, cmap.originalValue, cmap.newValue, absTValue);
cmap.confidence = absTValue;
cmap.confidenceTextLong = 'Result of running t-test on base versus new result distribution: ';
if (absTValue < tValueCareMin) {
cmap.confidenceText = 'low';
cmap.confidenceTextLong += "A value of 'low' suggests less confidence that there is a sustained, significant change between the two revisions.";
} else if (absTValue < tValueConfidence) {
cmap.confidenceText = 'med';
cmap.confidenceTextLong += "A value of 'med' indicates uncertainty that there is a significant change. If you haven't already, consider retriggering the job to be more sure.";
} else {
cmap.confidenceText = 'high';
cmap.confidenceTextLong += "A value of 'high' indicates more confidence that there is a significant change, however you should check the historical record for the test by looking at the graph to be more sure (some noisy tests can provide inconsistent results).";
}
cmap.isRegression = (cmap.className === 'compare-regression');
cmap.isImprovement = (cmap.className === 'compare-improvement');
cmap.isMeaningful = (cmap.className !== '');
cmap.isComplete = (cmap.originalRuns.length &&
cmap.newRuns.length);
cmap.isConfident = ((cmap.originalRuns.length > 1 &&
cmap.newRuns.length > 1 &&
absTValue >= tValueConfidence) ||
(cmap.originalRuns.length >= 6 &&
cmap.newRuns.length >= 6 &&
absTValue >= tValueCareMin));
cmap.needsMoreRuns = (cmap.isComplete && !cmap.isConfident &&
cmap.originalRuns.length < 6);
cmap.isNoiseMetric = false;
return cmap;
};
// TODO: move into a React component, as this is only used once (in the PhCompare controller)
export const getInterval = function getInterval(oldTimestamp, newTimestamp) {
const now = (new Date()).getTime() / 1000;
let timeRange = Math.min(oldTimestamp, newTimestamp);
timeRange = Math.round(now - timeRange);
const newTimeRange = phTimeRanges.find(time => timeRange <= time.value);
return newTimeRange.value;
};
// TODO possibly break up into different functions and/or move into a component
export const validateQueryParams = async function validateQueryParams(params) {
const { originalProject, newProject, originalRevision, newRevision, originalSignature,
newSignature } = params;
const errors = [];
if (!originalProject) errors.push('Missing input: originalProject');
if (!newProject) errors.push('Missing input: newProject');
if (!originalRevision) errors.push('Missing input: originalRevision');
if (!newRevision) errors.push('Missing input: newRevision');
if (originalSignature || newSignature) {
if (!originalSignature) errors.push('Missing input: originalSignature');
if (!newSignature) errors.push('Missing input: newSignature');
}
const { data, failureStatus } = await getData(getApiUrl('/repository/'));
if (!failureStatus && !data.find(project => project.name === originalProject)) {
errors.push(`Invalid project, doesn't exist: ${originalProject}`);
}
if (!failureStatus && !data.find(project => project.name === newProject)) {
errors.push(`Invalid project, doesn't exist: ${newProject}`);
}
return errors;
};
const getResultMapEntry = (datum, resultsMap, params) => {
if (params.push_id) {
if (!resultsMap[datum.push_id]) {
resultsMap[datum.push_id] = {};
}
return resultsMap[datum.push_id];
}
return resultsMap;
};
export const getResultsMap = function getResultsMap(projectName, seriesList, params) {
const resultsMap = {};
return Promise.all(chunk(seriesList, 150).map(
seriesChunk => PerfSeriesModel.getSeriesData(
projectName, {
signature_id: seriesChunk.map(series => series.id),
framework: [...new Set(seriesChunk.map(series => series.frameworkId))],
...params,
}).then((seriesData) => {
// Aggregates data from a single group of values and returns an object containing
// description (name/platform) and values; these are later processed in getCounterMap.
for (const [signatureHash, data] of Object.entries(seriesData)) {
const signature = seriesList.find(series => series.signature === signatureHash);
if (signature) {
data.forEach((datum) => {
const entry = getResultMapEntry(datum, resultsMap, params);
if (!entry[signatureHash]) {
entry[signatureHash] = {
...signature,
values: [datum.value],
};
} else {
entry[signatureHash].values.push(datum.value);
}
});
}
}
}),
)).then(() => resultsMap);
};
export const getGraphsLink = function getGraphsLink(seriesList, resultSets, timeRange) {
const params = {
series: seriesList.map(series => ([
series.projectName,
series.signature, 1,
series.frameworkId,
])),
highlightedRevisions: resultSets.map(resultSet => (
resultSet.revision.slice(0, 12)
)),
};
if (resultSets && !timeRange) {
params.timerange = Math.max(
...resultSets.map(resultSet =>
phTimeRanges.map(range => range.value).find(t =>
((Date.now() / 1000.0) - resultSet.push_timestamp) < t),
));
}
if (timeRange) {
params.timerange = timeRange;
}
return `perf.html#/graphs${createQueryParams(params)}`;
};