Mirror of https://github.com/mozilla/treeherder.git
Bug 1485090 - Convert ResultSet Model to ReactJS
This commit is contained in:
Parent: d8c8954866
Commit: 3af1db6b2c
@@ -2,55 +2,33 @@ import * as fetchMock from 'fetch-mock';
import { getProjectUrl } from '../../../../ui/helpers/url';
describe('ThResultSetStore', function () {
describe('ThResultSetStore', () => {
let $httpBackend;
let model;
const repoName = 'mozilla-inbound';
beforeEach(angular.mock.module('treeherder'));
beforeEach(inject(function ($injector, $controller,
ThResultSetStore) {
beforeEach(inject((ThResultSetStore) => {
$httpBackend = $injector.get('$httpBackend');
jasmine.getJSONFixtures().fixturesPath = 'base/tests/ui/mock';
fetchMock.get(
'https://treestatus.mozilla-releng.net/trees/mozilla-inbound',
{
result: {
status: 'approval required',
message_of_the_day: 'I before E',
tree: 'mozilla-inbound',
reason: '',
},
},
);
$httpBackend.whenGET(getProjectUrl('/resultset/?count=10&full=true', repoName)).respond(
fetchMock.get(getProjectUrl('/resultset/?full=true&count=10', repoName),
getJSONFixture('push_list.json'),
);
fetchMock.get(
getProjectUrl('/jobs/?return_type=list&result_set_id=1&count=2000', repoName),
getProjectUrl('/jobs/?return_type=list&count=2000&result_set_id=1', repoName),
getJSONFixture('job_list/job_1.json'),
);
fetchMock.get(
getProjectUrl('/jobs/?return_type=list&result_set_id=2&count=2000', repoName),
getProjectUrl('/jobs/?return_type=list&count=2000&result_set_id=2', repoName),
getJSONFixture('job_list/job_2.json'),
);
$httpBackend.whenGET('/api/repository/').respond(
getJSONFixture('repositories.json'),
);
model = ThResultSetStore;
model.initRepository(repoName);
model.fetchPushes(10);
$httpBackend.flush();
}));
afterEach(() => {

@@ -60,11 +38,13 @@ describe('ThResultSetStore', function () {
/*
Tests ThResultSetStore
*/
it('should have 2 resultset', () => {
it('should have 2 resultset', async () => {
await model.fetchPushes(10);
expect(model.getPushArray().length).toBe(2);
});
it('should have id of 1 in foreground (current) repo', () => {
it('should have id of 1 in current repo', async () => {
await model.fetchPushes(10);
expect(model.getPushArray()[0].id).toBe(1);
});
});

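Note: the converted test above replaces Angular's $httpBackend with fetch-mock and awaits fetchPushes directly. A minimal standalone sketch of that pattern follows; the endpoint URL, store module name, and two-push payload are illustrative assumptions, not the real Treeherder fixtures.

  import * as fetchMock from 'fetch-mock';
  // Hypothetical store with the same surface as ThResultSetStore.
  import { initRepository, fetchPushes, getPushArray } from './pushStore';

  describe('push store (sketch)', () => {
    beforeEach(() => {
      // Assumed endpoint shape; the real tests build the URL with getProjectUrl().
      fetchMock.get('/api/project/mozilla-inbound/resultset/?full=true&count=10',
        { results: [{ id: 1 }, { id: 2 }] });
      initRepository('mozilla-inbound');
    });

    afterEach(() => fetchMock.reset());

    it('loads pushes via fetch', async () => {
      await fetchPushes(10);
      expect(getPushArray().length).toBe(2);
    });
  });
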
@@ -32,7 +32,6 @@ import './js/services/main';
import './js/services/buildapi';
import './js/services/classifications';
import './js/services/jobfilters';
import './js/models/resultset';
import './js/models/resultsets_store';
import './js/models/perf/series';
import './js/controllers/main';

@@ -19,6 +19,5 @@ import './js/directives/treeherder/log_viewer_steps';
import './js/directives/treeherder/main';
import './js/components/logviewer/logviewer';
import './js/services/main';
import './js/models/resultset';
import './js/filters';
import './js/controllers/logviewer';

@@ -28,7 +28,6 @@ import './shared/Login';
// Perf JS
import './js/filters';
import './js/services/main';
import './js/models/resultset';
import './js/models/perf/series';
import './js/models/perf/issue_tracker';
import './js/models/perf/performance_framework';

@@ -34,10 +34,11 @@ Helper method for constructing an error message from Taskcluster.
export const formatTaskclusterError = function formatTaskclusterError(e) {
const TC_ERROR_PREFIX = 'Taskcluster: ';
const err = e.body || e;
const errorMessage = err.message || err.toString();
if (err.message.indexOf('----') !== -1) {
return `${TC_ERROR_PREFIX}${err.message.split('----')[0]}`;
if (errorMessage.indexOf('----') !== -1) {
return `${TC_ERROR_PREFIX}${errorMessage.split('----')[0]}`;
}
return `${TC_ERROR_PREFIX}${err.message}`;
return `${TC_ERROR_PREFIX}${errorMessage}`;
};

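Note: a quick usage sketch of the updated helper, with an assumed error shape matching what the function expects (an object carrying an optional body.message):

  import { formatTaskclusterError } from '../helpers/errorMessage';

  // A Taskcluster-style error whose message carries extra detail after '----'.
  const err = { body: { message: 'Task not found ---- stack trace follows' } };
  formatTaskclusterError(err);            // "Taskcluster: Task not found " (text before '----')
  formatTaskclusterError(new Error('x')); // "Taskcluster: x"
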
@@ -1,7 +1,17 @@
import { thDefaultRepo } from '../js/constants';
export const getQueryString = function getQueryString() {
return location.hash.split('?')[1];
};
export const getAllUrlParams = function getAllUrlParams() {
return new URLSearchParams(location.hash.split('?')[1]);
return new URLSearchParams(getQueryString());
};
export const getUrlParam = function getUrlParam(name) {
return getAllUrlParams().get(name);
};
export const getRepo = function getRepo() {
return getUrlParam('repo') || thDefaultRepo;
};

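Note: a short usage sketch of the new location helpers, assuming a hash-based URL of the kind the jobs view uses (the repo name here is only an example):

  // With window.location.hash === '#/jobs?repo=autoland&revision=abc123'
  getQueryString();        // 'repo=autoland&revision=abc123'
  getUrlParam('revision'); // 'abc123'
  getRepo();               // 'autoland'
  // With no ?repo= param, getRepo() falls back to thDefaultRepo.
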
@@ -1,5 +1,5 @@
import taskcluster from './taskcluster';
import { getUrlParam, getAllUrlParams } from './location';
import { getAllUrlParams, getRepo } from './location';
export const uiJobsUrlBase = '/#/jobs';

@@ -76,7 +76,7 @@ export const getPerfAnalysisUrl = function getPerfAnalysisUrl(url) {
// URL. If not there, then try m-i and hope for the best. The caller may
// not actually need a repo if they're trying to get a job by ``id``.
export const getProjectUrl = function getProjectUrl(uri, repoName) {
const repo = repoName || getUrlParam('repo') || 'mozilla-inbound';
const repo = repoName || getRepo();
return getApiUrl(`/project/${repo}${uri}`);
};

@@ -113,11 +113,10 @@ export const graphsEndpoint = 'failurecount/';
export const parseQueryParams = function parseQueryParams(search) {
const params = new URLSearchParams(search);
const obj = {};
for (const [key, value] of params.entries()) {
obj[key] = value;
}
return obj;
return [...params.entries()].reduce((acc, [key, value]) => (
{ ...acc, [key]: value }
), {});
};
// TODO: Combine this with getApiUrl().

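Note: the loop-based parseQueryParams body becomes a reduce over the URLSearchParams entries; both forms produce the same plain object. A small example:

  parseQueryParams('?repo=mozilla-inbound&count=10');
  // => { repo: 'mozilla-inbound', count: '10' }  (values stay strings)
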
@@ -16,8 +16,6 @@ export default class CustomJobActions extends React.Component {
constructor(props) {
super(props);
this.taskclusterModel = new TaskclusterModel(props.notify);
this.state = {
ajv: new Ajv({ format: 'full', verbose: true, allErrors: true }),
decisionTaskId: null,

@@ -41,7 +39,7 @@ export default class CustomJobActions extends React.Component {
this.triggerAction = this.triggerAction.bind(this);
pushModel.getGeckoDecisionTaskId(pushId).then((decisionTaskId) => {
this.taskclusterModel.load(decisionTaskId, job).then((results) => {
TaskclusterModel.load(decisionTaskId, job).then((results) => {
const { originalTask, originalTaskId, staticActionVariables, actions } = results;
const actionOptions = actions.map(action => ({ value: action, label: action.title }));

@@ -109,7 +107,7 @@ export default class CustomJobActions extends React.Component {
}
}
this.taskclusterModel.submit({
TaskclusterModel.submit({
action,
actionTaskId: slugid(),
decisionTaskId,

@@ -13,7 +13,7 @@ import UpdateAvailable from './headerbars/UpdateAvailable';
import PushList from './PushList';
import PrimaryNavBar from './headerbars/PrimaryNavBar';
import RepositoryModel from '../models/repository';
import { getUrlParam } from '../helpers/location';
import { getRepo } from '../helpers/location';
const DEFAULT_DETAILS_PCT = 40;
const REVISION_POLL_INTERVAL = 1000 * 60 * 5;

@@ -40,7 +40,7 @@ class JobView extends React.Component {
this.history = createBrowserHistory();
this.state = {
repoName: getUrlParam('repo'),
repoName: getRepo(),
user: { isLoggedIn: false, isStaff: false },
isFieldFilterVisible: false,
filterBarFilters: [

@@ -56,7 +56,10 @@ class JobView extends React.Component {
}
static getDerivedStateFromProps(props) {
return JobView.getSplitterDimensions(props);
return {
...JobView.getSplitterDimensions(props),
repoName: getRepo(),
};
}
componentDidMount() {

@@ -1,9 +1,10 @@
import React from 'react';
import PropTypes from 'prop-types';
import { getUrlParam } from '../helpers/location';
import { formatModelError, formatTaskclusterError } from '../helpers/errorMessage';
import { formatTaskclusterError } from '../helpers/errorMessage';
import { thEvents } from '../js/constants';
import CustomJobActions from './CustomJobActions';
import PushModel from '../models/push';
export default class PushActionMenu extends React.PureComponent {

@@ -14,7 +15,6 @@ export default class PushActionMenu extends React.PureComponent {
this.$rootScope = $injector.get('$rootScope');
this.thNotify = $injector.get('thNotify');
this.ThResultSetStore = $injector.get('ThResultSetStore');
this.ThResultSetModel = $injector.get('ThResultSetModel');
this.$uibModal = $injector.get('$uibModal');
this.revision = this.props.revision;

@@ -63,16 +63,14 @@ export default class PushActionMenu extends React.PureComponent {
this.ThResultSetStore.getGeckoDecisionTaskId(this.pushId)
.then((decisionTaskID) => {
this.ThResultSetModel.triggerMissingJobs(decisionTaskID)
PushModel.triggerMissingJobs(decisionTaskID)
.then((msg) => {
this.thNotify.send(msg, 'success');
}, (e) => {
this.thNotify.send(
formatModelError(e, "The action 'trigger missing jobs' failed"),
'danger',
{ sticky: true },
);
}).catch((e) => {
this.thNotify.send(formatTaskclusterError(e), 'danger', { sticky: true });
});
}).catch((e) => {
this.thNotify.send(formatTaskclusterError(e), 'danger', { sticky: true });
});
}

@@ -88,16 +86,14 @@ export default class PushActionMenu extends React.PureComponent {
this.ThResultSetStore.getGeckoDecisionTaskId(this.pushId)
.then((decisionTaskID) => {
this.ThResultSetModel.triggerAllTalosJobs(times, decisionTaskID)
PushModel.triggerAllTalosJobs(times, decisionTaskID)
.then((msg) => {
this.thNotify.send(msg, 'success');
}, (e) => {
this.thNotify.send(
formatTaskclusterError(e),
'danger',
{ sticky: true },
);
}).catch((e) => {
this.thNotify.send(formatTaskclusterError(e), 'danger', { sticky: true });
});
}).catch((e) => {
this.thNotify.send(formatTaskclusterError(e), 'danger', { sticky: true });
});
}

@@ -6,6 +6,7 @@ import { toDateStr } from '../helpers/display';
import { formatModelError, formatTaskclusterError } from '../helpers/errorMessage';
import { thEvents } from '../js/constants';
import { getJobsUrl } from '../helpers/url';
import PushModel from '../models/push';
function Author(props) {
const authorMatch = props.author.match(/\<(.*?)\>+/);

@@ -62,14 +63,12 @@ export default class PushHeader extends React.PureComponent {
this.thNotify = $injector.get('thNotify');
this.thBuildApi = $injector.get('thBuildApi');
this.ThResultSetStore = $injector.get('ThResultSetStore');
this.ThResultSetModel = $injector.get('ThResultSetModel');
this.pushDateStr = toDateStr(pushTimestamp);
this.revisionPushFilterUrl = getJobsUrl({ repo: repoName, revision });
this.authorPushFilterUrl = getJobsUrl({ repo: repoName, author });
this.pinAllShownJobs = this.pinAllShownJobs.bind(this);
this.triggerNewJobs = this.triggerNewJobs.bind(this);
this.cancelAllJobs = this.cancelAllJobs.bind(this);
this.state = {

@@ -80,6 +79,8 @@ export default class PushHeader extends React.PureComponent {
}
componentWillMount() {
this.triggerNewJobs = this.triggerNewJobs.bind(this);
this.toggleRunnableJobUnlisten = this.$rootScope.$on(
thEvents.selectRunnableJob, (ev, runnableJobs, pushId) => {
if (this.props.pushId === pushId) {

@@ -121,16 +122,19 @@ export default class PushHeader extends React.PureComponent {
}
if (isLoggedIn) {
const builderNames = this.ThResultSetStore.getSelectedRunnableJobs(pushId);
this.ThResultSetStore.getGeckoDecisionTaskId(pushId).then((decisionTaskID) => {
this.ThResultSetModel.triggerNewJobs(builderNames, decisionTaskID).then((result) => {
this.thNotify.send(result, 'success');
this.ThResultSetStore.deleteRunnableJobs(pushId);
this.props.hideRunnableJobsCb();
this.setState({ runnableJobsSelected: false });
}, (e) => {
this.ThResultSetStore.getGeckoDecisionTaskId(pushId)
.then((decisionTaskID) => {
PushModel.triggerNewJobs(builderNames, decisionTaskID).then((result) => {
this.thNotify.send(result, 'success');
this.ThResultSetStore.deleteRunnableJobs(pushId);
this.props.hideRunnableJobsCb();
this.setState({ runnableJobsSelected: false });
}).catch((e) => {
this.thNotify.send(formatTaskclusterError(e), 'danger', { sticky: true });
});
}).catch((e) => {
this.thNotify.send(formatTaskclusterError(e), 'danger', { sticky: true });
});
});
} else {
this.thNotify.send('Must be logged in to trigger a job', 'danger');
}

@@ -142,7 +146,7 @@ export default class PushHeader extends React.PureComponent {
this.setState({ showConfirmCancelAll: false });
if (!isLoggedIn) return;
this.ThResultSetModel.cancelAll(pushId).then(() => (
PushModel.cancelAll(pushId).then(() => (
this.thBuildApi.cancelAll(repoName, revision)
)).catch((e) => {
this.thNotify.send(

@@ -12,6 +12,7 @@ import {
import PushLoadErrors from './PushLoadErrors';
import { thEvents } from '../js/constants';
import JobModel from '../models/job';
import PushModel from '../models/push';
export default class PushList extends React.Component {

@@ -25,11 +26,9 @@ export default class PushList extends React.Component {
this.thNotify = $injector.get('thNotify');
this.thJobFilters = $injector.get('thJobFilters');
this.ThResultSetStore = $injector.get('ThResultSetStore');
this.ThResultSetModel = $injector.get('ThResultSetModel');
this.ThResultSetStore.initRepository(repoName);
this.getNextPushes = this.getNextPushes.bind(this);
this.updateUrlFromchange = this.updateUrlFromchange.bind(this);
this.closeJob = this.closeJob.bind(this);

@@ -40,13 +39,12 @@ export default class PushList extends React.Component {
};
// get our first set of resultsets
this.ThResultSetStore.fetchPushes(
this.ThResultSetStore.defaultPushCount,
true,
);
this.ThResultSetStore.fetchPushes(this.ThResultSetStore.defaultPushCount);
}
componentWillMount() {
this.getNextPushes = this.getNextPushes.bind(this);
this.pushesLoadedUnlisten = this.$rootScope.$on(thEvents.pushesLoaded, () => {
const pushList = this.ThResultSetStore.getPushArray();
this.$timeout(() => {

@@ -107,7 +105,7 @@ export default class PushList extends React.Component {
this.jobsClassifiedUnlisten();
}
getNextPushes(count, keepFilters) {
getNextPushes(count) {
this.setState({ loadingPushes: true });
const revision = this.$location.search().revision;
if (revision) {

@@ -115,7 +113,7 @@ export default class PushList extends React.Component {
this.$location.search('revision', null);
this.$location.search('tochange', revision);
}
this.ThResultSetStore.fetchPushes(count, keepFilters)
this.ThResultSetStore.fetchPushes(count)
.then(this.updateUrlFromchange);
}

@@ -140,16 +138,19 @@ export default class PushList extends React.Component {
// If the ``selectedJob`` was not mapped, then we need to notify
// the user it's not in the range of the current result set list.
JobModel.get(repoName, selectedJobId).then((job) => {
this.ThResultSetModel.getResultSet(repoName, job.result_set_id).then((push) => {
this.$location.search('selectedJob', null);
const url = `${urlBasePath}?repo=${repoName}&revision=${push.data.revision}&selectedJob=${selectedJobId}`;
// the job exists, but isn't in any loaded push.
// provide a message and link to load the right push
this.thNotify.send(`Selected job id: ${selectedJobId} not within current push range.`,
'danger',
{ sticky: true, linkText: 'Load push', url });
PushModel.get(job.result_set_id).then(async (resp) => {
if (resp.ok) {
const push = await resp.json();
this.$location.search('selectedJob', null);
const url = `${urlBasePath}?repo=${repoName}&revision=${push.data.revision}&selectedJob=${selectedJobId}`;
// the job exists, but isn't in any loaded push.
// provide a message and link to load the right push
this.thNotify.send(
`Selected job id: ${selectedJobId} not within current push range.`,
'danger',
{ sticky: true, linkText: 'Load push', url });
}
});
}).catch((error) => {
// the job wasn't found in the db. Either never existed,

@@ -305,7 +306,7 @@ export default class PushList extends React.Component {
{[10, 20, 50].map(count => (
<div
className="btn btn-light-bordered"
onClick={() => (this.getNextPushes(count, true))}
onClick={() => (this.getNextPushes(count))}
key={count}
>{count}</div>
))}

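Note: the recurring pattern in this component (and the ones below) is that ThResultSetModel's $http promises, which resolved with { data }, become PushModel calls that resolve with a fetch Response, so callers check resp.ok and await resp.json(). A minimal hedged sketch of that shape, assuming PushModel.get wraps fetch() and returns the raw Response:

  async function loadPush(pushId) {
    const resp = await PushModel.get(pushId);
    if (!resp.ok) {
      throw new Error(`Failed to fetch push ${pushId}: ${resp.statusText}`);
    }
    return resp.json(); // the push payload previously found under response.data
  }
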
@@ -29,8 +29,6 @@ export default class ActionBar extends React.Component {
this.$rootScope = $injector.get('$rootScope');
this.$timeout = $injector.get('$timeout');
this.taskclusterModel = new TaskclusterModel(this.thNotify);
this.state = {
customJobActionsShowing: false,
};

@@ -132,13 +130,13 @@ export default class ActionBar extends React.Component {
if (selectedJob.build_system_type === 'taskcluster' || selectedJob.reason.startsWith('Created by BBB for task')) {
this.ThResultSetStore.getGeckoDecisionTaskId(
selectedJob.result_set_id).then(decisionTaskId => (
this.taskclusterModel.load(decisionTaskId, selectedJob).then((results) => {
TaskclusterModel.load(decisionTaskId, selectedJob).then((results) => {
const actionTaskId = slugid();
if (results) {
const backfilltask = results.actions.find(result => result.name === 'backfill');
// We'll fall back to actions.yaml if this isn't true
if (backfilltask) {
return this.taskclusterModel.submit({
return TaskclusterModel.submit({
action: backfilltask,
actionTaskId,
decisionTaskId,

@@ -1,10 +1,12 @@
import React from 'react';
import PropTypes from 'prop-types';
import { thMaxPushFetchSize } from '../../../js/constants';
import { toDateStr, toShortDateStr } from '../../../helpers/display';
import { getBtnClass, getStatus } from '../../../helpers/job';
import { getSlaveHealthUrl, getJobsUrl } from '../../../helpers/url';
import JobModel from '../../../models/job';
import PushModel from '../../../models/push';
import TextLogStepModel from '../../../models/textLogStep';
export default class SimilarJobsTab extends React.Component {

@@ -13,7 +15,6 @@ export default class SimilarJobsTab extends React.Component {
const { $injector } = this.props;
this.$rootScope = $injector.get('$rootScope');
this.ThResultSetModel = $injector.get('ThResultSetModel');
this.thNotify = $injector.get('thNotify');
this.thClassificationTypes = $injector.get('thClassificationTypes');

@@ -69,21 +70,27 @@ export default class SimilarJobsTab extends React.Component {
// create an array of unique push ids
const pushIds = [...new Set(newSimilarJobs.map(job => job.result_set_id))];
// get pushes and revisions for the given ids
const pushListResp = await this.ThResultSetModel.getResultSetList(repoName, pushIds, true);
const pushList = pushListResp.data;
// decorate the list of jobs with their result sets
const pushes = pushList.results.reduce((acc, push) => (
{ ...acc, [push.id]: push }
), {});
newSimilarJobs.forEach((simJob) => {
simJob.result_set = pushes[simJob.result_set_id];
simJob.revisionResultsetFilterUrl = getJobsUrl({ repo: repoName, revision: simJob.result_set.revisions[0].revision });
simJob.authorResultsetFilterUrl = getJobsUrl({ repo: repoName, author: simJob.result_set.author });
});
this.setState({ similarJobs: [...similarJobs, ...newSimilarJobs] });
// on the first page show the first element info by default
if (!selectedSimilarJob && newSimilarJobs.length > 0) {
this.showJobInfo(newSimilarJobs[0]);
let pushList = { results: [] };
const resp = await PushModel.getList({ id__in: pushIds.join(','), count: thMaxPushFetchSize });
if (resp.ok) {
pushList = await resp.json();
// decorate the list of jobs with their result sets
const pushes = pushList.results.reduce((acc, push) => (
{ ...acc, [push.id]: push }
), {});
newSimilarJobs.forEach((simJob) => {
simJob.result_set = pushes[simJob.result_set_id];
simJob.revisionResultsetFilterUrl = getJobsUrl({ repo: repoName, revision: simJob.result_set.revisions[0].revision });
simJob.authorResultsetFilterUrl = getJobsUrl({ repo: repoName, author: simJob.result_set.author });
});
this.setState({ similarJobs: [...similarJobs, ...newSimilarJobs] });
// on the first page show the first element info by default
if (!selectedSimilarJob && newSimilarJobs.length > 0) {
this.showJobInfo(newSimilarJobs[0]);
}
} else {
this.thNotify.send(`Error fetching similar jobs push data: ${resp.message}`, 'danger', { sticky: true });
}
}
this.setState({ isLoading: false });

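Note: a hedged sketch of the bulk-fetch pattern used above, where a set of push ids becomes an id__in query against PushModel.getList and thMaxPushFetchSize caps the page size; the helper name is hypothetical.

  // Assumed: PushModel.getList returns a fetch Response for the push list endpoint.
  async function getPushesById(pushIds) {
    const resp = await PushModel.getList({
      id__in: pushIds.join(','),
      count: thMaxPushFetchSize,
    });
    if (!resp.ok) return {};
    const { results } = await resp.json();
    // Index pushes by id for quick lookup, as SimilarJobsTab does.
    return results.reduce((acc, push) => ({ ...acc, [push.id]: push }), {});
  }
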
@@ -3,7 +3,7 @@ import PropTypes from 'prop-types';
import { thEvents } from '../../js/constants';
import { getBtnClass } from '../../helpers/job';
import { getUrlParam } from '../../helpers/location';
import { getRepo, getUrlParam } from '../../helpers/location';
import WatchedRepo from './WatchedRepo';
import RepositoryModel from '../../models/repository';

@@ -25,7 +25,6 @@ export default class SecondaryNavBar extends React.Component {
this.thJobFilters.filterGroups.nonfailures,
'in progress'].reduce((acc, val) => acc.concat(val), []);
const searchStr = this.thJobFilters.getFieldFiltersObj().searchStr;
this.repoName = getUrlParam('repo');
this.state = {
groupsExpanded: getUrlParam('group_state') === 'expanded',

@@ -35,9 +34,14 @@ export default class SecondaryNavBar extends React.Component {
watchedRepoNames: [],
allUnclassifiedFailureCount: 0,
filteredUnclassifiedFailureCount: 0,
repoName: getRepo(),
};
}
static getDerivedStateFromProps() {
return { repoName: getRepo() };
}
componentDidMount() {
const { history } = this.props;

@@ -165,12 +169,14 @@ export default class SecondaryNavBar extends React.Component {
}
loadWatchedRepos() {
const { repoName } = this.state;
try {
const storedWatched = JSON.parse(localStorage.getItem(WATCHED_REPOS_STORAGE_KEY)) || [];
// Ensure the current repo is first in the list
const watchedRepoNames = [
this.repoName,
...storedWatched.filter(value => (value !== this.repoName)),
repoName,
...storedWatched.filter(value => (value !== repoName)),
].slice(0, MAX_WATCHED_REPOS);
// Re-save the list, in case it has now changed

@@ -197,7 +203,7 @@ export default class SecondaryNavBar extends React.Component {
} = this.props;
const {
watchedRepoNames, groupsExpanded, showDuplicateJobs, searchQueryStr,
allUnclassifiedFailureCount, filteredUnclassifiedFailureCount,
allUnclassifiedFailureCount, filteredUnclassifiedFailureCount, repoName,
} = this.state;
// This array needs to be RepositoryModel objects, not strings.
// If ``repos`` is not yet populated, then leave as empty array.

@@ -217,7 +223,7 @@ export default class SecondaryNavBar extends React.Component {
<WatchedRepo
key={watchedRepo.name}
repo={watchedRepo}
repoName={this.repoName}
repoName={repoName}
$injector={$injector}
unwatchRepo={this.unwatchRepo}
setCurrentRepoTreeStatus={setCurrentRepoTreeStatus}

@@ -384,3 +384,5 @@ export const phAlertStatusMap = {
export const phCompareBaseLineDefaultTimeRange = 86400 * 2;
export const thBugSuggestionLimit = 20;
export const thMaxPushFetchSize = 100;

@@ -4,14 +4,15 @@ import { isReftest } from '../../helpers/job';
import { thDateFormat } from '../constants';
import JobDetailModel from '../../models/jobDetail';
import JobModel from '../../models/job';
import PushModel from '../../models/push';
import TextLogStepModel from '../../models/textLogStep';
logViewerApp.controller('LogviewerCtrl', [
'$location', '$window', '$document', '$rootScope', '$scope',
'$timeout', 'thNotify', 'dateFilter', 'ThResultSetModel',
'$timeout', 'thNotify', 'dateFilter',
function Logviewer(
$location, $window, $document, $rootScope, $scope,
$timeout, thNotify, dateFilter, ThResultSetModel) {
$timeout, thNotify, dateFilter) {
const query_string = $location.search();
$scope.css = '';

@@ -158,7 +159,7 @@ logViewerApp.controller('LogviewerCtrl', [
$scope.init = () => {
$scope.logProperties = [];
JobModel.get($scope.repoName, $scope.job_id).then((job) => {
JobModel.get($scope.repoName, $scope.job_id).then(async (job) => {
// set the title of the browser window/tab
$scope.logViewerTitle = job.getTitle();

@@ -190,15 +191,15 @@ logViewerApp.controller('LogviewerCtrl', [
}
// get the revision and linkify it
ThResultSetModel.getResultSet($scope.repoName, job.result_set_id).then((data) => {
const revision = data.data.revision;
PushModel.get(job.result_set_id).then(async (resp) => {
const push = await resp.json();
const revision = push.revision;
$scope.logProperties.push({ label: 'Revision', value: revision });
});
JobDetailModel.getJobDetails({ job_guid: job.job_guid }).then((jobDetails) => {
$scope.job_details = jobDetails;
});
$scope.job_details = await JobDetailModel.getJobDetails({ job_guid: job.job_guid });
$scope.$apply();
}).catch((error) => {
$scope.loading = false;
$scope.jobExists = false;

@@ -14,6 +14,7 @@ import {
phAlertStatusMap,
} from '../../constants';
import OptionCollectionModel from '../../../models/optionCollection';
import PushModel from '../../../models/push';
import RepositoryModel from '../../../models/repository';
perf.factory('PhBugs', [

@@ -188,12 +189,10 @@ perf.controller(
perf.controller('AlertsCtrl', [
'$state', '$stateParams', '$scope', '$rootScope', '$q', '$uibModal',
'ThResultSetModel',
'PhFramework', 'PhAlerts', 'PhBugs', 'PhIssueTracker',
'dateFilter', 'clipboard',
function AlertsCtrl($state, $stateParams, $scope, $rootScope, $q,
$uibModal,
ThResultSetModel,
PhFramework, PhAlerts, PhBugs, PhIssueTracker,
dateFilter, clipboard) {
$scope.alertSummaries = undefined;

@@ -436,28 +435,27 @@ perf.controller('AlertsCtrl', [
});
$q.all(Object.keys(resultSetToSummaryMap).map(repo =>
ThResultSetModel.getResultSetList(
repo, Object.keys(resultSetToSummaryMap[repo]), true).then(
(response) => {
response.data.results.forEach((resultSet) => {
resultSet.dateStr = dateFilter(
resultSet.push_timestamp * 1000, thDateFormat);
// want at least 14 days worth of results for relative comparisons
const timeRange = phTimeRangeValues[repo] ? phTimeRangeValues[repo] : phDefaultTimeRangeValue;
resultSet.timeRange = Math.max(timeRange,
phTimeRanges.map(timeRange => timeRange.value).find(
t => ((Date.now() / 1000.0) - resultSet.push_timestamp) < t));
resultSetToSummaryMap[repo][resultSet.id].forEach(
(summary) => {
if (summary.push_id === resultSet.id) {
summary.resultSetMetadata = resultSet;
} else if (summary.prev_push_id === resultSet.id) {
summary.prevResultSetMetadata = resultSet;
}
});
});
}),
PushModel.getList({ repo, id__in: Object.keys(resultSetToSummaryMap[repo]).join(',') })
.then(async (response) => {
const { results } = await response.json();
results.forEach((resultSet) => {
resultSet.dateStr = dateFilter(
resultSet.push_timestamp * 1000, thDateFormat);
// want at least 14 days worth of results for relative comparisons
const timeRange = phTimeRangeValues[repo] ? phTimeRangeValues[repo] : phDefaultTimeRangeValue;
resultSet.timeRange = Math.max(timeRange,
phTimeRanges.map(timeRange => timeRange.value).find(
t => ((Date.now() / 1000.0) - resultSet.push_timestamp) < t));
resultSetToSummaryMap[repo][resultSet.id].forEach(
(summary) => {
if (summary.push_id === resultSet.id) {
summary.resultSetMetadata = resultSet;
} else if (summary.prev_push_id === resultSet.id) {
summary.prevResultSetMetadata = resultSet;
}
});
});
}),
)).then(() => {
// for all complete summaries, fill in job and pushlog links
// and downstream summaries

@@ -9,13 +9,14 @@ import {
phTimeRanges,
phCompareBaseLineDefaultTimeRange,
} from '../../constants';
import PushModel from '../../../models/push';
import RepositoryModel from '../../../models/repository';
perf.controller('CompareChooserCtrl', [
'$state', '$stateParams', '$scope', '$q', 'ThResultSetModel',
'$state', '$stateParams', '$scope', '$q',
'localStorageService',
function CompareChooserCtrl($state, $stateParams, $scope, $q,
ThResultSetModel, localStorageService) {
localStorageService) {
RepositoryModel.getList().then((projects) => {
$scope.projects = projects;
$scope.originalTipList = [];

@@ -45,14 +46,16 @@ perf.controller('CompareChooserCtrl', [
// due to we push the revision data into list,
// so we need clear the data before we push new data into it.
list.splice(0, list.length);
ThResultSetModel.getResultSets(projectName).then(function (response) {
const resultsets = response.data.results;
resultsets.forEach(function (revisionSet) {
PushModel.getList({ repo: projectName }).then(async (response) => {
const { results } = await response.json();
results.forEach(function (revisionSet) {
list.push({
revision: revisionSet.revision,
author: revisionSet.author,
});
});
$scope.$apply();
});
};

@@ -76,24 +79,30 @@ perf.controller('CompareChooserCtrl', [
$scope.runCompare = function () {
const revisionPromises = [];
if ($scope.revisionComparison) {
revisionPromises.push(ThResultSetModel.getResultSetsFromRevision($scope.originalProject.name, $scope.originalRevision).then(
function () {
revisionPromises.push(PushModel.getList({
repo: $scope.originalProject.name,
revision: $scope.originalRevision,
}).then((resp) => {
if (resp.ok) {
$scope.originalRevisionError = undefined;
},
function (error) {
$scope.originalRevisionError = error;
},
));
} else {
$scope.originalRevisionError = resp.statusText;
}
$scope.$apply();
}));
}
revisionPromises.push(ThResultSetModel.getResultSetsFromRevision($scope.newProject.name, $scope.newRevision).then(
function () {
revisionPromises.push(PushModel.getList({
repo: $scope.newProject.name,
revision: $scope.newRevision,
}).then((resp) => {
if (resp.ok) {
$scope.newRevisionError = undefined;
},
function (error) {
$scope.newRevisionError = error;
},
));
} else {
$scope.newRevisionError = resp.statusText;
}
$scope.$apply();
}));
$q.all(revisionPromises).then(function () {
localStorageService.set('originalProject', $scope.originalProject.name, 'sessionStorage');

@@ -124,10 +133,10 @@ perf.controller('CompareChooserCtrl', [
perf.controller('CompareResultsCtrl', [
'$state', '$stateParams', '$scope',
'ThResultSetModel', '$httpParamSerializer', '$q', 'PhFramework', 'PhSeries',
'$httpParamSerializer', '$q', 'PhFramework', 'PhSeries',
'PhCompare',
function CompareResultsCtrl($state, $stateParams, $scope,
ThResultSetModel, $httpParamSerializer,
$httpParamSerializer,
$q, PhFramework, PhSeries,
PhCompare) {
function displayResults(rawResultsMap, newRawResultsMap) {

@@ -263,9 +272,9 @@ perf.controller('CompareResultsCtrl', [
$scope.testList = Object.keys($scope.compareResults).sort().concat([noiseMetricTestName]);
$scope.titles[noiseMetricTestName] = noiseMetricTestName;
// call digest explicitly so we don't have to worry about when promises
// call $apply explicitly so we don't have to worry about when promises
// get resolved (see bug 1470600)
$scope.$digest();
$scope.$apply();
}
function load() {

@@ -359,25 +368,35 @@ perf.controller('CompareResultsCtrl', [
{ push_id: [$scope.newResultSet.id] });
}).then((resultMaps) => {
$scope.dataLoading = false;
displayResults(originalResultsMap, resultMaps[$scope.newResultSet.id]);
const newResult = resultMaps[$scope.newResultSet.id];
if (newResult) {
displayResults(originalResultsMap, newResult);
}
$scope.$apply();
});
});
}
}
// TODO: duplicated in comparesubtestctrl
function verifyRevision(project, revision, rsid) {
return ThResultSetModel.getResultSetsFromRevision(project.name, revision).then(
function (resultSets) {
const resultSet = resultSets[0];
// TODO: this is a bit hacky to pass in 'original' as a text string
if (rsid === 'original') {
$scope.originalResultSet = resultSet;
return PushModel.getList({ repo: project.name, revision })
.then(async (resp) => {
if (resp.ok) {
const { results } = await resp.json();
const resultSet = results[0];
// TODO: this is a bit hacky to pass in 'original' as a text string
if (rsid === 'original') {
$scope.originalResultSet = resultSet;
} else {
$scope.newResultSet = resultSet;
}
} else {
$scope.newResultSet = resultSet;
const error = await resp.text();
$scope.errors.push(error);
}
},
function (error) {
$scope.errors.push(error);
}).catch((error) => {
$scope.errors.push(error);
});
}

@@ -491,24 +510,25 @@ perf.controller('CompareResultsCtrl', [
perf.controller('CompareSubtestResultsCtrl', [
'$state', '$stateParams', '$scope',
'ThResultSetModel', '$q', 'PhSeries',
'$q', 'PhSeries',
'PhCompare', '$httpParamSerializer',
function CompareSubtestResultsCtrl($state, $stateParams, $scope,
ThResultSetModel,
$q, PhSeries,
PhCompare,
$httpParamSerializer) {
// TODO: duplicated from comparectrl
function verifyRevision(project, revision, rsid) {
return ThResultSetModel.getResultSetsFromRevision(project.name, revision).then(
function (resultSets) {
const resultSet = resultSets[0];
return PushModel.getList({ repo: project.name, revision })
.then(async (resp) => {
const { results } = await resp.json();
const resultSet = results[0];
// TODO: this is a bit hacky to pass in 'original' as a text string
if (rsid === 'original') {
$scope.originalResultSet = resultSet;
} else {
$scope.newResultSet = resultSet;
}
$scope.$apply();
},
function (error) {
$scope.errors.push(error);

@@ -622,9 +642,9 @@ perf.controller('CompareSubtestResultsCtrl', [
}
$scope.titles[noiseMetricTestName] = $scope.platformList[0] + ': ' + testName + ' : ' + noiseMetricTestName;
// call digest explicitly so we don't have to worry about when promises
// call $apply explicitly so we don't have to worry about when promises
// get resolved (see bug 1470600)
$scope.$digest();
$scope.$apply();
}
$scope.dataLoading = true;

@@ -887,9 +907,9 @@ perf.controller('CompareSubtestResultsCtrl', [
}]);
perf.controller('CompareSubtestDistributionCtrl', ['$scope', '$stateParams', '$q',
'PhSeries', 'ThResultSetModel',
'PhSeries',
function CompareSubtestDistributionCtrl($scope, $stateParams, $q,
PhSeries, ThResultSetModel) {
PhSeries) {
$scope.originalRevision = $stateParams.originalRevision;
$scope.newRevision = $stateParams.newRevision;
$scope.originalSubtestSignature = $stateParams.originalSubtestSignature;

@@ -898,9 +918,11 @@ perf.controller('CompareSubtestDistributionCtrl', ['$scope', '$stateParams', '$q
const loadRepositories = RepositoryModel.getList();
const fetchAndDrawReplicateGraph = function (project, revision, subtestSignature, target) {
const replicateData = {};
return ThResultSetModel.getResultSetsFromRevision(project, revision).then(
(revisionData) => {
replicateData.resultSet = revisionData[0];
return PushModel.getList({ repo: project, revision })
.then(async (resp) => {
const { results } = await resp.json();
replicateData.resultSet = results[0];
return PhSeries.getSeriesData(project, {
signatures: subtestSignature,
push_id: replicateData.resultSet.id,

@@ -955,7 +977,8 @@ perf.controller('CompareSubtestDistributionCtrl', ['$scope', '$stateParams', '$q
});
};
$q.all([loadRepositories]).then((repos) => {
$q.all([loadRepositories]).then((results) => {
const repos = results[0];
$scope.originalProject = RepositoryModel.getRepo(
$stateParams.originalProject, repos);
$scope.newProject = RepositoryModel.getRepo(

@@ -982,6 +1005,7 @@ perf.controller('CompareSubtestDistributionCtrl', ['$scope', '$stateParams', '$q
$scope.newReplicateError = result.replicateDataError;
window.document.title = `${$scope.platform}: ${$scope.testName}`;
$scope.dataLoading = false;
$scope.$apply();
});
});
},

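Note: in these Angular controllers the old two-callback $http style (success handler, error handler) becomes a single async handler that branches on resp.ok and still calls $scope.$apply(), because the fetch promise resolves outside Angular's digest cycle. A hedged sketch of that shape; the names mirror the controller code above but this is not a drop-in replacement:

  function verifyRevision(project, revision) {
    return PushModel.getList({ repo: project.name, revision })
      .then(async (resp) => {
        if (resp.ok) {
          const { results } = await resp.json();
          $scope.resultSet = results[0];
        } else {
          $scope.errors.push(resp.statusText);
        }
        $scope.$apply(); // fetch resolves outside Angular's digest cycle
      })
      .catch(error => $scope.errors.push(error));
  }
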
@@ -3,6 +3,7 @@ import chunk from 'lodash/chunk';
import perf from '../../perf';
import { thDefaultRepo, phBlockers, phTimeRanges } from '../../constants';
import PushModel from '../../../models/push';
import RepositoryModel from '../../../models/repository';
const phDashboardValues = {

@@ -27,12 +28,9 @@ perf.value('defaultTimeRange', 86400 * 2);
perf.controller('dashCtrl', [
'$state', '$stateParams', '$scope', '$rootScope', '$q', '$httpParamSerializer',
'ThResultSetModel', 'PhSeries', 'PhCompare',
'defaultTimeRange',
'PhSeries', 'PhCompare', 'defaultTimeRange',
function dashCtrl($state, $stateParams, $scope, $rootScope, $q, $httpParamSerializer,
ThResultSetModel, PhSeries, PhCompare,
defaultTimeRange) {
PhSeries, PhCompare, defaultTimeRange) {
$scope.dataLoading = true;
$scope.timeRanges = phTimeRanges;
$scope.selectedTimeRange = $scope.timeRanges.find(timeRange =>

@@ -70,14 +68,17 @@ perf.controller('dashCtrl', [
let getSeriesList;
let resultSetId;
if ($scope.revision) {
getSeriesList = ThResultSetModel.getResultSetsFromRevision(
$scope.selectedRepo.name, $scope.revision).then(function (resultSets) {
resultSetId = resultSets[0].id;
return PhSeries.getSeriesList($scope.selectedRepo.name, {
push_id: resultSetId, subtests: 0 });
}, function () {
$scope.revisionNotFound = true;
});
getSeriesList = PushModel.getList({
repo: $scope.selectedRepo.name,
revision: $scope.revision,
}).then(async (resp) => {
const { results } = resp.json();
resultSetId = results[0].id;
return PhSeries.getSeriesList($scope.selectedRepo.name, {
push_id: resultSetId, subtests: 0 });
}, function () {
$scope.revisionNotFound = true;
});
} else {
getSeriesList = PhSeries.getSeriesList($scope.selectedRepo.name, {
interval: $scope.selectedTimeRange.value,

@@ -239,11 +240,9 @@ perf.controller('dashCtrl', [
perf.controller('dashSubtestCtrl', [
'$state', '$stateParams', '$scope', '$rootScope', '$q',
'ThResultSetModel', 'PhSeries', 'PhCompare',
'defaultTimeRange',
'PhSeries', 'PhCompare', 'defaultTimeRange',
function ($state, $stateParams, $scope, $rootScope, $q,
ThResultSetModel, PhSeries, PhCompare,
defaultTimeRange) {
PhSeries, PhCompare, defaultTimeRange) {
const baseSignature = $stateParams.baseSignature;
const variantSignature = $stateParams.variantSignature;

@@ -275,14 +274,17 @@ perf.controller('dashSubtestCtrl', [
let getSeriesList;
let resultSetId;
if ($scope.revision) {
getSeriesList = ThResultSetModel.getResultSetsFromRevision(
$scope.selectedRepo.name, $scope.revision).then(function (resultSets) {
resultSetId = resultSets[0].id;
return PhSeries.getSeriesList($scope.selectedRepo.name, {
parent_signature: [baseSignature, variantSignature],
framework: $scope.framework,
});
getSeriesList = PushModel.getList({
repo: $scope.selectedRepo.name,
revision: $scope.revision,
}).then(async (resp) => {
const { results } = await resp.json();
resultSetId = results[0].id;
return PhSeries.getSeriesList($scope.selectedRepo.name, {
parent_signature: [baseSignature, variantSignature],
framework: $scope.framework,
});
});
} else {
getSeriesList = PhSeries.getSeriesList($scope.selectedRepo.name, {
parent_signature: [baseSignature, variantSignature],

@@ -17,15 +17,14 @@ import {
phDefaultFramework,
thPerformanceBranches,
} from '../../constants';
import PushModel from '../../../models/push';
import RepositoryModel from '../../../models/repository';
perf.controller('GraphsCtrl', [
'$state', '$stateParams', '$scope', '$rootScope', '$uibModal',
'$window', '$q', '$timeout', 'PhSeries', 'PhAlerts',
'ThResultSetModel',
function GraphsCtrl($state, $stateParams, $scope, $rootScope,
$uibModal, $window, $q, $timeout, PhSeries,
PhAlerts, ThResultSetModel) {
$uibModal, $window, $q, $timeout, PhSeries, PhAlerts) {
var availableColors = ['maroon', 'navy', 'pink', 'turquoise', 'brown',
'red', 'green', 'blue', 'orange', 'purple'];

@@ -166,16 +165,17 @@ perf.controller('GraphsCtrl', [
resultSetId: prevResultSetId,
scopeKey: 'prevRevision',
}].forEach((resultRevision) => {
ThResultSetModel.getRevisions(
phSeries.projectName, resultRevision.resultSetId,
).then(function (revisions) {
$scope.tooltipContent[resultRevision.scopeKey] = revisions[0];
PushModel.get(resultRevision.resultSetId, { repo: phSeries.projectName })
.then(async (resp) => {
const push = await resp.json();
$scope.tooltipContent[resultRevision.scopeKey] = push.revision;
if ($scope.tooltipContent.prevRevision && $scope.tooltipContent.revision) {
$scope.tooltipContent.pushlogURL = $scope.tooltipContent.project.getPushLogRangeHref({
fromchange: $scope.tooltipContent.prevRevision,
tochange: $scope.tooltipContent.revision,
});
}
$scope.$apply();
}, function () {
$scope.tooltipContent.revisionInfoAvailable = false;
});

@@ -433,13 +433,19 @@ perf.controller('GraphsCtrl', [
...highlightPromises,
...$scope.seriesList.map((series) => {
if (series.visible) {
return ThResultSetModel.getResultSetsFromRevision(
series.projectName, rev).then((resultSets) => {
addHighlightedDatapoint(series, resultSets[0].id);
}, () => {
/* ignore cases where no result set exists
for revision */
});
return PushModel.getList({
repo: series.projectName,
revision: rev,
}).then(async (resp) => {
if (resp.ok) {
const { results } = await resp.json();
addHighlightedDatapoint(series, results[0].id);
$scope.$apply();
}
// ignore cases where no push exists
// for revision
});
}
return null;
})])];

@ -1,332 +0,0 @@
|
|||
import _ from 'lodash';
|
||||
import angular from 'angular';
|
||||
import jsyaml from 'js-yaml';
|
||||
import { slugid } from 'taskcluster-client-web';
|
||||
|
||||
import treeherder from '../treeherder';
|
||||
import taskcluster from '../../helpers/taskcluster';
|
||||
import { getProjectUrl, getServiceUrl } from '../../helpers/url';
|
||||
import JobModel from '../../models/job';
|
||||
import TaskclusterModel from '../../models/taskcluster';
|
||||
|
||||
treeherder.factory('ThResultSetModel', ['$http', '$location',
|
||||
'$q', '$interpolate', 'thNotify',
|
||||
function ($http, $location, $q, $interpolate, thNotify) {
|
||||
|
||||
const MAX_RESULTSET_FETCH_SIZE = 100;
|
||||
const taskclusterModel = new TaskclusterModel(thNotify);
|
||||
const convertDates = function (locationParams) {
|
||||
// support date ranges. we must convert the strings to a timezone
|
||||
// appropriate timestamp
|
||||
if (_.has(locationParams, 'startdate')) {
|
||||
locationParams.push_timestamp__gte = Date.parse(
|
||||
locationParams.startdate) / 1000;
|
||||
|
||||
delete locationParams.startdate;
|
||||
}
|
||||
if (_.has(locationParams, 'enddate')) {
|
||||
locationParams.push_timestamp__lt = Date.parse(
|
||||
locationParams.enddate) / 1000 + 84600;
|
||||
|
||||
delete locationParams.enddate;
|
||||
}
|
||||
return locationParams;
|
||||
};
|
||||
|
||||
// return whether an OLDEST resultset range is set.
|
||||
const hasLowerRange = function (locationParams) {
|
||||
return locationParams.fromchange || locationParams.startdate;
|
||||
};
|
||||
|
||||
// get the resultsets for this repo
|
||||
return {
|
||||
// used for polling new resultsets after initial load
|
||||
getResultSetsFromChange: function (repoName, revision, locationParams) {
|
||||
locationParams = convertDates(locationParams);
|
||||
_.extend(locationParams, {
|
||||
fromchange: revision,
|
||||
});
|
||||
|
||||
return $http.get(
|
||||
getProjectUrl('/resultset/', repoName),
|
||||
{ params: locationParams },
|
||||
);
|
||||
},
|
||||
|
||||
getResultSets: function (repoName, rsOffsetTimestamp, count, full, keep_filters) {
|
||||
rsOffsetTimestamp = typeof rsOffsetTimestamp === 'undefined' ? 0 : rsOffsetTimestamp;
|
||||
full = full === undefined ? true : full;
|
||||
keep_filters = keep_filters === undefined ? true : keep_filters;
|
||||
|
||||
const params = {
|
||||
full: full,
|
||||
};
|
||||
|
||||
// count defaults to 10, but can be no larger than the max.
|
||||
params.count = !count ? 10 : Math.min(count, MAX_RESULTSET_FETCH_SIZE);
|
||||
|
||||
if (rsOffsetTimestamp) {
|
||||
params.push_timestamp__lte = rsOffsetTimestamp;
|
||||
// we will likely re-fetch the oldest we already have, but
|
||||
// that's not guaranteed. There COULD be two resultsets
|
||||
// with the same timestamp, theoretically.
|
||||
params.count++;
|
||||
}
|
||||
|
||||
if (keep_filters) {
|
||||
// if there are any search params on the url line, they should
|
||||
// pass directly to the set of resultsets.
|
||||
// with the exception of ``repo``. That has no effect on the
|
||||
// service at this time, but it could be confusing.
|
||||
let locationParams = { ...$location.search() };
|
||||
delete locationParams.repo;
|
||||
|
||||
// if they submit an offset timestamp, then they have resultsets
|
||||
// and are fetching more. So don't honor the fromchange/tochange
|
||||
// or else we won't be able to fetch more resultsets.
|
||||
|
||||
// we DID already check for rsOffsetTimestamp above, but that was
|
||||
// not within the ``keep_filters`` check. If we don't
|
||||
// keep filters, we don't need to clone the $location.search().
|
||||
if (rsOffsetTimestamp) {
|
||||
delete locationParams.tochange;
|
||||
delete locationParams.fromchange;
|
||||
} else if (hasLowerRange(locationParams)) {
|
||||
// fetch the maximum number of resultsets if a lower range is specified
|
||||
params.count = MAX_RESULTSET_FETCH_SIZE;
|
||||
} else if (locationParams.revision) {
|
||||
// fetch a single resultset if `revision` is a URL param
|
||||
delete params.count;
|
||||
}
|
||||
|
||||
locationParams = convertDates(locationParams);
|
||||
_.extend(params, locationParams);
|
||||
}
|
||||
|
||||
return $http.get(
|
||||
getProjectUrl('/resultset/', repoName),
|
||||
{ params: params },
|
||||
);
|
||||
},
|
||||
getResultSetList: function (repoName, resultSetList, full) {
|
||||
return $http.get(
|
||||
getProjectUrl('/resultset/', repoName), {
|
||||
params: {
|
||||
full: full === undefined ? true : full,
|
||||
offset: 0,
|
||||
count: resultSetList.length,
|
||||
id__in: resultSetList.join(),
|
||||
},
|
||||
});
|
||||
},
|
||||
getResultSet: function (repoName, pk) {
|
||||
return $http.get(
|
||||
getProjectUrl(`/resultset/${pk}/`, repoName),
|
||||
);
|
||||
},
|
||||
get: function (uri) {
|
||||
return $http.get(getServiceUrl(uri));
|
||||
},
|
||||
getResultSetJobsUpdates: function (resultSetIdList, repoName, lastModified,
|
||||
locationParams) {
|
||||
// XXX: should never happen, but maybe sometimes does? see bug 1287501
|
||||
if (!angular.isDate(lastModified)) {
|
||||
alert('Invalid parameter passed to get job updates: ' +
|
||||
'please reload treeherder');
|
||||
return;
|
||||
}
|
||||
|
||||
const params = {
|
||||
result_set_id__in: resultSetIdList.join(','),
|
||||
count: 2000,
|
||||
last_modified__gt: lastModified.toISOString().replace('Z', ''),
|
||||
return_type: 'list',
|
||||
};
|
||||
_.extend(params, locationParams);
|
||||
return JobModel.getList(repoName, params, { fetch_all: true });
|
||||
},
|
||||
|
||||
getResultSetJobs: function (resultSetIdList, repoName, locationParams) {
|
||||
return resultSetIdList.map((resultSetId) => {
|
||||
const params = {
|
||||
return_type: 'list',
|
||||
result_set_id: resultSetId,
|
||||
count: 2000,
|
||||
};
|
||||
_.extend(params, locationParams);
|
||||
return JobModel.getList(repoName, params, { fetch_all: true });
});
},

getRevisions: function (projectName, resultSetId) {
return $http.get(getProjectUrl(
`/resultset/${resultSetId}/`, projectName), { cache: true }).then(
function (response) {
if (response.data.revisions.length > 0) {
return response.data.revisions.map(r => r.revision);
}
return $q.reject('No revisions found for result set ' +
resultSetId + ' in project ' + projectName);
});
},

getResultSetsFromRevision: function (projectName, revision) {
return $http.get(getProjectUrl(
`/resultset/?revision=${revision}`, projectName),
{ cache: true }).then(
function (response) {
if (response.data.results.length > 0) {
return response.data.results;
}
return $q.reject('No results found for revision ' +
revision + ' on project ' +
projectName);
});
},

cancelAll: function (resultset_id) {
const uri = resultset_id + '/cancel_all/';
return $http.post(getProjectUrl('/resultset/') + uri);
},

triggerMissingJobs: function (decisionTaskId) {
return taskclusterModel.load(decisionTaskId).then((results) => {
const actionTaskId = slugid();

// In this case we have actions.json tasks
if (results) {
const missingtask = results.actions.find(action =>
action.name === 'run-missing-tests');
// We'll fall back to actions.yaml if this isn't true
if (missingtask) {
return taskclusterModel.submit({
action: missingtask,
actionTaskId,
decisionTaskId,
taskId: null,
task: null,
input: {},
staticActionVariables: results.staticActionVariables,
}).then(() => `Request sent to trigger missing jobs via actions.json (${actionTaskId})`);
}
}
});
},

triggerAllTalosJobs: function (times, decisionTaskId) {
return taskclusterModel.load(decisionTaskId).then((results) => {
const actionTaskId = slugid();

// In this case we have actions.json tasks
if (results) {
const talostask = results.actions.find(action =>
action.name === 'run-all-talos');
// We'll fall back to actions.yaml if this isn't true
if (talostask) {
return taskclusterModel.submit({
action: talostask,
actionTaskId,
decisionTaskId,
taskId: null,
task: null,
input: { times },
staticActionVariables: results.staticActionVariables,
}).then(function () {
return `Request sent to trigger all talos jobs ${times} time(s) via actions.json (${actionTaskId})`;
});
}
}

// Otherwise we'll figure things out with actions.yml
const queue = taskcluster.getQueue();
const url = queue.buildUrl(queue.getLatestArtifact, decisionTaskId, 'public/action.yml');
return $http.get(url).then(function (resp) {
let action = resp.data;
const template = $interpolate(action);
action = template({
action: 'add-talos',
action_args: '--decision-task-id=' + decisionTaskId + ' --times=' + times,
});
const task = taskcluster.refreshTimestamps(jsyaml.safeLoad(action));
return queue.createTask(actionTaskId, task).then(function () {
return `Request sent to trigger all talos jobs ${times} time(s) via actions.yml (${actionTaskId})`;
});
});
});
},

triggerNewJobs: function (buildernames, decisionTaskId) {
const queue = taskcluster.getQueue();
const url = queue.buildUrl(
queue.getLatestArtifact,
decisionTaskId,
'public/full-task-graph.json',
);
return $http.get(url).then(function (resp) {
const graph = resp.data;

// Build a mapping of buildbot buildername to taskcluster tasklabel for bbb tasks
const builderToTask = Object.entries(graph).reduce((currentMap, [key, value]) => {
if (value && value.task && value.task.payload && value.task.payload.buildername) {
currentMap[value.task.payload.buildername] = key;
}
return currentMap;
}, {});
const allLabels = Object.keys(graph);

const tclabels = [];

buildernames.forEach(function (name) {
// The following has 2 cases that it accounts for
// 1. The name is a taskcluster task label, in which case we pass it on
// 2. The name is a buildbot buildername _scheduled_ through bbb, in which case we
//    translate it to the taskcluster label that triggers it.
name = builderToTask[name] || name;
if (allLabels.indexOf(name) !== -1) {
tclabels.push(name);
}
});

if (tclabels.length === 0) {
return;
}

return taskclusterModel.load(decisionTaskId).then((results) => {
const actionTaskId = slugid();
// In this case we have actions.json tasks
if (results) {
const addjobstask = results.actions.find(action =>
action.name === 'add-new-jobs');
// We'll fall back to actions.yaml if this isn't true
if (addjobstask) {
return taskclusterModel.submit({
action: addjobstask,
actionTaskId,
decisionTaskId,
taskId: null,
task: null,
input: { tasks: tclabels },
staticActionVariables: results.staticActionVariables,
}).then(() => `Request sent to trigger new jobs via actions.json (${actionTaskId})`);
}
}

// Otherwise we'll figure things out with actions.yml
const url = queue.buildUrl(queue.getLatestArtifact, decisionTaskId, 'public/action.yml');
return $http.get(url).then(function (resp) {
let action = resp.data;
const template = $interpolate(action);
const taskLabels = tclabels.join(',');
action = template({
action: 'add-tasks',
action_args: `--decision-id=${decisionTaskId} --task-labels=${taskLabels}`,
});
const task = taskcluster.refreshTimestamps(jsyaml.safeLoad(action));
return queue.createTask(actionTaskId, task).then(() => `Request sent to trigger new jobs via actions.yml (${actionTaskId})`);
});
});
});
},
};
}]);
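For reference, a minimal standalone sketch of the buildername-to-task-label translation that triggerNewJobs performs above. The sample graph and names here are invented for illustration; the real graph comes from the decision task's public/full-task-graph.json artifact.

// Hypothetical sample graph: one plain taskcluster label, one bbb task with a buildername.
const graph = {
  'test-linux64/opt-mochitest-1': { task: { payload: {} } },
  'windows7-32-bbb-mochitest-1': { task: { payload: { buildername: 'Windows 7 opt test mochitest-1' } } },
};

// Same reduce as above: map buildbot buildernames onto the task labels that schedule them.
const builderToTask = Object.entries(graph).reduce((currentMap, [key, value]) => {
  if (value && value.task && value.task.payload && value.task.payload.buildername) {
    currentMap[value.task.payload.buildername] = key;
  }
  return currentMap;
}, {});
const allLabels = Object.keys(graph);

const requested = [
  'test-linux64/opt-mochitest-1',   // already a task label: passed through
  'Windows 7 opt test mochitest-1', // bbb buildername: translated to its label
  'some-unknown-name',              // not in the graph: dropped
];
const tclabels = requested
  .map(name => builderToTask[name] || name)
  .filter(name => allLabels.includes(name));

console.log(tclabels); // ['test-linux64/opt-mochitest-1', 'windows7-32-bbb-mochitest-1']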
@ -10,13 +10,15 @@ import { escapeId, getGroupMapKey } from '../../helpers/aggregateId';
import treeherder from '../treeherder';
import JobModel from '../../models/job';
import RunnableJobModel from '../../models/runnableJob';
import PushModel from '../../models/push';
import { getQueryString } from '../../helpers/location';
import { parseQueryParams } from '../../helpers/url';

treeherder.factory('ThResultSetStore', [
'$rootScope', '$q', '$location', '$interval',
'ThResultSetModel', 'thNotify', 'thJobFilters', '$timeout',
'thNotify', 'thJobFilters', '$timeout',
function (
$rootScope, $q, $location, $interval, ThResultSetModel,
thNotify, thJobFilters, $timeout) {
$rootScope, $q, $location, $interval, thNotify, thJobFilters, $timeout) {

// indexOf doesn't work on objects so we need to map thPlatformMap to an array (keeping only indexes)
var platformArray = Object.keys(thPlatformMap);
@ -83,14 +85,16 @@ treeherder.factory('ThResultSetStore', [
// push has been created on the server out of
// order with regards to its push_timestamp, we will
// still pick it up.
var fromChangeRev = repoData.pushes[repoData.pushes.length - 1].revision;
ThResultSetModel.getResultSetsFromChange(
repoData.name,
fromChangeRev,
rsPollingParams,
).then(function (data) {
prependPushes(data.data);
});
const fromchange = repoData.pushes[repoData.pushes.length - 1].revision;
PushModel.getList({ ...parseQueryParams(getQueryString()), fromchange, ...rsPollingParams })
.then(async (resp) => {
if (resp.ok) {
const data = await resp.json();
prependPushes(data);
} else {
thNotify.send('Error fetching new push data', 'danger', { sticky: true });
}
});
} else {
// cancel the interval for the polling, because
// the parameters mean we can get no more pushes.
@ -124,54 +128,48 @@ treeherder.factory('ThResultSetStore', [
return (!_.has(rsParams, 'revision'));
};

var pollJobs = function () {
var pushIdList = repoData.pushes
.map(x => x.id);
const pollJobs = async () => {
const pushIdList = repoData.pushes.map(x => x.id);
const options = {};

var jobUpdatesPromise;
if (!lastJobUpdate || (Date.now() - lastPolltime) > maxPollInterval) {
// it is possible that some pushes might not have any jobs initially
// also, if it's been too long, just refetch everything since
// getting updates can be extremely slow (and taxing on the
// server) if there are a lot of them
jobUpdatesPromise = $q.all(ThResultSetModel.getResultSetJobs(
pushIdList,
repoData.name,
));
} else {
jobUpdatesPromise = ThResultSetModel.getResultSetJobsUpdates(
pushIdList,
repoData.name,
lastJobUpdate);
// it is possible that some pushes might not have any jobs initially
// also, if it's been too long, just refetch everything since
// getting updates can be extremely slow (and taxing on the
// server) if there are a lot of them
if (lastJobUpdate && (Date.now() - lastPolltime) <= maxPollInterval) {
options.lastModified = lastJobUpdate;
}

lastPolltime = Date.now();
PushModel.getJobs(pushIdList, options).then((data) => {
const jobList = data;

if (jobList.length > 0) {
lastJobUpdate = getLastModifiedJobTime(jobList);
// group joblist by 'result_set_id'
let jobListByPush = jobList.reduce((acc, job, idx, arr, list = job => job.result_set_id) => (
{ ...acc, [list(job)]: [...acc[list(job)] || [], job] }
), {});
jobListByPush = Object.values(jobListByPush);
jobListByPush.forEach(singlePushJobList => mapPushJobs(singlePushJobList));
$rootScope.$emit(thEvents.jobsLoaded);
} else if (lastJobUpdate) {
// try to update the last poll interval to the greater of the
// last job update or the current time minus a small multiple of the
// job poll interval
// (this depends on the client having a reasonably accurate internal
// clock, but it should hopefully prevent us from getting too
// far behind in cases where we've stopped receiving job updates
// due e.g. to looking at a completed push)
const lastPollInterval = new Date(Date.now() - (5 * jobPollInterval));
lastJobUpdate = lastJobUpdate > lastPollInterval ? lastJobUpdate : lastPollInterval;
}
lastPolltime = Date.now();
jobUpdatesPromise
.then(function (jobList) {
if (jobList.length > 0) {
lastJobUpdate = getLastModifiedJobTime(jobList);
// group joblist by 'result_set_id'
var jobListByPush = jobList.reduce((acc, job, idx, arr, list = job => job.result_set_id) => (
{ ...acc, [list(job)]: [...acc[list(job)] || [], job] }
), {});
jobListByPush = Object.values(jobListByPush);
jobListByPush
.forEach(singlePushJobList =>
mapPushJobs(singlePushJobList));
$rootScope.$emit(thEvents.jobsLoaded);
} else if (lastJobUpdate) {
// try to update the last poll interval to the greater of the
// last job update or the current time minus a small multiple of the
// job poll interval
// (this depends on the client having a reasonably accurate internal
// clock, but it should hopefully prevent us from getting too
// far behind in cases where we've stopped receiving job updates
// due e.g. to looking at a completed push)
const lastPollInterval = new Date(Date.now() - (5 * jobPollInterval));
lastJobUpdate = lastJobUpdate > lastPollInterval ? lastJobUpdate : lastPollInterval;
}
schedulePoll();
});
schedulePoll();
}).catch((error) => {
thNotify.send(`Error getting new jobs: ${error.toString()}`, 'danger', { sticky: true });
});
};

var registerJobsPoller = function () {
if (!lastPolltime) {
lastPolltime = Date.now();
@ -545,6 +543,9 @@ treeherder.factory('ThResultSetStore', [
.then((jobList) => {
jobList.forEach((job) => { updateJob(job); });
$timeout($rootScope.$emit(thEvents.jobsLoaded));
})
.catch((error) => {
thNotify.send(`Error getting jobs: ${error.toString()}`, 'danger', { sticky: true });
});
}
// retry to fetch the unfetched jobs later
@ -676,6 +677,7 @@ treeherder.factory('ThResultSetStore', [

repoData.loadingStatus.appending = false;
$rootScope.$emit(thEvents.pushesLoaded);
return data;
};

/**
@ -754,14 +756,14 @@ treeherder.factory('ThResultSetStore', [
// this failure case is unlikely, but I guess you
// never know
if (!job.taskcluster_metadata) {
return $q.reject('Decision task missing taskcluster metadata');
return Promise.reject('Decision task missing taskcluster metadata');
}
return job.taskcluster_metadata.task_id;
});
}

// no decision task, we fail
return $q.reject('No decision task');
return Promise.reject('No decision task');
};

var toggleSelectedRunnableJob = function (pushId, buildername) {
@ -782,73 +784,73 @@ treeherder.factory('ThResultSetStore', [
return repoData.jobMap;
};

var fetchPushes = function (count, keepFilters) {
var fetchPushes = function (count) {
/**
* Get the next batch of pushes based on our current offset.
* @param count How many to fetch
*/
repoData.loadingStatus.appending = true;
var isAppend = (repoData.pushes.length > 0);
var pushes = { results: [] };
var loadResultsets = ThResultSetModel.getResultSets(repoData.name,
repoData.rsMapOldestTimestamp,
count,
true,
keepFilters)
.then((data) => { pushes = data.data; });
const isAppend = (repoData.pushes.length > 0);
const options = { ...parseQueryParams(getQueryString()), count };

return $q.all([loadResultsets])
.then(() => appendPushes(pushes),
() => {
thNotify.send('Error retrieving push data!', 'danger', { sticky: true });
appendPushes({ results: [] });
})
.then(() => {
// if ``nojobs`` is on the query string, then don't load jobs.
// this allows someone to more quickly load ranges of revisions
// when they don't care about the specific jobs and results.
if ($location.search().nojobs) {
return;
}
var jobsPromiseList = ThResultSetModel.getResultSetJobs(
pushes.results.map(result => result.id),
repoData.name,
);
$q.all(jobsPromiseList)
.then((pushJobList) => {
var lastModifiedTimes = pushJobList
.map(jobList => getLastModifiedJobTime(jobList))
.filter(x => x);
if (lastModifiedTimes.length) {
var lastModifiedTime = max(lastModifiedTimes);
// subtract 3 seconds to take in account a possible delay
// between the job requests
lastModifiedTime.setSeconds(lastModifiedTime.getSeconds() - 3);
if (repoData.rsMapOldestTimestamp) {
options.push_timestamp__lte = repoData.rsMapOldestTimestamp;
}
return PushModel.getList(options).then(async (resp) => {
if (resp.ok) {
const data = await resp.json();

// only update lastJobUpdate if previously unset, as we
// may have other pushes which need an earlier update
// if it's been a while since we last polled
if (!lastJobUpdate) {
lastJobUpdate = lastModifiedTime;
}
}
});
/*
* this list of promises will tell us when the
* mapPushJobs function will be applied to all the jobs
* ie when we can register the job poller
*/
var mapPushJobsPromiseList = jobsPromiseList
.map(jobsPromise => jobsPromise
.then(jobs => mapPushJobs(jobs)));
$q.all(mapPushJobsPromiseList)
.then(() => {
$rootScope.$emit(thEvents.jobsLoaded);
if (!isAppend) {
registerJobsPoller();
}
});
});
return appendPushes(data.results.length ? data : { results: [] });
}
thNotify.send('Error retrieving push data!', 'danger', { sticky: true });
return appendPushes({ results: [] });
}).then((pushes) => {
// if ``nojobs`` is on the query string, then don't load jobs.
// this allows someone to more quickly load ranges of revisions
// when they don't care about the specific jobs and results.
if ($location.search().nojobs) {
return;
}
const jobsPromiseList = pushes.results.map(push => PushModel.getJobs(push.id));

Promise.all(jobsPromiseList)
.then((pushJobList) => {
var lastModifiedTimes = pushJobList
.map(jobList => getLastModifiedJobTime(jobList))
.filter(x => x);
if (lastModifiedTimes.length) {
var lastModifiedTime = max(lastModifiedTimes);
// subtract 3 seconds to take in account a possible delay
// between the job requests
lastModifiedTime.setSeconds(lastModifiedTime.getSeconds() - 3);

// only update lastJobUpdate if previously unset, as we
// may have other pushes which need an earlier update
// if it's been a while since we last polled
if (!lastJobUpdate) {
lastJobUpdate = lastModifiedTime;
}
}
});
/*
* this list of promises will tell us when the
* mapPushJobs function will be applied to all the jobs
* ie when we can register the job poller
*/
var mapPushJobsPromiseList = jobsPromiseList
.map(jobsPromise => jobsPromise
.then(jobs => mapPushJobs(jobs)));
$q.all(mapPushJobsPromiseList)
.then(() => {
$rootScope.$emit(thEvents.jobsLoaded);
if (!isAppend) {
registerJobsPoller();
}
})
.catch((error) => {
thNotify.send(`Error getting jobs: ${error.toString()}`, 'danger', { sticky: true });
});
});
};

var getLastModifiedJobTime = function (jobList) {
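The new job-polling path above groups the flat job list by push before handing each bucket to mapPushJobs. A minimal standalone sketch of that grouping, with job objects invented for illustration:

const jobList = [
  { id: 101, result_set_id: 1 },
  { id: 102, result_set_id: 2 },
  { id: 103, result_set_id: 1 },
];

// Same reduce as in pollJobs: the trailing default parameter is only a keying
// helper, so each job lands in a bucket keyed by its result_set_id.
let jobListByPush = jobList.reduce((acc, job, idx, arr, list = job => job.result_set_id) => (
  { ...acc, [list(job)]: [...acc[list(job)] || [], job] }
), {});
jobListByPush = Object.values(jobListByPush);

console.log(jobListByPush);
// [ [ { id: 101, ... }, { id: 103, ... } ], [ { id: 102, ... } ] ]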
@ -0,0 +1,209 @@
import { slugid } from 'taskcluster-client-web';
import template from 'lodash/template';
import templateSettings from 'lodash/templateSettings';
import jsyaml from 'js-yaml';

import { thMaxPushFetchSize } from '../js/constants';
import { create } from '../helpers/http';
import { getUrlParam } from '../helpers/location';
import taskcluster from '../helpers/taskcluster';
import { createQueryParams, getProjectUrl } from '../helpers/url';
import JobModel from './job';
import TaskclusterModel from './taskcluster';

const uri_base = '/resultset/';

const convertDates = function (locationParams) {
// support date ranges. we must convert the strings to a timezone
// appropriate timestamp
if ('startdate' in locationParams) {
locationParams.push_timestamp__gte = Date.parse(locationParams.startdate) / 1000;

delete locationParams.startdate;
}
if ('enddate' in locationParams) {
locationParams.push_timestamp__lt = Date.parse(locationParams.enddate) / 1000 + 84600;

delete locationParams.enddate;
}
return locationParams;
};

export default class PushModel {
static getList(options = {}) {
const transformedOptions = convertDates(options);
const repoName = transformedOptions.repo;
delete transformedOptions.repo;
const params = {
full: true,
count: 10,
...transformedOptions,
};

if (transformedOptions.push_timestamp__lte) {
// we will likely re-fetch the oldest we already have, but
// that's not guaranteed. There COULD be two pushes
// with the same timestamp, theoretically.
params.count++;
}
if (params.count > thMaxPushFetchSize || transformedOptions.push_timestamp__gte) {
// fetch the maximum number of pushes
params.count = thMaxPushFetchSize;
}
return fetch(`${getProjectUrl(uri_base, repoName)}${createQueryParams(params)}`);
}

static get(pk, options = {}) {
const repoName = options.repo || getUrlParam('repo');
return fetch(getProjectUrl(`${uri_base}${pk}/`, repoName));
}

static getJobs(pushIds, options = {}) {
const { lastModified, repo } = options;
delete options.lastModified;
delete options.repo;
const params = {
return_type: 'list',
count: 2000,
...options,
};

if (!Array.isArray(pushIds)) {
params.result_set_id = pushIds;
} else {
params.result_set_id__in = pushIds.join(',');
}
if (lastModified) {
// XXX: should never happen, but maybe sometimes does? see bug 1287501
if (!(lastModified instanceof Date)) {
throw Error(`Invalid parameter passed to get job updates: ${lastModified}. Please reload treeherder`);
}
params.last_modified__gt = lastModified.toISOString().replace('Z', '');
}
return JobModel.getList(repo, params, { fetch_all: true });
}

static cancelAll(pushId) {
return create(`${getProjectUrl(uri_base)}${pushId}/cancel_all/`);
}

static triggerMissingJobs(decisionTaskId) {
return TaskclusterModel.load(decisionTaskId).then((results) => {
const actionTaskId = slugid();

// In this case we have actions.json tasks
if (results) {
const missingtask = results.actions.find(
action => action.name === 'run-missing-tests');

// We'll fall back to actions.yaml if this isn't true
if (missingtask) {
return TaskclusterModel.submit({
action: missingtask,
actionTaskId,
decisionTaskId,
taskId: null,
task: null,
input: {},
staticActionVariables: results.staticActionVariables,
}).then(() => `Request sent to trigger missing jobs via actions.json (${actionTaskId})`);
}
}
});
}

static triggerAllTalosJobs(times, decisionTaskId) {
return TaskclusterModel.load(decisionTaskId).then((results) => {
const actionTaskId = slugid();

// In this case we have actions.json tasks
if (results) {
const talostask = results.actions.find(
action => action.name === 'run-all-talos');

if (talostask) {
return TaskclusterModel.submit({
action: talostask,
actionTaskId,
decisionTaskId,
taskId: null,
task: null,
input: { times },
staticActionVariables: results.staticActionVariables,
}).then(() => (
`Request sent to trigger all talos jobs ${times} time(s) via actions.json (${actionTaskId})`
));
}
} else {
throw Error('Trigger All Talos Jobs no longer supported for this repository.');
}
});
}

static triggerNewJobs(buildernames, decisionTaskId) {
const queue = taskcluster.getQueue();
const url = queue.buildUrl(
queue.getLatestArtifact, decisionTaskId, 'public/full-task-graph.json');

return fetch(url).then(resp => resp.json().then((graph) => {
// Build a mapping of buildbot buildername to taskcluster tasklabel for bbb tasks
const builderToTask = Object.entries(graph).reduce((currentMap, [key, value]) => {
if (value && value.task && value.task.payload && value.task.payload.buildername) {
currentMap[value.task.payload.buildername] = key;
}
return currentMap;
}, {});
const allLabels = Object.keys(graph);
const tclabels = [];

buildernames.forEach(function (name) {
// The following has 2 cases that it accounts for
// 1. The name is a taskcluster task label, in which case we pass it on
// 2. The name is a buildbot buildername _scheduled_ through bbb, in which case we
//    translate it to the taskcluster label that triggers it.
name = builderToTask[name] || name;
if (allLabels.indexOf(name) !== -1) {
tclabels.push(name);
}
});
if (tclabels.length === 0) {
throw Error(`No tasks able to run for ${buildernames.join(', ')}`);
}
return TaskclusterModel.load(decisionTaskId).then((results) => {
const actionTaskId = slugid();
// In this case we have actions.json tasks
if (results) {
const addjobstask = results.actions.find(action => action.name === 'add-new-jobs');
// We'll fall back to actions.yaml if this isn't true
if (addjobstask) {
return TaskclusterModel.submit({
action: addjobstask,
actionTaskId,
decisionTaskId,
taskId: null,
task: null,
input: { tasks: tclabels },
staticActionVariables: results.staticActionVariables,
}).then(() => `Request sent to trigger new jobs via actions.json (${actionTaskId})`);
}
}

// Otherwise we'll figure things out with actions.yml
// TODO: Remove when esr52 is EOL.
const url = queue.buildUrl(
queue.getLatestArtifact, decisionTaskId, 'public/action.yml');

return fetch(url).then(resp => resp.text().then((actionTemplate) => {
templateSettings.interpolate = /{{([\s\S]+?)}}/g;
const compiled = template(actionTemplate);
const taskLabels = tclabels.join(',');
const action = compiled({ decision_task_id: decisionTaskId, task_labels: taskLabels });
const task = taskcluster.refreshTimestamps(jsyaml.safeLoad(action));

return queue.createTask(actionTaskId, task)
.then(() => `Request sent to trigger new jobs via actions.yml (${actionTaskId})`);
}));
});
}));
}
}
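The actions.yml fallback in triggerNewJobs renders the fetched template with mustache-style placeholders via lodash. A minimal standalone sketch of that rendering step; the template string below is invented for illustration, not the real in-tree action.yml:

import template from 'lodash/template';
import templateSettings from 'lodash/templateSettings';

// Switch lodash interpolation from <%= ... %> to {{ ... }}, matching the
// placeholders used by the action template.
templateSettings.interpolate = /{{([\s\S]+?)}}/g;

const actionTemplate = [
  'parameters:',
  '  decision_task_id: {{decision_task_id}}',
  '  task_labels: {{task_labels}}',
].join('\n');

const compiled = template(actionTemplate);
const rendered = compiled({
  decision_task_id: 'abc123',
  task_labels: 'test-linux64/opt-mochitest-1,test-linux64/opt-reftest-1',
});

console.log(rendered);
// parameters:
//   decision_task_id: abc123
//   task_labels: test-linux64/opt-mochitest-1,test-linux64/opt-reftest-1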
@ -7,21 +7,13 @@ import taskcluster from '../helpers/taskcluster';
import { tcRootUrl } from '../helpers/url';

export default class TaskclusterModel {
constructor(notify) {
this.notify = notify;
}

taskInContext(tagSetList, taskTags) {
static taskInContext(tagSetList, taskTags) {
return tagSetList.some(tagSet => Object.keys(tagSet)
.every(tag => taskTags[tag] && taskTags[tag] === tagSet[tag]),
);
}

render(template, context) {
return jsone(template, context);
}

async submit({ action, actionTaskId, decisionTaskId, taskId,
static async submit({ action, actionTaskId, decisionTaskId, taskId,
task, input, staticActionVariables,
}) {
const context = _.defaults({}, {

@ -73,10 +65,9 @@ export default class TaskclusterModel {
}
}

async load(decisionTaskID, job) {
static async load(decisionTaskID, job) {
if (!decisionTaskID) {
this.notify.send('No decision task, can\'t find taskcluster actions', 'danger', { sticky: true });
return;
throw Error('No decision task, can\'t find taskcluster actions');
}

const queue = taskcluster.getQueue();

@ -116,8 +107,7 @@ export default class TaskclusterModel {
return null;
}
if (jsonData.version !== 1) {
this.notify.send('Wrong version of actions.json, can\'t continue', 'danger', { sticky: true });
return;
throw Error('Wrong version of actions.json, can\'t continue');
}

// The filter in the value of the actions key is an implementation
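For reference, the tag matching that the now-static taskInContext performs, as a standalone sketch; the tag sets and task tags below are invented for illustration:

const taskInContext = (tagSetList, taskTags) => (
  tagSetList.some(tagSet => Object.keys(tagSet)
    .every(tag => taskTags[tag] && taskTags[tag] === tagSet[tag]))
);

// A task matches an action if its tags satisfy every tag of at least one tag set.
const tagSetList = [{ kind: 'test' }, { kind: 'build', os: 'linux' }];

console.log(taskInContext(tagSetList, { kind: 'test', os: 'win' }));    // true
console.log(taskInContext(tagSetList, { kind: 'build', os: 'macos' })); // false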