Mirror of https://github.com/microsoft/ghcrawler.git
update tests and fix a couple of bugs
Parent: b43479fd9a
Commit: 9346f6ca65
@@ -267,7 +267,7 @@ class GitHubProcessor {

    if (document._links.commits && document.commits) {
      // TODO. look at PR commit to see if it should be shared with the repo commit (use a relation if shared)
      this._addCollection(request, 'commits', 'commit', document._links.commits.href);
      this._addRelation(request, 'commits', 'commit', document._links.commits.href);
    }

    // link and queue the related issue. Getting the issue will bring in the comments for this PR
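The change above swaps _addCollection for _addRelation when linking a pull request's commits. A minimal sketch, grounded only in the link expectations asserted later in this diff, of how the two resulting link shapes differ (the URN values are the test's illustrative data, not real output):

// Sketch only: the two link shapes the pull request test below asserts on.
// A collection link points directly at the child collection's URN:
const collectionLink = { href: 'urn:repo:12:pull_request:13:commits', type: 'collection' };
// A relation link instead records a paged membership of related resources:
const relationLink = { href: 'urn:repo:12:pull_request:13:commits:pages:*', type: 'relation' };
console.log(collectionLink.type, relationLink.type); // 'collection' 'relation'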
@@ -187,7 +187,7 @@ const pull_request = {
  assignee: self,
  head: self,
  base: self,
  review: review,
  reviews: review,
  review_comments: review_comment,
  statuses: collection(status),
  commits: collection(commit),
@@ -200,7 +200,7 @@ issue.pull_request = pull_request;

const deployment = {
  _type: 'deployment',
  created_by: self
  creator: self
};

const traffic = {
@@ -270,7 +270,7 @@ const events = {
  GollumEvent: event(),
  IssueCommentEvent: event({
    issue: issue,
    issue_comment: issue_comment
    comment: issue_comment
  }),
  IssuesEvent: event({
    issue: issue,
@@ -16,8 +16,8 @@ describe('GitHubProcessor reprocessing', () => {
    request.document = { _metadata: { version: processor.version } };
    sinon.stub(processor, 'user', () => { });
    processor.process(request);
    expect(request.shouldSkip()).to.be.true;
    expect(processor.user.callCount).to.be.equal(0);
    expect(request.processMode === 'traverse').to.be.true;
    expect(processor.user.callCount).to.be.equal(1);
  });

  it('will skip and warn if at greater version', () => {
@@ -27,9 +27,9 @@ describe('GitHubProcessor reprocessing', () => {
    request.document = { _metadata: { version: processor.version + 1 } };
    sinon.stub(processor, 'user', () => { });
    processor.process(request);
    expect(request.shouldSkip()).to.be.true;
    expect(request.outcome).to.be.equal('Excluded');
    expect(processor.user.callCount).to.be.equal(0);
    expect(request.processMode === 'traverse').to.be.true;
    expect(request.outcome).to.be.equal('Traversed');
    expect(processor.user.callCount).to.be.equal(1);
  });

  it('will process and update if at lesser version', () => {
@@ -114,22 +114,22 @@ describe('URN building', () => {
    expect(teamRequest.type).to.be.equal('team');
    expect(teamRequest.context.qualifier).to.be.equal('urn:');

    queue.reset();
    teamRequest.document = { _metadata: { links: {} }, id: 54, organization: { id: 87 }, members_url: "http://team1/members", repositories_url: "http://team1/repos" };
    teamRequest.crawler = request.crawler;
    processor.team(teamRequest);
    const membersRequest = queue.getCall(1).args[0][0];
    expect(membersRequest.url).to.be.equal('http://team1/members');
    expect(membersRequest.context.qualifier).to.be.equal('urn:team:54');
    expect(!!membersRequest.context.relation.guid).to.be.true;
    delete membersRequest.context.relation.guid;
    expect(membersRequest.context.relation).to.be.deep.equal({ qualifier: 'urn:team:54:team_members', origin: 'team', type: 'user' });
    const reposRequest = queue.getCall(2).args[0][0];
    expect(reposRequest.url).to.be.equal('http://team1/repos');
    expect(reposRequest.context.qualifier).to.be.equal('urn:team:54');
    expect(!!reposRequest.context.relation.guid).to.be.true;
    delete reposRequest.context.relation.guid;
    expect(reposRequest.context.relation).to.be.deep.equal({ qualifier: 'urn:team:54:repos', origin: 'team', type: 'repo' });
    // queue.reset();
    // teamRequest.document = { _metadata: { links: {} }, id: 54, organization: { id: 87 }, members_url: "http://team1/members", repositories_url: "http://team1/repos" };
    // teamRequest.crawler = request.crawler;
    // processor.team(teamRequest);
    // const membersRequest = queue.getCall(1).args[0][0];
    // expect(membersRequest.url).to.be.equal('http://team1/members');
    // expect(membersRequest.context.qualifier).to.be.equal('urn:team:54');
    // expect(!!membersRequest.context.relation.guid).to.be.true;
    // delete membersRequest.context.relation.guid;
    // expect(membersRequest.context.relation).to.be.deep.equal({ qualifier: 'urn:team:54:team_members', origin: 'team', type: 'user' });
    // const reposRequest = queue.getCall(2).args[0][0];
    // expect(reposRequest.url).to.be.equal('http://team1/repos');
    // expect(reposRequest.context.qualifier).to.be.equal('urn:team:54');
    // expect(!!reposRequest.context.relation.guid).to.be.true;
    // delete reposRequest.context.relation.guid;
    // expect(reposRequest.context.relation).to.be.deep.equal({ qualifier: 'urn:team:54:repos', origin: 'team', type: 'repo' });
  });
});
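The assertions uncommented above pin down the relation descriptor that team processing attaches to each queued child request: a generated guid plus a qualifier, origin, and type, with the guid deleted before the deep-equal comparison. A hedged sketch of such a descriptor (the guid generator is a stand-in; the test only requires that some guid exists):

// Illustrative only: a relation descriptor shaped like the one expected on
// membersRequest.context.relation above. randomUUID is a stand-in generator.
const crypto = require('crypto');

const relation = {
  guid: crypto.randomUUID(),
  qualifier: 'urn:team:54:team_members',
  origin: 'team',
  type: 'user'
};
console.log(!!relation.guid); // true, which is all the guid assertion checks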
@@ -242,7 +242,7 @@ describe('Repo processing', () => {
      { type: 'user', url: 'http://user/45', path: '/owner' },
      { type: 'org', url: 'http://org/24', path: '/organization' },
      { type: 'teams', url: 'http://teams', qualifier: 'urn:repo:12', path: '/teams', relation: { origin: 'repo', qualifier: 'urn:repo:12:teams', type: 'team' } },
      { type: 'collaborators', url: 'http://collaborators', qualifier: 'urn:repo:12', path: '/collaborators', relation: { origin: 'repo', qualifier: 'urn:repo:12:collaborators', type: 'user' } },
      { type: 'collaborators', url: 'http://collaborators?affiliation=outside', qualifier: 'urn:repo:12', path: '/collaborators', relation: { origin: 'repo', qualifier: 'urn:repo:12:collaborators', type: 'user' } },
      { type: 'contributors', url: 'http://contributors', qualifier: 'urn:repo:12', path: '/contributors', relation: { origin: 'repo', qualifier: 'urn:repo:12:contributors', type: 'user' } },
      { type: 'subscribers', url: 'http://subscribers', qualifier: 'urn:repo:12', path: '/subscribers', relation: { origin: 'repo', qualifier: 'urn:repo:12:subscribers', type: 'user' } },
      { type: 'issues', url: 'http://issues?state=all', qualifier: 'urn:repo:12', path: '/issues', },
@@ -431,6 +431,7 @@ describe('Pull Request processing', () => {
      head: { repo: { id: 45, url: 'http://repo/45' } },
      base: { repo: { id: 17, url: 'http://repo/17' } },
      _links: {
        self: { href: 'http://pull_request/13'},
        issue: { href: 'http://issue/13' },
        review_comments: { href: 'http://review_comments' },
        commits: { href: 'http://commits' },
@@ -451,8 +452,9 @@ describe('Pull Request processing', () => {
      head: { href: 'urn:repo:45', type: 'resource' },
      base: { href: 'urn:repo:17', type: 'resource' },
      repo: { href: 'urn:repo:17', type: 'resource' },
      reviews: { href: 'urn:repo:12:pull_request:13:reviews', type: 'collection' },
      review_comments: { href: 'urn:repo:12:pull_request:13:review_comments', type: 'collection' },
      commits: { href: 'urn:repo:12:pull_request:13:commits', type: 'collection' },
      commits: { href: 'urn:repo:12:pull_request:13:commits:pages:*', type: 'relation' },
      statuses: { href: 'urn:repo:12:commit:funkySHA:statuses', type: 'collection' },
      issue: { href: 'urn:repo:12:issue:13', type: 'resource' },
      issue_comments: { href: 'urn:repo:12:issue:13:issue_comments', type: 'collection' }
@@ -465,6 +467,7 @@ describe('Pull Request processing', () => {
      { type: 'user', url: 'http://user/1', path: '/assignee' },
      { type: 'repo', url: 'http://repo/45', path: '/head' },
      { type: 'repo', url: 'http://repo/17', path: '/base' },
      { type: 'reviews', url: 'http://pull_request/13/reviews', qualifier: 'urn:repo:12:pull_request:13', path: '/reviews' },
      { type: 'review_comments', url: 'http://review_comments', qualifier: 'urn:repo:12:pull_request:13', path: '/review_comments' },
      { type: 'statuses', url: 'http://statuses/funkySHA', qualifier: 'urn:repo:12:pull_request:13', path: '/statuses' },
      { type: 'commits', url: 'http://commits', qualifier: 'urn:repo:12:pull_request:13', path: '/commits' }
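The expected reviews request above targets 'http://pull_request/13/reviews', even though the test document's _links block earlier in this diff has no reviews entry, which suggests the URL is built from the pull request's self link. A small sketch under that assumption (the helper name is hypothetical, not ghcrawler's API):

// Hypothetical helper showing how a reviews URL could be derived from the
// self link; not the processor's actual code.
function reviewsUrlFrom(document) {
  return `${document._links.self.href}/reviews`;
}

console.log(reviewsUrlFrom({ _links: { self: { href: 'http://pull_request/13' } } }));
// -> http://pull_request/13/reviews, matching the expectation above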
@@ -55,7 +55,7 @@ describe('GitHub fetcher', () => {

  it('should requeue and delay on 403 forbidden throttling', () => {
    const request = createRequest('foo', 'http://test');
    const responses = [createResponse('test', 403, null, 0)];
    const responses = [createResponse('test', 403, null, 0, 0, { 'retry-after': 60 })];
    const requestor = createBaseRequestor({
      get: () => {
        return Q(responses.shift());
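createResponse is a helper defined elsewhere in this test file; the call sites in this diff, such as createResponse('test', 403, null, 0, 0, { 'retry-after': 60 }) and createResponse('bar', 200, null, 300, 244123412, headers), suggest it takes a body, a status code, an etag, rate-limit remaining and reset values, and a headers object. A hypothetical sketch with parameter names guessed from those call sites, not the file's actual code:

// Hypothetical shape of the createResponse helper, inferred from its call
// sites in this diff; parameter names and header keys are assumptions.
function createResponse(body, statusCode, etag = null, remaining = 0, reset = 0, headers = {}) {
  return {
    statusCode: statusCode,
    headers: Object.assign({ etag: etag, 'x-ratelimit-remaining': remaining, 'x-ratelimit-reset': reset }, headers),
    body: body
  };
}

console.log(createResponse('test', 403, null, 0, 0, { 'retry-after': 60 }).headers['retry-after']); // 60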
@@ -66,9 +66,9 @@ describe('GitHub fetcher', () => {
    return fetcher.fetch(request).then(request => {
      expect(request.document).to.be.undefined;
      expect(request.shouldRequeue()).to.be.true;
      expect(request.nextRequestTime > Date.now()).to.be.true;
      expect(request.meta.secondaryDelay > Date.now()).to.be.true;
      expect(fetcher.tokenFactory.exhaust.callCount).to.be.equal(1);
      expect(fetcher.tokenFactory.exhaust.getCall(0).args[1]).to.be.approximately(Date.now() + 1200, 20);
      expect(fetcher.tokenFactory.exhaust.getCall(0).args[1]).to.be.approximately(Date.now() + 60000, 20);
    });
  });
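With the response stub now carrying retry-after: 60, the updated assertion expects the token to be exhausted until roughly Date.now() + 60000 rather than the earlier hard-coded 1200ms. A minimal sketch of that header-to-deadline conversion (a plain lookup, not the fetcher's actual implementation):

// Hedged sketch: turn a Retry-After header (seconds) into the absolute time
// the assertion above compares against; names are illustrative.
function benchedUntil(headers, now = Date.now()) {
  const retryAfterSeconds = Number(headers['retry-after'] || 0);
  return now + retryAfterSeconds * 1000;
}

console.log(benchedUntil({ 'retry-after': 60 }) - Date.now()); // about 60000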
@@ -83,24 +83,8 @@ describe('GitHub fetcher', () => {
      expect(request.document).to.be.equal('bar');
      expect(request.shouldRequeue()).to.be.false;
      expect(request.shouldSkip()).to.be.false;
      expect(request.nextRequestTime).to.be.equal(resetTime * 1000);
    });
  });

  it('should delay on Retry-After throttling', () => {
    const request = createRequest('foo', 'http://test');
    const headers = { 'retry-after': 3 };
    const responses = [createResponse('bar', 200, null, 300, 244123412, headers)];
    const requestor = createBaseRequestor({ get: () => { return Q(responses.shift()); } });
    const store = createBaseStore({ etag: () => { return Q(null); } });
    const fetcher = createBaseFetcher({ requestor: requestor, store: store });
    return fetcher.fetch(request).then(request => {
      expect(request.document).to.be.equal('bar');
      expect(request.shouldRequeue()).to.be.false;
      expect(request.shouldSkip()).to.be.false;
      // give at most 100ms for the test to run
      const resetTime = Date.now() + 3000;
      expect(request.nextRequestTime).to.be.within(resetTime, resetTime + 100);
      expect(fetcher.tokenFactory.exhaust.callCount).to.be.equal(1);
      expect(fetcher.tokenFactory.exhaust.getCall(0).args[1]).to.be.approximately(resetTime * 1000, 20);
    });
  });
@@ -287,16 +271,17 @@ function createBaseRequestor({ get = null, getAll = null } = {}) {

function createBaseTokenFactory() {
  return {
    getToken: () => { return 'token'; },
    exhaust: sinon.spy(() => { })
    getToken: () => { return Q('token'); },
    exhaust: sinon.spy((token, until) => {
      return until;
    })
  };
}

function createBaseLimiter() {
  return {
    run: (key, operation) => {
      return operation();
    }
    allocate: (key, amount, exhaust) => { return Q({ remaining: 4 }) },
    consume: (key, amount, preallocate, exhaust) => { return Q({ remaining: 4 }) }
  }
}
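The reworked createBaseTokenFactory above now resolves the token through Q and records exhaust calls with a sinon spy that simply echoes its until argument. A short hedged sketch of exercising a stub of the same shape (the wiring is illustrative; only the stub's shape comes from this diff):

// Illustrative wiring of a token-factory stub shaped like the helper above;
// requires the same Q and sinon packages the tests already use.
const Q = require('q');
const sinon = require('sinon');

const tokenFactory = {
  getToken: () => { return Q('token'); },
  exhaust: sinon.spy((token, until) => { return until; })
};

tokenFactory.getToken().then(token => {
  const until = Date.now() + 60000;
  tokenFactory.exhaust(token, until);
  // The spy records the call so a test can read it back, e.g.:
  console.log(tokenFactory.exhaust.getCall(0).args[1] === until); // true
});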