This commit is contained in:
Marco 2019-06-11 21:53:41 +02:00 committed by GitHub
Parent f270aabb28
Commit d21b2b32a6
No known key found for this signature
GPG key ID: 4AEE18F83AFDEB23
49 changed files with 4494 additions and 2976 deletions

.flake8 Normal file (3 lines changed)

@ -0,0 +1,3 @@
[flake8]
exclude = .git,__pycache__,models,db,cache
ignore = E101, E111, E114, E115, E116, E117, E121, E122, E123, E124, E125, E126, E127, E128, E129, E131, E133, E2, E3, E5, E501, E701, E702, E703, E704, W1, W2, W3, W503, W504, C101

.isort.cfg Normal file (2 lines changed)

@ -0,0 +1,2 @@
[settings]
known_third_party = apiclient,connection,dateutil,hglib,httplib2,icalendar,oauth2client,pytz,requests,requests_futures,responses,setuptools,six,whatthepatch

.pre-commit-config.yaml Normal file (41 lines changed)

@ -0,0 +1,41 @@
repos:
- repo: https://github.com/asottile/seed-isort-config
rev: v1.9.1
hooks:
- id: seed-isort-config
- repo: https://github.com/pre-commit/mirrors-isort
rev: v4.3.20
hooks:
- id: isort
- repo: https://github.com/ambv/black
rev: stable
hooks:
- id: black
- repo: https://gitlab.com/pycqa/flake8
rev: 3.7.7
hooks:
- id: flake8
additional_dependencies: ['flake8-coding==1.3.1', 'flake8-copyright==0.2.2', 'flake8-debugger==3.1.0', 'flake8-mypy==17.8.0']
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.2.3
hooks:
- id: trailing-whitespace
exclude: ^tests/uplift/|^tests/html/
- id: check-yaml
- id: mixed-line-ending
exclude: ^tests/html/
- id: name-tests-test
args: ['--django']
exclude: tests/auto_mock.py
- id: check-json
exclude: ^tests/mocks/
- repo: https://github.com/codespell-project/codespell
rev: v1.15.0
hooks:
- id: codespell
exclude: libmozdata/modules.json
- repo: meta
hooks:
- id: check-hooks-apply
- id: check-useless-excludes
python_version: python3.6

.travis.yml

@ -1,12 +1,14 @@
dist: xenial
language: python
python:
- "3.5"
- "3.6"
- "3.7"
install:
- pip install --upgrade pip
- pip install -r requirements.txt
- pip install -r test-requirements.txt
script:
- flake8 .
- pre-commit run --all-files
- coverage run --source=libmozdata -m unittest discover tests/ --verbose
- python setup.py sdist
- pip install dist/libmozdata-$(cat VERSION).tar.gz

CODE_OF_CONDUCT.md

@ -1,8 +1,8 @@
# Community Participation Guidelines
This repository is governed by Mozilla's code of conduct and etiquette guidelines.
For more details, please read the
[Mozilla Community Participation Guidelines](https://www.mozilla.org/about/governance/policies/participation/).
## How to Report
For more information on how to report violations of the Community Participation Guidelines, please read our '[How to Report](https://www.mozilla.org/about/governance/policies/participation/reporting/)' page.

libmozdata/BZInfo.py

@ -3,6 +3,7 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import re
from . import utils
from .bugzilla import Bugzilla
@ -17,29 +18,42 @@ class BZInfo(Bugzilla):
Args:
bugids (List[str]): list of bug ids or search query
"""
super(BZInfo, self).__init__(bugids,
include_fields=['id', 'severity', 'component', 'product', 'creator', 'assigned_to'],
bughandler=self.__bug_handler,
historyhandler=self.__history_handler)
super(BZInfo, self).__init__(
bugids,
include_fields=[
"id",
"severity",
"component",
"product",
"creator",
"assigned_to",
],
bughandler=self.__bug_handler,
historyhandler=self.__history_handler,
)
# commenthandler=self.__comment_handler)
self.info = {}
for bugid in self.bugids:
self.info[bugid] = {'ownership': [],
'reviewers': set(),
'commenters': {},
'authorized': False}
self.reply_pattern = re.compile(r'^\(In reply to .* comment #([0-9]+)\)')
self.dupbug_pattern = re.compile(r'\*\*\* Bug [0-9]+ has been marked as a duplicate of this bug. \*\*\*')
self.review_pattern = re.compile(r'review\?\(([^\)]+)\)')
self.needinfo_pattern = re.compile(r'needinfo\?\(([^\)]+)\)')
self.feedback_pattern = re.compile(r'feedback\?\(([^\)]+)\)')
self.info[bugid] = {
"ownership": [],
"reviewers": set(),
"commenters": {},
"authorized": False,
}
self.reply_pattern = re.compile(r"^\(In reply to .* comment #([0-9]+)\)")
self.dupbug_pattern = re.compile(
r"\*\*\* Bug [0-9]+ has been marked as a duplicate of this bug. \*\*\*"
)
self.review_pattern = re.compile(r"review\?\(([^\)]+)\)")
self.needinfo_pattern = re.compile(r"needinfo\?\(([^\)]+)\)")
self.feedback_pattern = re.compile(r"feedback\?\(([^\)]+)\)")
self.get_data()
def get(self):
"""Get the information
Returns:
dict: dictionary containing informations
dict: dictionary containing information
"""
self.wait()
return self.info
@ -64,12 +78,12 @@ class BZInfo(Bugzilla):
# TODO: We could weight a contrib with a gaussian which depends on the time
collaborations = {}
for info in self.get().values():
if info['authorized']:
owner = info['owner']
if info["authorized"]:
owner = info["owner"]
if owner not in collaborations:
collaborations[owner] = 0
reviewers = info['reviewers']
feedbacks = info['feedbacks']
reviewers = info["reviewers"]
feedbacks = info["feedbacks"]
collabs = set()
if reviewers and owner in reviewers:
collabs |= reviewers[owner]
@ -78,7 +92,11 @@ class BZInfo(Bugzilla):
if collabs:
collaborations[owner] += len(collabs)
for person in collabs:
collaborations[person] = collaborations[person] + 1 if person in collaborations else 1
collaborations[person] = (
collaborations[person] + 1
if person in collaborations
else 1
)
# maybe we should compute the percentage of collaborations just to give an idea
@ -96,9 +114,11 @@ class BZInfo(Bugzilla):
comps_prods = {}
for info in self.get().values():
if info['authorized']:
comp_prod = (info['component'], info['product'])
comps_prods[comp_prod] = comps_prods[comp_prod] + 1 if comp_prod in comps_prods else 1
if info["authorized"]:
comp_prod = (info["component"], info["product"])
comps_prods[comp_prod] = (
comps_prods[comp_prod] + 1 if comp_prod in comps_prods else 1
)
return utils.get_best(comps_prods)
@ -109,12 +129,16 @@ class BZInfo(Bugzilla):
bug (dict): json data
data (dict): the container which will receive the data
"""
self.info[str(bug['id'])].update({'authorized': True,
'severity': bug['severity'],
'component': bug['component'],
'product': bug['product'],
'reporter': bug['creator'],
'owner': bug['assigned_to_detail']['email']})
self.info[str(bug["id"])].update(
{
"authorized": True,
"severity": bug["severity"],
"component": bug["component"],
"product": bug["product"],
"reporter": bug["creator"],
"owner": bug["assigned_to_detail"]["email"],
}
)
def __history_handler(self, bug):
"""Handler to use with the history retrieved from bugzilla
@ -126,23 +150,23 @@ class BZInfo(Bugzilla):
ownership = []
reviewers = {}
feedbacks = {}
bugid = str(bug['id'])
history = bug['history']
bugid = str(bug["id"])
history = bug["history"]
for h in history:
who = h['who']
who = h["who"]
owner = None
changes = h['changes']
changes = h["changes"]
for change in changes:
nam = change['field_name']
rem = change['removed']
add = change['added']
nam = change["field_name"]
rem = change["removed"]
add = change["added"]
if nam == 'status':
if rem == 'NEW' and add == 'ASSIGNED':
if nam == "status":
if rem == "NEW" and add == "ASSIGNED":
owner = who
elif nam == 'assigned_to':
elif nam == "assigned_to":
owner = add
elif nam == 'flagtypes.name':
elif nam == "flagtypes.name":
# Get the reviewers
for m in self.review_pattern.finditer(add):
if who in reviewers:
@ -157,14 +181,14 @@ class BZInfo(Bugzilla):
else:
feedbacks[who] = set([m.group(1)])
if owner and (not ownership or ownership[-1]['owner'] != owner):
ownership.append({'owner': owner,
'touch_by': who,
'touch_when': h['when']})
if owner and (not ownership or ownership[-1]["owner"] != owner):
ownership.append(
{"owner": owner, "touch_by": who, "touch_when": h["when"]}
)
self.info[bugid].update({'ownership': ownership,
'reviewers': reviewers,
'feedbacks': feedbacks})
self.info[bugid].update(
{"ownership": ownership, "reviewers": reviewers, "feedbacks": feedbacks}
)
def __comment_handler(self, bug, bugid):
"""Handler to use with the comment retrieved from bugzilla
@ -173,21 +197,21 @@ class BZInfo(Bugzilla):
bug (dict): json data
data (dict): the container which will receive the data
"""
assert 'comments' in bug
assert "comments" in bug
commenters = {}
authors = []
for comment in bug['comments']:
text = comment['text']
for comment in bug["comments"]:
text = comment["text"]
if not self.dupbug_pattern.match(text):
author = comment['author']
author = comment["author"]
authors.append(author)
if author not in commenters:
commenters[author] = []
for m in self.reply_pattern.finditer(comment['raw_text']):
for m in self.reply_pattern.finditer(comment["raw_text"]):
n = int(m.group(1))
if n >= 1 and n <= len(authors):
commenters[authors[n - 1]].append(author)
self.info[bugid].update({'commenters': commenters})
self.info[bugid].update({"commenters": commenters})

libmozdata/FXRevision.py

@ -3,13 +3,14 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import re
from connection import Connection
class FXRevision(Connection):
ARCHIVES_URL = 'http://archive.mozilla.org'
NIGHTLY_URL = ARCHIVES_URL + '/pub/firefox/nightly/'
ARCHIVES_URL = "http://archive.mozilla.org"
NIGHTLY_URL = ARCHIVES_URL + "/pub/firefox/nightly/"
def __init__(self, versions, fx_version, os):
super(FXRevision, self).__init__(self.ARCHIVES_URL)
@ -17,7 +18,9 @@ class FXRevision(Connection):
self.fx_version = fx_version
self.os = os
self.info = {}
pattern = re.compile('([0-9]{4})([0-9]{2})([0-9]{2})([0-9]{2})([0-9]{2})([0-9]{2})')
pattern = re.compile(
"([0-9]{4})([0-9]{2})([0-9]{2})([0-9]{2})([0-9]{2})([0-9]{2})"
)
for version in versions:
m = pattern.search(version)
self.dates[version] = [m.group(i) for i in range(1, 7)]
@ -29,17 +32,29 @@ class FXRevision(Connection):
return self.info
def __make_url(self, l):
return '%s%s/%s/%s-mozilla-central/firefox-%s.en-US.%s.json' % (self.NIGHTLY_URL, l[0], l[1], '-'.join(l), self.fx_version, self.os)
return "%s%s/%s/%s-mozilla-central/firefox-%s.en-US.%s.json" % (
self.NIGHTLY_URL,
l[0],
l[1],
"-".join(l),
self.fx_version,
self.os,
)
def __info_cb(self, res, *args, **kwargs):
json = res.json()
self.info[json['buildid']] = json['moz_source_stamp']
self.info[json["buildid"]] = json["moz_source_stamp"]
def __get_info(self):
for date in self.dates.values():
self.results.append(self.session.get(self.__make_url(date),
timeout=self.TIMEOUT,
hooks={'response': self.__info_cb}))
self.results.append(
self.session.get(
self.__make_url(date),
timeout=self.TIMEOUT,
hooks={"response": self.__info_cb},
)
)
# fxr = FXRevision(['20160223030304'], '47.0a1', 'linux-i686')
# pprint(fxr.get())

libmozdata/FileStats.py

@ -3,22 +3,22 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import argparse
from datetime import (datetime, timedelta)
import numbers
from datetime import datetime, timedelta
from pprint import pprint
from .HGFileInfo import HGFileInfo
from . import config, modules, utils
from .BZInfo import BZInfo
from . import modules
from . import utils
from . import config
from .HGFileInfo import HGFileInfo
class FileStats(object):
"""Stats about a file in the repo.
"""
def __init__(self, path, channel='nightly', node='default', utc_ts=None, max_days=None):
def __init__(
self, path, channel="nightly", node="default", utc_ts=None, max_days=None
):
"""Constructor
Args:
@ -27,31 +27,43 @@ class FileStats(object):
node (Optional[str]): the node, by default 'default'
utc_ts (Optional[int]): UTC timestamp, file pushdate <= utc_ts
"""
self.utc_ts = utc_ts if isinstance(utc_ts, numbers.Number) and utc_ts > 0 else None
self.max_days = max_days if isinstance(max_days, numbers.Number) else int(config.get('FileStats', 'MaxDays', 3))
self.utc_ts_from = utils.get_timestamp(datetime.utcfromtimestamp(utc_ts) + timedelta(-self.max_days)) if isinstance(utc_ts, numbers.Number) and utc_ts > 0 else None
self.utc_ts = (
utc_ts if isinstance(utc_ts, numbers.Number) and utc_ts > 0 else None
)
self.max_days = (
max_days
if isinstance(max_days, numbers.Number)
else int(config.get("FileStats", "MaxDays", 3))
)
self.utc_ts_from = (
utils.get_timestamp(
datetime.utcfromtimestamp(utc_ts) + timedelta(-self.max_days)
)
if isinstance(utc_ts, numbers.Number) and utc_ts > 0
else None
)
self.path = path
self.hi = HGFileInfo(path, channel=channel, node=node)
self.module = modules.module_from_path(path)
def get_static_info(self):
info = {
'path': self.path,
'guilty': None,
'needinfo': None,
'components': set()
"path": self.path,
"guilty": None,
"needinfo": None,
"components": set(),
}
if self.module is not None:
info['module'] = self.module['name']
info['components'].update(self.module['bugzillaComponents'])
info['owners'] = self.module['owners']
info['peers'] = self.module['peers']
info["module"] = self.module["name"]
info["components"].update(self.module["bugzillaComponents"])
info["owners"] = self.module["owners"]
info["peers"] = self.module["peers"]
return info
def get_last_patches(self):
return self.hi.get(self.path, self.utc_ts_from, self.utc_ts)['patches']
return self.hi.get(self.path, self.utc_ts_from, self.utc_ts)["patches"]
def get_info(self, guilty_only=False):
"""Get info
@ -73,37 +85,55 @@ class FileStats(object):
stats = {}
last_author = None
for patch in last:
author = patch['user']
author = patch["user"]
if not last_author:
last_author = author
stats[author] = stats[author] + 1 if author in stats else 1
info['guilty'] = {'main_author': utils.get_best(stats) if stats else None,
'last_author': last_author,
'patches': last}
info["guilty"] = {
"main_author": utils.get_best(stats) if stats else None,
"last_author": last_author,
"patches": last,
}
bugs = self.hi.get(self.path)['bugs']
bugs = self.hi.get(self.path)["bugs"]
bi = BZInfo(bugs) if bugs else None
if bi:
# find out the good person to query for a needinfo
info['needinfo'] = bi.get_best_collaborator()
info["needinfo"] = bi.get_best_collaborator()
comp_prod = bi.get_best_component_product()
if comp_prod:
info['infered_component'] = comp_prod[1] + '::' + comp_prod[0]
info['bugs'] = len(bugs)
info["infered_component"] = comp_prod[1] + "::" + comp_prod[0]
info["bugs"] = len(bugs)
return info
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='File Stats')
parser.add_argument('-p', '--path', action='store', help='file path')
parser.add_argument('-n', '--node', action='store', default='default', help='Mercurial node, by default \'default\'')
parser.add_argument('-c', '--channel', action='store', default='nightly', help='release channel')
parser.add_argument('-d', '--date', action='store', default='today', help='max date for pushdate, format YYYY-mm-dd')
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="File Stats")
parser.add_argument("-p", "--path", action="store", help="file path")
parser.add_argument(
"-n",
"--node",
action="store",
default="default",
help="Mercurial node, by default 'default'",
)
parser.add_argument(
"-c", "--channel", action="store", default="nightly", help="release channel"
)
parser.add_argument(
"-d",
"--date",
action="store",
default="today",
help="max date for pushdate, format YYYY-mm-dd",
)
args = parser.parse_args()
if args.path:
fs = FileStats(args.path, args.channel, args.node, utils.get_timestamp(args.date))
fs = FileStats(
args.path, args.channel, args.node, utils.get_timestamp(args.date)
)
pprint(fs.get_info())
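
The same API can be used as a library rather than through the argument parser above; a rough sketch with a hypothetical path:

from pprint import pprint
from libmozdata.FileStats import FileStats

fs = FileStats("dom/base/nsDocument.cpp")  # hypothetical path; channel/node default to nightly/default
pprint(fs.get_static_info())  # module name, owners, peers, Bugzilla components
pprint(fs.get_info())         # adds guilty patches, needinfo candidate and bug count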

libmozdata/HGFileInfo.py

@ -4,9 +4,11 @@
import logging
import re
import six
from .connection import Query
from . import hgmozilla
from .connection import Query
class HGFileInfo(object):
@ -18,7 +20,7 @@ class HGFileInfo(object):
MAX_REV_COUNT = 4095
def __init__(self, paths, channel='nightly', node='default', date_type='push'):
def __init__(self, paths, channel="nightly", node="default", date_type="push"):
"""Constructor
Args:
@ -28,13 +30,13 @@ class HGFileInfo(object):
"""
self.channel = channel
self.node = node
self.date_type = 'date' if date_type == 'creation' else 'pushdate'
self.date_type = "date" if date_type == "creation" else "pushdate"
self.data = {}
self.paths = [paths] if isinstance(paths, six.string_types) else paths
for p in self.paths:
self.data[p] = []
self.bug_pattern = re.compile(r'[\t ]*[Bb][Uu][Gg][\t ]*([0-9]+)')
self.rev_pattern = re.compile(r'r=([a-zA-Z0-9]+)')
self.bug_pattern = re.compile(r"[\t ]*[Bb][Uu][Gg][\t ]*([0-9]+)")
self.rev_pattern = re.compile(r"r=([a-zA-Z0-9]+)")
self.results = []
self.__get_info(self.paths, self.node)
@ -48,8 +50,8 @@ class HGFileInfo(object):
for result in self.results:
result.wait()
author_pattern = re.compile(r'<([^>]+)>')
email_pattern = re.compile(r'<?([\w\-\._\+%]+@[\w\-\._\+%]+)>?')
author_pattern = re.compile(r"<([^>]+)>")
email_pattern = re.compile(r"<?([\w\-\._\+%]+@[\w\-\._\+%]+)>?")
entries = self.data[path]
@ -62,38 +64,43 @@ class HGFileInfo(object):
# no pushdate
# TODO: find a way to estimate the pushdate (e.g. (prev + next) / 2 or use the author date)
if entry[self.date_type] == '':
logging.getLogger(__name__).warning('Entry for file %s with node %s has no pushdate' % (path, entry['node']))
if entry[self.date_type] == "":
logging.getLogger(__name__).warning(
"Entry for file %s with node %s has no pushdate"
% (path, entry["node"])
)
continue
assert isinstance(entry[self.date_type], list)
utc_date = entry[self.date_type][0]
if (utc_ts_from is not None and utc_ts_from > utc_date) or utc_ts_to < utc_date:
if (
utc_ts_from is not None and utc_ts_from > utc_date
) or utc_ts_to < utc_date:
continue
m = author_pattern.search(entry['user'])
m = author_pattern.search(entry["user"])
if m is None:
m = email_pattern.search(entry['user'])
m = email_pattern.search(entry["user"])
if m:
entry['user'] = m.group(1)
patch_author = entry['user']
entry["user"] = m.group(1)
patch_author = entry["user"]
if authors and patch_author not in authors:
continue
if patch_author not in authors_result:
authors_result[patch_author] = {'count': 1, 'reviewers': {}}
authors_result[patch_author] = {"count": 1, "reviewers": {}}
else:
authors_result[patch_author]['count'] += 1
authors_result[patch_author]["count"] += 1
info_desc = self.__get_info_from_desc(entry['desc'])
starter = info_desc['starter']
info_desc = self.__get_info_from_desc(entry["desc"])
starter = info_desc["starter"]
if starter:
bugs.add(info_desc['starter'])
bugs.add(info_desc["starter"])
reviewers = info_desc['reviewers']
reviewers = info_desc["reviewers"]
if reviewers:
_reviewers = authors_result[patch_author]['reviewers']
_reviewers = authors_result[patch_author]["reviewers"]
for reviewer in reviewers:
if reviewer not in _reviewers:
_reviewers[reviewer] = 1
@ -102,11 +109,7 @@ class HGFileInfo(object):
patches.append(entry)
return {
'authors': authors_result,
'bugs': bugs,
'patches': patches,
}
return {"authors": authors_result, "bugs": bugs, "patches": patches}
def __get_info_from_desc(self, desc):
"""Get some information from the patch description
@ -118,18 +121,16 @@ class HGFileInfo(object):
dict: some information
"""
desc = desc.strip()
info = {'starter': '',
'refs': set(),
'reviewers': set()}
s = info['refs']
info = {"starter": "", "refs": set(), "reviewers": set()}
s = info["refs"]
for m in self.bug_pattern.finditer(desc):
if m.start(0) == 0:
# the description begins with Bug 1234....
info['starter'] = m.group(1)
info["starter"] = m.group(1)
s.add(m.group(1))
for m in self.rev_pattern.finditer(desc):
info['reviewers'].add(m.group(1))
info["reviewers"].add(m.group(1))
return info
@ -140,11 +141,11 @@ class HGFileInfo(object):
json (dict): json
info (dict): info
"""
entries = json['entries']
entries = json["entries"]
if entries:
if len(entries) == HGFileInfo.MAX_REV_COUNT + 1:
self.data[path].extend(entries[:-1])
last_node = entries[-1]['node']
last_node = entries[-1]["node"]
self.__get_info([path], last_node)
else:
self.data[path].extend(entries)
@ -152,14 +153,14 @@ class HGFileInfo(object):
def __get_info(self, paths, node):
"""Get info
"""
__base = {'node': node,
'file': None,
'revcount': HGFileInfo.MAX_REV_COUNT + 1}
__base = {"node": node, "file": None, "revcount": HGFileInfo.MAX_REV_COUNT + 1}
queries = []
url = hgmozilla.FileInfo.get_url(self.channel)
for path in paths:
cparams = __base.copy()
cparams['file'] = path
queries.append(Query(url, cparams, handler=self.__handler, handlerdata=path))
cparams["file"] = path
queries.append(
Query(url, cparams, handler=self.__handler, handlerdata=path)
)
self.results.append(hgmozilla.FileInfo(queries=queries))
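
A minimal sketch of the class above, assuming hg.mozilla.org is reachable and using a hypothetical path:

from libmozdata.HGFileInfo import HGFileInfo

path = "widget/windows/nsWindow.cpp"  # hypothetical
hi = HGFileInfo(path)
data = hi.get(path)     # waits for the json-filelog queries to finish
print(data["authors"])  # {author: {"count": n, "reviewers": {...}}}
print(data["bugs"])     # bug numbers scraped from patch descriptions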

libmozdata/__init__.py

@ -8,8 +8,10 @@ import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())

libmozdata/bugzilla.py

@ -3,29 +3,47 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import functools
import six
import re
import requests
from .connection import (Connection, Query)
from . import config
from . import utils
from .handler import Handler
import six
import libmozdata.versions
from . import config, utils
from .connection import Connection, Query
from .handler import Handler
class Bugzilla(Connection):
"""Connection to bugzilla.mozilla.org
"""
URL = config.get('Bugzilla', 'URL', 'https://bugzilla.mozilla.org')
URL = config.get("Bugzilla", "URL", "https://bugzilla.mozilla.org")
# URL = config.get('Allizgub', 'URL', 'https://bugzilla-dev.allizom.org')
API_URL = URL + '/rest/bug'
ATTACHMENT_API_URL = API_URL + '/attachment'
TOKEN = config.get('Bugzilla', 'token', '')
API_URL = URL + "/rest/bug"
ATTACHMENT_API_URL = API_URL + "/attachment"
TOKEN = config.get("Bugzilla", "token", "")
# TOKEN = config.get('Allizgub', 'token', '')
BUGZILLA_CHUNK_SIZE = 100
def __init__(self, bugids=None, include_fields='_default', bughandler=None, bugdata=None, historyhandler=None, historydata=None, commenthandler=None, commentdata=None, comment_include_fields=None, attachmenthandler=None, attachmentdata=None, attachment_include_fields=None, queries=None, **kwargs):
def __init__(
self,
bugids=None,
include_fields="_default",
bughandler=None,
bugdata=None,
historyhandler=None,
historydata=None,
commenthandler=None,
commentdata=None,
comment_include_fields=None,
attachmenthandler=None,
attachmentdata=None,
attachment_include_fields=None,
queries=None,
**kwargs
):
"""Constructor
Args:
@ -69,14 +87,14 @@ class Bugzilla(Connection):
def get_header(self):
header = super(Bugzilla, self).get_header()
header['X-Bugzilla-API-Key'] = self.get_apikey()
header["X-Bugzilla-API-Key"] = self.get_apikey()
return header
def put(self, data, attachment=False, retry_on_failure=False):
"""Put some data in bugs
Args:
data (dict): a dictionnary
data (dict): a dictionary
"""
failures = []
if self.bugids:
@ -86,7 +104,7 @@ class Bugzilla(Connection):
ids = self.__get_bugs_list()
url = Bugzilla.ATTACHMENT_API_URL if attachment else Bugzilla.API_URL
url += '/'
url += "/"
to_retry = ids
header = self.get_header()
@ -94,7 +112,7 @@ class Bugzilla(Connection):
error = True
if res.status_code == 200:
json = res.json()
if not json.get('error', False):
if not json.get("error", False):
error = False
if error:
@ -109,15 +127,17 @@ class Bugzilla(Connection):
for _ids in Connection.chunks(_to_retry):
first_id = _ids[0]
if len(_ids) >= 2:
data['ids'] = _ids
elif 'ids' in data:
del data['ids']
self.session.put(url + first_id,
json=data,
headers=header,
verify=True,
timeout=self.TIMEOUT,
hooks={'response': functools.partial(cb, _ids)}).result()
data["ids"] = _ids
elif "ids" in data:
del data["ids"]
self.session.put(
url + first_id,
json=data,
headers=header,
verify=True,
timeout=self.TIMEOUT,
hooks={"response": functools.partial(cb, _ids)},
).result()
return failures
def get_data(self):
@ -178,14 +198,27 @@ class Bugzilla(Connection):
bugids = list(set(self.bugids).union(set(bz.bugids)))
include_fields = __merge_fields(self.include_fields, bz.include_fields)
comment_include_fields = __merge_fields(self.comment_include_fields, bz.comment_include_fields)
attachment_include_fields = __merge_fields(self.attachment_include_fields, bz.attachment_include_fields)
comment_include_fields = __merge_fields(
self.comment_include_fields, bz.comment_include_fields
)
attachment_include_fields = __merge_fields(
self.attachment_include_fields, bz.attachment_include_fields
)
bughandler = self.bughandler.merge(bz.bughandler)
historyhandler = self.historyhandler.merge(bz.historyhandler)
commenthandler = self.commenthandler.merge(bz.commenthandler)
attachmenthandler = self.attachmenthandler.merge(bz.attachmenthandler)
return Bugzilla(bugids=bugids, include_fields=include_fields, bughandler=bughandler, historyhandler=historyhandler, commenthandler=commenthandler, attachmenthandler=attachmenthandler, comment_include_fields=comment_include_fields, attachment_include_fields=attachment_include_fields)
return Bugzilla(
bugids=bugids,
include_fields=include_fields,
bughandler=bughandler,
historyhandler=historyhandler,
commenthandler=commenthandler,
attachmenthandler=attachmenthandler,
comment_include_fields=comment_include_fields,
attachment_include_fields=attachment_include_fields,
)
def __get_no_private_ids(self):
if not self.no_private_bugids:
@ -196,7 +229,7 @@ class Bugzilla(Connection):
def get_nightly_version():
def handler(json, data):
max_version = -1
pat = re.compile('cf_status_firefox([0-9]+)')
pat = re.compile("cf_status_firefox([0-9]+)")
for key in json.keys():
m = pat.match(key)
if m:
@ -206,16 +239,16 @@ class Bugzilla(Connection):
data[0] = max_version
nightly_version = [-1]
Bugzilla(bugids=['1234567'], bughandler=handler, bugdata=nightly_version).wait()
Bugzilla(bugids=["1234567"], bughandler=handler, bugdata=nightly_version).wait()
return nightly_version[0]
@staticmethod
def get_links(bugids):
if isinstance(bugids, six.string_types) or isinstance(bugids, int):
return 'https://bugzilla.mozilla.org/' + str(bugids)
return "https://bugzilla.mozilla.org/" + str(bugids)
else:
return ['https://bugzilla.mozilla.org/' + str(bugid) for bugid in bugids]
return ["https://bugzilla.mozilla.org/" + str(bugid) for bugid in bugids]
@staticmethod
def follow_dup(bugids, only_final=True):
@ -228,30 +261,32 @@ class Bugzilla(Connection):
Returns:
dict: each bug in entry is mapped to the last bug in the duplicate chain (None if there's no dup and 'cycle' if a cycle is detected)
"""
include_fields = ['id', 'resolution', 'dupe_of']
include_fields = ["id", "resolution", "dupe_of"]
dup = {}
_set = set()
for bugid in bugids:
dup[str(bugid)] = None
def bughandler(bug):
if bug['resolution'] == 'DUPLICATE':
dupeofid = str(bug['dupe_of'])
dup[str(bug['id'])] = [dupeofid]
if bug["resolution"] == "DUPLICATE":
dupeofid = str(bug["dupe_of"])
dup[str(bug["id"])] = [dupeofid]
_set.add(dupeofid)
bz = Bugzilla(bugids=bugids, include_fields=include_fields, bughandler=bughandler).get_data()
bz = Bugzilla(
bugids=bugids, include_fields=include_fields, bughandler=bughandler
).get_data()
bz.wait_bugs()
def bughandler2(bug):
if bug['resolution'] == 'DUPLICATE':
bugid = str(bug['id'])
if bug["resolution"] == "DUPLICATE":
bugid = str(bug["id"])
for _id, dupid in dup.items():
if dupid and dupid[-1] == bugid:
dupeofid = str(bug['dupe_of'])
dupeofid = str(bug["dupe_of"])
if dupeofid == _id or dupeofid in dupid:
# avoid infinite loop if any
dup[_id].append('cycle')
dup[_id].append("cycle")
else:
dup[_id].append(dupeofid)
_set.add(dupeofid)
@ -275,12 +310,16 @@ class Bugzilla(Connection):
history_entries = []
for history_entry in history:
for change in history_entry['changes']:
for change in history_entry["changes"]:
matches = True
for change_key, change_value in change.items():
for key, value in change_to_match.items():
if key == change_key and value != change_value and value not in change_value.split(', '):
if (
key == change_key
and value != change_value
and value not in change_value.split(", ")
):
matches = False
break
@ -294,27 +333,67 @@ class Bugzilla(Connection):
return history_entries
@staticmethod
def get_landing_patterns(channels=['release', 'beta', 'aurora', 'nightly']):
def get_landing_patterns(channels=["release", "beta", "aurora", "nightly"]):
if not isinstance(channels, list):
channels = [channels]
landing_patterns = []
for channel in channels:
if channel in ['central', 'nightly']:
if channel in ["central", "nightly"]:
landing_patterns += [
(re.compile(r'://hg.mozilla.org/mozilla-central/rev/([0-9a-f]+)'), channel),
(re.compile(r'://hg.mozilla.org/mozilla-central/pushloghtml\?changeset=([0-9a-f]+)'), channel),
(
re.compile(
r"://hg.mozilla.org/mozilla-central/rev/([0-9a-f]+)"
),
channel,
),
(
re.compile(
r"://hg.mozilla.org/mozilla-central/pushloghtml\?changeset=([0-9a-f]+)"
),
channel,
),
]
elif channel == "inbound":
landing_patterns += [
(
re.compile(
r"://hg.mozilla.org/integration/mozilla-inbound/rev/([0-9a-f]+)"
),
"inbound",
)
]
elif channel in ["release", "beta", "aurora"]:
landing_patterns += [
(
re.compile(
r"://hg.mozilla.org/releases/mozilla-"
+ channel
+ "/rev/([0-9a-f]+)"
),
channel,
)
]
elif channel == "esr":
landing_patterns += [
(
re.compile(
r"://hg.mozilla.org/releases/mozilla-esr(?:[0-9]+)/rev/([0-9a-f]+)"
),
channel,
)
]
elif channel == "fx-team":
landing_patterns += [
(
re.compile(
r"://hg.mozilla.org/integration/fx-team/rev/([0-9a-f]+)"
),
"inbound",
)
]
elif channel == 'inbound':
landing_patterns += [(re.compile(r'://hg.mozilla.org/integration/mozilla-inbound/rev/([0-9a-f]+)'), 'inbound')]
elif channel in ['release', 'beta', 'aurora']:
landing_patterns += [(re.compile(r'://hg.mozilla.org/releases/mozilla-' + channel + '/rev/([0-9a-f]+)'), channel)]
elif channel == 'esr':
landing_patterns += [(re.compile(r'://hg.mozilla.org/releases/mozilla-esr(?:[0-9]+)/rev/([0-9a-f]+)'), channel)]
elif channel == 'fx-team':
landing_patterns += [(re.compile(r'://hg.mozilla.org/integration/fx-team/rev/([0-9a-f]+)'), 'inbound')]
else:
raise Exception('Unexpected channel: ' + channel)
raise Exception("Unexpected channel: " + channel)
return landing_patterns
@ -327,12 +406,14 @@ class Bugzilla(Connection):
for comment in comments:
for landing_pattern in landing_patterns:
for match in landing_pattern[0].finditer(comment['text']):
results.append({
'comment': comment,
'revision': match.group(1),
'channel': landing_pattern[1],
})
for match in landing_pattern[0].finditer(comment["text"]):
results.append(
{
"comment": comment,
"revision": match.group(1),
"channel": landing_pattern[1],
}
)
return results
@ -344,10 +425,10 @@ class Bugzilla(Connection):
status_flags = {}
for c, v in base_versions.items():
v = str(v)
if c == 'esr':
f = 'cf_status_firefox_esr' + v
if c == "esr":
f = "cf_status_firefox_esr" + v
else:
f = 'cf_status_firefox' + v
f = "cf_status_firefox" + v
status_flags[c] = f
return status_flags
@ -366,11 +447,18 @@ class Bugzilla(Connection):
return None
def bug_handler(bug, data):
data[str(bug['id'])] = utils.signatures_parser(bug.get('cf_crash_signature', None))
data[str(bug["id"])] = utils.signatures_parser(
bug.get("cf_crash_signature", None)
)
bugids = utils.get_str_list(bugids)
data = {bugid: [] for bugid in bugids}
Bugzilla(bugids=bugids, include_fields=['id', 'cf_crash_signature'], bughandler=bug_handler, bugdata=data).wait()
Bugzilla(
bugids=bugids,
include_fields=["id", "cf_crash_signature"],
bughandler=bug_handler,
bugdata=data,
).wait()
return data
@ -384,11 +472,14 @@ class Bugzilla(Connection):
Returns:
(list): list of accessible bugs
"""
def bughandler(bug, data):
data.append(str(bug['id']))
data.append(str(bug["id"]))
data = []
Bugzilla(bugids, include_fields=['id'], bughandler=bughandler, bugdata=data).wait()
Bugzilla(
bugids, include_fields=["id"], bughandler=bughandler, bugdata=data
).wait()
return data
@ -407,13 +498,17 @@ class Bugzilla(Connection):
def __get_bugs_for_history_comment(self):
"""Get history and comment (if there are some handlers) after a search query
"""
if self.historyhandler.isactive() or self.commenthandler.isactive() or self.attachmenthandler.isactive():
if (
self.historyhandler.isactive()
or self.commenthandler.isactive()
or self.attachmenthandler.isactive()
):
bugids = []
bughandler = self.bughandler
def __handler(bug, bd):
bughandler.handle(bug)
bd.append(bug['id'])
bd.append(bug["id"])
self.bughandler = Handler(__handler, bugids)
@ -443,7 +538,7 @@ class Bugzilla(Connection):
res: result
"""
if res.status_code == 200:
for bug in res.json()['bugs']:
for bug in res.json()["bugs"]:
self.bughandler.handle(bug)
def __get_bugs(self):
@ -451,59 +546,79 @@ class Bugzilla(Connection):
"""
header = self.get_header()
for bugids in Connection.chunks(sorted(self.bugids, key=lambda k: int(k))):
self.bugs_results.append(self.session.get(Bugzilla.API_URL,
params={'id': ','.join(map(str, bugids)),
'include_fields': self.include_fields},
headers=header,
verify=True,
timeout=self.TIMEOUT,
hooks={'response': self.__bugs_cb}))
self.bugs_results.append(
self.session.get(
Bugzilla.API_URL,
params={
"id": ",".join(map(str, bugids)),
"include_fields": self.include_fields,
},
headers=header,
verify=True,
timeout=self.TIMEOUT,
hooks={"response": self.__bugs_cb},
)
)
def __get_bugs_by_search(self):
"""Get the bugs in making a search query
"""
url = Bugzilla.API_URL + '?'
url = Bugzilla.API_URL + "?"
header = self.get_header()
specials = {'count_only', 'limit', 'order', 'offset'}
specials = {"count_only", "limit", "order", "offset"}
for query in self.bugids:
if isinstance(query, six.string_types):
url = Bugzilla.API_URL + '?' + query
self.bugs_results.append(self.session.get(url,
headers=header,
verify=True,
timeout=self.TIMEOUT,
hooks={'response': self.__bugs_cb}))
url = Bugzilla.API_URL + "?" + query
self.bugs_results.append(
self.session.get(
url,
headers=header,
verify=True,
timeout=self.TIMEOUT,
hooks={"response": self.__bugs_cb},
)
)
elif specials.isdisjoint(query.keys()):
url = Bugzilla.API_URL
params = query.copy()
params['count_only'] = 1
r = requests.get(url,
params=params,
headers=header,
verify=True,
timeout=self.TIMEOUT)
params["count_only"] = 1
r = requests.get(
url,
params=params,
headers=header,
verify=True,
timeout=self.TIMEOUT,
)
if r.ok:
count = r.json()['bug_count']
del params['count_only']
params['limit'] = Bugzilla.BUGZILLA_CHUNK_SIZE
params['order'] = 'bug_id'
count = r.json()["bug_count"]
del params["count_only"]
params["limit"] = Bugzilla.BUGZILLA_CHUNK_SIZE
params["order"] = "bug_id"
for i in range(0, count, Bugzilla.BUGZILLA_CHUNK_SIZE):
# Batch the execution to avoid timeouts
params = params.copy()
params['offset'] = i
self.bugs_results.append(self.session.get(url,
params=params,
headers=header,
verify=True,
timeout=self.TIMEOUT,
hooks={'response': self.__bugs_cb}))
params["offset"] = i
self.bugs_results.append(
self.session.get(
url,
params=params,
headers=header,
verify=True,
timeout=self.TIMEOUT,
hooks={"response": self.__bugs_cb},
)
)
else:
self.bugs_results.append(self.session.get(url,
params=query,
headers=header,
verify=True,
timeout=self.TIMEOUT,
hooks={'response': self.__bugs_cb}))
self.bugs_results.append(
self.session.get(
url,
params=query,
headers=header,
verify=True,
timeout=self.TIMEOUT,
hooks={"response": self.__bugs_cb},
)
)
def __get_bugs_list(self):
"""Get the bugs list corresponding to the search query
@ -512,18 +627,22 @@ class Bugzilla(Connection):
def cb(res, *args, **kwargs):
if res.status_code == 200:
for bug in res.json()['bugs']:
_list.add(bug['id'])
for bug in res.json()["bugs"]:
_list.add(bug["id"])
results = []
url = Bugzilla.API_URL + '?'
url = Bugzilla.API_URL + "?"
header = self.get_header()
for query in self.bugids:
results.append(self.session.get(url + query,
headers=header,
verify=True,
timeout=self.TIMEOUT,
hooks={'response': cb}))
results.append(
self.session.get(
url + query,
headers=header,
verify=True,
timeout=self.TIMEOUT,
hooks={"response": cb},
)
)
for r in results:
r.result()
@ -539,26 +658,30 @@ class Bugzilla(Connection):
"""
if res.status_code == 200:
json = res.json()
if 'bugs' in json and json['bugs']:
for h in json['bugs']:
if "bugs" in json and json["bugs"]:
for h in json["bugs"]:
self.historyhandler.handle(h)
def __get_history(self):
"""Get the bug history
"""
url = Bugzilla.API_URL + '/%s/history'
url = Bugzilla.API_URL + "/%s/history"
header = self.get_header()
# TODO: remove next line after the fix of bug 1283392
bugids = self.__get_no_private_ids()
for _bugids in Connection.chunks(sorted(bugids, key=lambda k: int(k))):
first = _bugids[0]
remainder = _bugids[1:] if len(_bugids) >= 2 else []
self.history_results.append(self.session.get(url % first,
headers=header,
params={'ids': remainder},
verify=True,
timeout=self.TIMEOUT,
hooks={'response': self.__history_cb}))
self.history_results.append(
self.session.get(
url % first,
headers=header,
params={"ids": remainder},
verify=True,
timeout=self.TIMEOUT,
hooks={"response": self.__history_cb},
)
)
def __comment_cb(self, res, *args, **kwargs):
"""Callback for bug comment
@ -569,8 +692,8 @@ class Bugzilla(Connection):
"""
if res.status_code == 200:
json = res.json()
if 'bugs' in json:
bugs = json['bugs']
if "bugs" in json:
bugs = json["bugs"]
if bugs:
for key in bugs.keys():
if isinstance(key, six.string_types) and key.isdigit():
@ -580,22 +703,26 @@ class Bugzilla(Connection):
def __get_comment(self):
"""Get the bug comment
"""
url = Bugzilla.API_URL + '/%s/comment'
url = Bugzilla.API_URL + "/%s/comment"
header = self.get_header()
# TODO: remove next line after the fix of bug 1283392
bugids = self.__get_no_private_ids()
for _bugids in Connection.chunks(sorted(bugids, key=lambda k: int(k))):
first = _bugids[0]
remainder = _bugids[1:] if len(_bugids) >= 2 else []
self.comment_results.append(self.session.get(url % first,
headers=header,
params={
'ids': remainder,
'include_fields': self.comment_include_fields
},
verify=True,
timeout=self.TIMEOUT,
hooks={'response': self.__comment_cb}))
self.comment_results.append(
self.session.get(
url % first,
headers=header,
params={
"ids": remainder,
"include_fields": self.comment_include_fields,
},
verify=True,
timeout=self.TIMEOUT,
hooks={"response": self.__comment_cb},
)
)
def __attachment_cb(self, res, *args, **kwargs):
"""Callback for bug attachment
@ -606,8 +733,8 @@ class Bugzilla(Connection):
"""
if res.status_code == 200:
json = res.json()
if 'bugs' in json:
bugs = json['bugs']
if "bugs" in json:
bugs = json["bugs"]
if bugs:
for key in bugs.keys():
if isinstance(key, six.string_types) and key.isdigit():
@ -617,33 +744,45 @@ class Bugzilla(Connection):
def __get_attachment(self):
"""Get the bug attachment
"""
url = Bugzilla.API_URL + '/%s/attachment'
url = Bugzilla.API_URL + "/%s/attachment"
header = self.get_header()
# TODO: remove next line after the fix of bug 1283392
bugids = self.__get_no_private_ids()
for _bugids in Connection.chunks(sorted(bugids, key=lambda k: int(k))):
first = _bugids[0]
remainder = _bugids[1:] if len(_bugids) >= 2 else []
self.attachment_results.append(self.session.get(url % first,
headers=header,
params={
'ids': remainder,
'include_fields': self.attachment_include_fields
},
verify=True,
timeout=self.TIMEOUT,
hooks={'response': self.__attachment_cb}))
self.attachment_results.append(
self.session.get(
url % first,
headers=header,
params={
"ids": remainder,
"include_fields": self.attachment_include_fields,
},
verify=True,
timeout=self.TIMEOUT,
hooks={"response": self.__attachment_cb},
)
)
class BugzillaUser(Connection):
"""Connection to bugzilla.mozilla.org
"""
URL = config.get('Bugzilla', 'URL', 'https://bugzilla.mozilla.org')
API_URL = URL + '/rest/user'
TOKEN = config.get('Bugzilla', 'token', '')
URL = config.get("Bugzilla", "URL", "https://bugzilla.mozilla.org")
API_URL = URL + "/rest/user"
TOKEN = config.get("Bugzilla", "token", "")
def __init__(self, user_names=None, search_strings=None, include_fields='_default', user_handler=None, user_data=None, **kwargs):
def __init__(
self,
user_names=None,
search_strings=None,
include_fields="_default",
user_handler=None,
user_data=None,
**kwargs
):
"""Constructor
Args:
@ -660,30 +799,48 @@ class BugzillaUser(Connection):
user_names = [user_names]
params = {
'include_fields': include_fields,
'names': [user_name for user_name in user_names if isinstance(user_name, six.string_types) and not user_name.isdigit()],
'ids': [str(user_id) for user_id in user_names if isinstance(user_id, int) or user_id.isdigit()],
"include_fields": include_fields,
"names": [
user_name
for user_name in user_names
if isinstance(user_name, six.string_types)
and not user_name.isdigit()
],
"ids": [
str(user_id)
for user_id in user_names
if isinstance(user_id, int) or user_id.isdigit()
],
}
super(BugzillaUser, self).__init__(BugzillaUser.URL, Query(BugzillaUser.API_URL, params, self.__users_cb), **kwargs)
super(BugzillaUser, self).__init__(
BugzillaUser.URL,
Query(BugzillaUser.API_URL, params, self.__users_cb),
**kwargs
)
elif search_strings is not None:
if isinstance(search_strings, six.string_types):
search_strings = [search_strings]
queries = []
for search_string in search_strings:
queries.append(Query(BugzillaUser.API_URL + '?' + search_string, handler=self.__users_cb))
queries.append(
Query(
BugzillaUser.API_URL + "?" + search_string,
handler=self.__users_cb,
)
)
super(BugzillaUser, self).__init__(BugzillaUser.URL, queries, **kwargs)
def get_header(self):
header = super(BugzillaUser, self).get_header()
header['X-Bugzilla-API-Key'] = self.get_apikey()
header["X-Bugzilla-API-Key"] = self.get_apikey()
return header
def __users_cb(self, res):
if not self.user_handler.isactive():
return
for user in res['users']:
for user in res["users"]:
self.user_handler.handle(user)
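
A condensed sketch of the handler-based API above, with a hypothetical bug id (an API key is only required for non-public data):

from libmozdata.bugzilla import Bugzilla

def bughandler(bug, data):
    data.append((bug["id"], bug["severity"]))

data = []
Bugzilla(
    bugids=["1234567"],  # hypothetical; a search query string is accepted too
    include_fields=["id", "severity"],
    bughandler=bughandler,
    bugdata=data,
).wait()
print(data)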

libmozdata/versions.py

@ -2,9 +2,9 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import re
import os
import json
import os
import re
from collections import defaultdict
from distutils.version import LooseVersion

libmozdata/config.py

@ -3,6 +3,7 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import os
try:
from configparser import ConfigParser
except ImportError:
@ -10,13 +11,11 @@ except ImportError:
class Config(object):
def get(section, option, default=None):
raise NotImplementedError
class ConfigIni(Config):
def __init__(self, path=None):
self.config = ConfigParser()
if path is not None:
@ -26,7 +25,10 @@ class ConfigIni(Config):
self.path = path
def get_default_paths(self):
return [os.path.join(os.getcwd(), 'mozdata.ini'), os.path.expanduser('~/.mozdata.ini')]
return [
os.path.join(os.getcwd(), "mozdata.ini"),
os.path.expanduser("~/.mozdata.ini"),
]
def get(self, section, option, default=None, type=str):
if not self.config.has_option(section, option):
@ -34,7 +36,7 @@ class ConfigIni(Config):
res = self.config.get(section, option)
if type == list or type == set:
return type([s.strip(' /t') for s in res.split(',')])
return type([s.strip(" /t") for s in res.split(",")])
else:
return type(res)
@ -43,9 +45,8 @@ class ConfigIni(Config):
class ConfigEnv(Config):
def get(self, section, option, default=None):
env = os.environ.get('LIBMOZDATA_CFG_' + section.upper() + '_' + option.upper())
env = os.environ.get("LIBMOZDATA_CFG_" + section.upper() + "_" + option.upper())
if not env:
return default
@ -57,7 +58,7 @@ __config = ConfigIni()
def set_config(conf):
if not isinstance(conf, Config):
raise TypeError('Argument must have type config.Config')
raise TypeError("Argument must have type config.Config")
global __config
__config = conf
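
A short sketch of the lookup chain: the module-level get falls back to the default when no mozdata.ini exists, and set_config can swap in the environment-based backend (the option values are hypothetical):

from libmozdata import config
from libmozdata.config import ConfigEnv, set_config

token = config.get("Bugzilla", "token", "")  # "" when no mozdata.ini is found

# Read LIBMOZDATA_CFG_BUGZILLA_TOKEN from the environment instead:
set_config(ConfigEnv())
token = config.get("Bugzilla", "token", "")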

libmozdata/connection.py

@ -3,11 +3,12 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import multiprocessing
from requests.adapters import HTTPAdapter
from requests_futures.sessions import FuturesSession
from requests.packages.urllib3.util.retry import Retry
from . import config
from . import utils
from requests_futures.sessions import FuturesSession
from . import config, utils
class Query(object):
@ -39,7 +40,9 @@ class Query(object):
if not isinstance(params_list, list):
params_list = [self.params]
return '\n'.join('url: %s' % self.url + self.params_repr(params) for params in params_list)
return "\n".join(
"url: %s" % self.url + self.params_repr(params) for params in params_list
)
class Connection(object):
@ -50,9 +53,11 @@ class Connection(object):
MAX_RETRIES = 256
MAX_WORKERS = multiprocessing.cpu_count()
CHUNK_SIZE = 32
TOKEN = ''
USER_AGENT = config.get('User-Agent', 'name', 'libmozdata')
X_FORWARDED_FOR = utils.get_x_fwed_for_str(config.get('X-Forwarded-For', 'data', ''))
TOKEN = ""
USER_AGENT = config.get("User-Agent", "name", "libmozdata")
X_FORWARDED_FOR = utils.get_x_fwed_for_str(
config.get("X-Forwarded-For", "data", "")
)
# Error 429 is for 'Too many requests' => we retry
STATUS_FORCELIST = [429]
@ -66,24 +71,30 @@ class Connection(object):
"""
self.session = FuturesSession(max_workers=self.MAX_WORKERS)
retries = Retry(total=Connection.MAX_RETRIES, backoff_factor=1, status_forcelist=Connection.STATUS_FORCELIST)
retries = Retry(
total=Connection.MAX_RETRIES,
backoff_factor=1,
status_forcelist=Connection.STATUS_FORCELIST,
)
self.session.mount(base_url, HTTPAdapter(max_retries=retries))
self.results = []
self.queries = queries
if kwargs:
if 'timeout' in kwargs:
self.TIMEOUT = kwargs['timeout']
if 'max_retries' in kwargs:
self.MAX_RETRIES = kwargs['max_retries']
if 'max_workers' in kwargs:
self.MAX_WORKERS = kwargs['max_workers']
if 'user_agent' in kwargs:
self.USER_AGENT = kwargs['user_agent']
if 'x_forwarded_for' in kwargs:
self.X_FORWARDED_FOR = utils.get_x_fwded_for_str(kwargs['x_forwarded_for'])
if 'raise_error' in kwargs:
self.RAISE_ERROR = kwargs['raise_error']
if "timeout" in kwargs:
self.TIMEOUT = kwargs["timeout"]
if "max_retries" in kwargs:
self.MAX_RETRIES = kwargs["max_retries"]
if "max_workers" in kwargs:
self.MAX_WORKERS = kwargs["max_workers"]
if "user_agent" in kwargs:
self.USER_AGENT = kwargs["user_agent"]
if "x_forwarded_for" in kwargs:
self.X_FORWARDED_FOR = utils.get_x_fwded_for_str(
kwargs["x_forwarded_for"]
)
if "raise_error" in kwargs:
self.RAISE_ERROR = kwargs["raise_error"]
else:
self.RAISE_ERROR = False
@ -98,6 +109,7 @@ class Connection(object):
Returns:
function: the callback for the query
"""
def cb(res, *args, **kwargs):
if res.status_code == 200:
try:
@ -112,9 +124,9 @@ class Connection(object):
elif self.RAISE_ERROR:
res.raise_for_status()
else:
print('Connection error:')
print(' url: ', res.url)
print(' text: ', res.text)
print("Connection error:")
print(" url: ", res.url)
print(" text: ", res.text)
return cb
@ -139,9 +151,13 @@ class Connection(object):
dict: the header
"""
if self.X_FORWARDED_FOR:
return {'User-Agent': self.USER_AGENT, 'X-Forwarded-For': self.X_FORWARDED_FOR, 'Connection': 'close'}
return {
"User-Agent": self.USER_AGENT,
"X-Forwarded-For": self.X_FORWARDED_FOR,
"Connection": "close",
}
else:
return {'User-Agent': self.USER_AGENT, 'Connection': 'close'}
return {"User-Agent": self.USER_AGENT, "Connection": "close"}
def get_auth(self):
"""Get the auth to use each query
@ -171,29 +187,41 @@ class Connection(object):
cb = self.__get_cb(query)
if query.params:
if isinstance(query.params, dict):
self.results.append(self.session.get(query.url,
params=query.params,
headers=header,
auth=auth,
verify=True,
timeout=self.TIMEOUT,
hooks={'response': cb}))
self.results.append(
self.session.get(
query.url,
params=query.params,
headers=header,
auth=auth,
verify=True,
timeout=self.TIMEOUT,
hooks={"response": cb},
)
)
else:
for p in query.params:
self.results.append(self.session.get(query.url,
params=p,
headers=header,
auth=auth,
verify=True,
timeout=self.TIMEOUT,
hooks={'response': cb}))
self.results.append(
self.session.get(
query.url,
params=p,
headers=header,
auth=auth,
verify=True,
timeout=self.TIMEOUT,
hooks={"response": cb},
)
)
else:
self.results.append(self.session.get(query.url,
headers=header,
auth=auth,
verify=True,
timeout=self.TIMEOUT,
hooks={'response': cb}))
self.results.append(
self.session.get(
query.url,
headers=header,
auth=auth,
verify=True,
timeout=self.TIMEOUT,
hooks={"response": cb},
)
)
@staticmethod
def chunks(l, chunk_size=CHUNK_SIZE):
@ -207,4 +235,4 @@ class Connection(object):
a chunk from the data
"""
for i in range(0, len(l), chunk_size):
yield l[i:(i + chunk_size)]
yield l[i : (i + chunk_size)]
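
For illustration, the chunking helper above can be exercised on its own; a minimal sketch:

from libmozdata.connection import Connection

# Split a list of ids into request-sized batches (CHUNK_SIZE defaults to 32).
print(list(Connection.chunks(list(range(10)), chunk_size=4)))
# -> [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9]]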

libmozdata/gmail.py

@ -1,47 +1,47 @@
import httplib2
import os
from apiclient import discovery
import oauth2client
from oauth2client import client
from oauth2client import tools
import argparse
import base64
from os.path import basename
from email.mime.text import MIMEText
import os
from email.mime.application import MIMEApplication
from email.mime.text import MIMEText
from email.MIMEMultipart import MIMEMultipart
from os.path import basename
import httplib2
import oauth2client
from apiclient import discovery
from oauth2client import client, tools
from . import config
SCOPES = ['https://www.googleapis.com/auth/gmail.send']
CREDENTIALS_PATH = os.path.expanduser(config.get('Gmail', 'credentials', ''))
SCOPES = ["https://www.googleapis.com/auth/gmail.send"]
CREDENTIALS_PATH = os.path.expanduser(config.get("Gmail", "credentials", ""))
def send(To, Subject, Body, Cc=[], Bcc=[], html=False, files=[]):
"""Send an email
"""
subtype = 'html' if html else 'plain'
subtype = "html" if html else "plain"
message = MIMEMultipart()
message['To'] = ', '.join(To)
message['Subject'] = Subject
message['Cc'] = ', '.join(Cc)
message['Bcc'] = ', '.join(Bcc)
message["To"] = ", ".join(To)
message["Subject"] = Subject
message["Cc"] = ", ".join(Cc)
message["Bcc"] = ", ".join(Bcc)
message.attach(MIMEText(Body, subtype))
for f in files:
with open(f, "rb") as In:
part = MIMEApplication(In.read(), Name=basename(f))
part['Content-Disposition'] = 'attachment; filename="%s"' % basename(f)
part["Content-Disposition"] = 'attachment; filename="%s"' % basename(f)
message.attach(part)
message = {'raw': base64.urlsafe_b64encode(message.as_string())}
message = {"raw": base64.urlsafe_b64encode(message.as_string())}
credentials = oauth2client.file.Storage(CREDENTIALS_PATH).get()
Http = credentials.authorize(httplib2.Http())
service = discovery.build('gmail', 'v1', http=Http)
service = discovery.build("gmail", "v1", http=Http)
message = service.users().messages().send(userId='me', body=message).execute()
message = service.users().messages().send(userId="me", body=message).execute()
def create_credentials(client_secret_path):
@ -50,22 +50,32 @@ def create_credentials(client_secret_path):
Args:
path (str), path to client_secret.json file
"""
flow = client.flow_from_clientsecrets(client_secret_path, ' '.join(SCOPES))
flow.user_agent = 'Clouseau'
flow.params['access_type'] = 'offline'
flow.params['approval_prompt'] = 'force'
flow = client.flow_from_clientsecrets(client_secret_path, " ".join(SCOPES))
flow.user_agent = "Clouseau"
flow.params["access_type"] = "offline"
flow.params["approval_prompt"] = "force"
store = oauth2client.file.Storage(CREDENTIALS_PATH)
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args(['--noauth_local_webserver'])
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args(
["--noauth_local_webserver"]
)
tools.run_flow(flow, store, flags)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Create credentials to be able to send mail using a Gmail account')
parser.add_argument('--client-secret-path', dest='cs_path', action='store', default='', help='path to client_secret.json')
parser = argparse.ArgumentParser(
description="Create credentials to be able to send mail in using Gmail account"
)
parser.add_argument(
"--client-secret-path",
dest="cs_path",
action="store",
default="",
help="path to client_secret.json",
)
args = parser.parse_args()
if not args.cs_path:
raise Exception('You must provide the paths to client_secret.json')
raise Exception("You must provide the paths to client_secret.json")
create_credentials(args.cs_path)
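
A hypothetical call to the helper above, assuming the module is importable as libmozdata.gmail and that credentials were created beforehand with create_credentials at the path configured in the Gmail section of mozdata.ini:

from libmozdata import gmail

gmail.send(
    To=["someone@example.com"],  # hypothetical recipient
    Subject="Weekly report",
    Body="<b>All green.</b>",
    html=True,
)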

libmozdata/handler.py

@ -4,7 +4,6 @@
class Handler(object):
def __init__(self, func=None, data=None):
self.handler = func
self.data = data
@ -12,7 +11,7 @@ class Handler(object):
def handle(self, *args):
if self.handler:
if self.data is not None:
args += (self.data, )
args += (self.data,)
self.handler(*args)
else:
self.handler(*args)
@ -50,7 +49,6 @@ class Handler(object):
class MultipleHandler(Handler):
def __init__(self, *args):
self.handler = []
for arg in args:
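
A minimal sketch of the Handler contract shown above: when data is set, it is appended as a trailing argument on every call.

from libmozdata.handler import Handler

def collect(bug, store):
    store.append(bug)

store = []
h = Handler(collect, store)
h.handle({"id": 1})  # invokes collect({"id": 1}, store)
print(store)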

libmozdata/hgmozilla.py

@ -3,18 +3,19 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import six
from .connection import (Connection, Query)
from . import config
from .connection import Connection, Query
class Mercurial(Connection):
"""Mozilla mercurial connection: http://hg.mozilla.org
"""
HG_URL = config.get('Mercurial', 'URL', 'https://hg.mozilla.org')
remote = HG_URL == 'https://hg.mozilla.org'
HG_URL = config.get("Mercurial", "URL", "https://hg.mozilla.org")
remote = HG_URL == "https://hg.mozilla.org"
def __init__(self, queries, channel='nightly', **kwargs):
def __init__(self, queries, channel="nightly", **kwargs):
"""Constructor
Args:
@ -34,12 +35,12 @@ class Mercurial(Connection):
Returns:
str: the repo name
"""
if channel == 'nightly' or channel == 'central':
return 'mozilla-central'
elif channel == 'inbound':
return 'integration/mozilla-inbound'
if channel == "nightly" or channel == "central":
return "mozilla-central"
elif channel == "inbound":
return "integration/mozilla-inbound"
else:
return 'releases/mozilla-' + channel
return "releases/mozilla-" + channel
@staticmethod
def get_repo_url(channel):
@ -52,7 +53,7 @@ class Mercurial(Connection):
str: the repo url
"""
if Mercurial.remote:
return Mercurial.HG_URL + '/' + Mercurial.get_repo(channel)
return Mercurial.HG_URL + "/" + Mercurial.get_repo(channel)
else:
return Mercurial.HG_URL
@ -61,7 +62,15 @@ class Revision(Mercurial):
"""Connection to get a revision
"""
def __init__(self, channel='nightly', params=None, handler=None, handlerdata=None, queries=None, **kwargs):
def __init__(
self,
channel="nightly",
params=None,
handler=None,
handlerdata=None,
queries=None,
**kwargs
):
"""Constructor
Args:
@ -74,7 +83,9 @@ class Revision(Mercurial):
if queries:
super(Revision, self).__init__(queries, **kwargs)
else:
super(Revision, self).__init__(Query(Revision.get_url(channel), params, handler, handlerdata), **kwargs)
super(Revision, self).__init__(
Query(Revision.get_url(channel), params, handler, handlerdata), **kwargs
)
@staticmethod
def get_url(channel):
@ -86,7 +97,7 @@ class Revision(Mercurial):
Returns:
str: the api url
"""
return Mercurial.get_repo_url(channel) + '/json-rev'
return Mercurial.get_repo_url(channel) + "/json-rev"
@staticmethod
def default_handler(json, data):
@ -99,7 +110,7 @@ class Revision(Mercurial):
data.update(json)
@staticmethod
def get_revision(channel='nightly', node='default'):
def get_revision(channel="nightly", node="default"):
"""Get the revision for a node
Args:
@ -110,7 +121,7 @@ class Revision(Mercurial):
dict: the revision corresponding to the node
"""
data = {}
Revision(channel, {'node': node}, Revision.default_handler, data).wait()
Revision(channel, {"node": node}, Revision.default_handler, data).wait()
return data
@ -118,7 +129,9 @@ class RawRevision(Mercurial):
"""Connection to get a raw revision
"""
def __init__(self, channel='nightly', params=None, handler=None, queries=None, **kwargs):
def __init__(
self, channel="nightly", params=None, handler=None, queries=None, **kwargs
):
"""Constructor
Args:
@ -131,7 +144,9 @@ class RawRevision(Mercurial):
if queries:
super(RawRevision, self).__init__(queries, **kwargs)
else:
super(RawRevision, self).__init__(Query(RawRevision.get_url(channel), params, handler), **kwargs)
super(RawRevision, self).__init__(
Query(RawRevision.get_url(channel), params, handler), **kwargs
)
@staticmethod
def get_url(channel):
@ -143,10 +158,10 @@ class RawRevision(Mercurial):
Returns:
str: the api url
"""
return Mercurial.get_repo_url(channel) + '/raw-rev'
return Mercurial.get_repo_url(channel) + "/raw-rev"
@staticmethod
def get_revision(channel='nightly', node='default'):
def get_revision(channel="nightly", node="default"):
"""Get the revision for a node
Args:
@ -159,18 +174,26 @@ class RawRevision(Mercurial):
data = {}
def handler(response):
data['res'] = response
data["res"] = response
RawRevision(channel, {'node': node}, handler).wait()
RawRevision(channel, {"node": node}, handler).wait()
return data['res']
return data["res"]
class FileInfo(Mercurial):
"""Connection to get file info
"""
def __init__(self, channel='nightly', params=None, handler=None, handlerdata=None, queries=None, **kwargs):
def __init__(
self,
channel="nightly",
params=None,
handler=None,
handlerdata=None,
queries=None,
**kwargs
):
"""Constructor
Args:
@ -183,7 +206,9 @@ class FileInfo(Mercurial):
if queries:
super(FileInfo, self).__init__(queries, **kwargs)
else:
super(FileInfo, self).__init__(Query(FileInfo.get_url(channel), params, handler, handlerdata), **kwargs)
super(FileInfo, self).__init__(
Query(FileInfo.get_url(channel), params, handler, handlerdata), **kwargs
)
@staticmethod
def get_url(channel):
@ -195,7 +220,7 @@ class FileInfo(Mercurial):
Returns:
str: the api url
"""
return Mercurial.get_repo_url(channel) + '/json-filelog'
return Mercurial.get_repo_url(channel) + "/json-filelog"
@staticmethod
def default_handler(json, data):
@ -208,7 +233,7 @@ class FileInfo(Mercurial):
data.update(json)
@staticmethod
def get(paths, channel='nightly', node='default'):
def get(paths, channel="nightly", node="default"):
"""Get the file info for several paths
Args:
@ -221,20 +246,24 @@ class FileInfo(Mercurial):
"""
data = {}
__base = {'node': node,
'file': None}
__base = {"node": node, "file": None}
if isinstance(paths, six.string_types):
__base['file'] = paths
__base["file"] = paths
_dict = {}
data[paths] = _dict
FileInfo(channel=channel, params=__base, handler=FileInfo.default_handler, handlerdata=_dict).wait()
FileInfo(
channel=channel,
params=__base,
handler=FileInfo.default_handler,
handlerdata=_dict,
).wait()
else:
url = FileInfo.get_url(channel)
queries = []
for path in paths:
cparams = __base.copy()
cparams['file'] = path
cparams["file"] = path
_dict = {}
data[path] = _dict
queries.append(Query(url, cparams, FileInfo.default_handler, _dict))
@ -247,7 +276,15 @@ class Annotate(Mercurial):
"""Connection to get file annotation (blame)
"""
def __init__(self, channel='nightly', params=None, handler=None, handlerdata=None, queries=None, **kwargs):
def __init__(
self,
channel="nightly",
params=None,
handler=None,
handlerdata=None,
queries=None,
**kwargs
):
"""Constructor
Args:
@ -260,7 +297,9 @@ class Annotate(Mercurial):
if queries:
super(Annotate, self).__init__(queries, **kwargs)
else:
super(Annotate, self).__init__(Query(Annotate.get_url(channel), params, handler, handlerdata), **kwargs)
super(Annotate, self).__init__(
Query(Annotate.get_url(channel), params, handler, handlerdata), **kwargs
)
@staticmethod
def get_url(channel):
@ -272,7 +311,7 @@ class Annotate(Mercurial):
Returns:
str: the api url
"""
return Mercurial.get_repo_url(channel) + '/json-annotate'
return Mercurial.get_repo_url(channel) + "/json-annotate"
@staticmethod
def default_handler(json, data):
@ -285,7 +324,7 @@ class Annotate(Mercurial):
data.update(json)
@staticmethod
def get(paths, channel='nightly', node='default'):
def get(paths, channel="nightly", node="default"):
"""Get the annotated files for several paths
Args:
@ -298,20 +337,24 @@ class Annotate(Mercurial):
"""
data = {}
__base = {'node': node,
'file': None}
__base = {"node": node, "file": None}
if isinstance(paths, six.string_types):
__base['file'] = paths
__base["file"] = paths
_dict = {}
data[paths] = _dict
Annotate(channel=channel, params=__base, handler=Annotate.default_handler, handlerdata=_dict).wait()
Annotate(
channel=channel,
params=__base,
handler=Annotate.default_handler,
handlerdata=_dict,
).wait()
else:
url = Annotate.get_url(channel)
queries = []
for path in paths:
cparams = __base.copy()
cparams['file'] = path
cparams["file"] = path
_dict = {}
data[path] = _dict
queries.append(Query(url, cparams, Annotate.default_handler, _dict))
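For reference, a minimal usage sketch of the connections above, assuming they are importable from libmozdata.hgmozilla as in this repository and that hg.mozilla.org is reachable; the file path is illustrative:

from libmozdata.hgmozilla import Annotate, FileInfo, Revision

# Metadata for the current default head of the nightly (mozilla-central) repo.
rev = Revision.get_revision(channel="nightly", node="default")

# History and blame for one path (illustrative); both also accept a list of paths.
log = FileInfo.get("widget/nsBaseWidget.cpp", channel="nightly")
blame = Annotate.get("widget/nsBaseWidget.cpp", channel="nightly")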

View file

@ -2,14 +2,14 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import json
import fnmatch
import argparse
import fnmatch
import json
import os
with open(os.path.join(os.path.dirname(__file__), 'modules.json')) as f:
with open(os.path.join(os.path.dirname(__file__), "modules.json")) as f:
data = json.load(f)
MODULES = [module['name'] for module in data]
MODULES = [module["name"] for module in data]
def __match(path, pattern):
@ -17,56 +17,63 @@ def __match(path, pattern):
path = os.path.normpath(path)
# If the pattern contains a '*', assume the pattern is correct and we don't have to modify it.
if '*' in pattern:
if "*" in pattern:
return fnmatch.fnmatch(path, pattern)
# If the pattern contains a '.', assume it is a specific file.
elif '.' in pattern:
elif "." in pattern:
return path == pattern
# Otherwise, assume the pattern is a directory and add a '*' to match all its children.
else:
return fnmatch.fnmatch(path, pattern + '*')
return fnmatch.fnmatch(path, pattern + "*")
def module_from_path(path):
maxCommon = dict(
module=None,
directory=''
)
maxCommon = dict(module=None, directory="")
for module in data:
for directory in module['sourceDirs']:
if (len(os.path.commonprefix([path, directory])) > len(os.path.commonprefix([path, maxCommon['directory']]))) and\
__match(path, directory):
maxCommon['module'] = module
maxCommon['directory'] = directory
for directory in module["sourceDirs"]:
if (
len(os.path.commonprefix([path, directory]))
> len(os.path.commonprefix([path, maxCommon["directory"]]))
) and __match(path, directory):
maxCommon["module"] = module
maxCommon["directory"] = directory
# If we couldn't pinpoint the module, use some heuristics.
if maxCommon['module'] is None:
if path.endswith('configure.in') or path.endswith('moz.build') or path.endswith('client.mk') or path.endswith('moz.configure') or path.endswith('aclocal.m4') or path.endswith('Makefile.in') or path.startswith('python/mach'):
return module_info('Build Config')
if maxCommon["module"] is None:
if (
path.endswith("configure.in")
or path.endswith("moz.build")
or path.endswith("client.mk")
or path.endswith("moz.configure")
or path.endswith("aclocal.m4")
or path.endswith("Makefile.in")
or path.startswith("python/mach")
):
return module_info("Build Config")
if path.startswith('js/'):
return module_info('JavaScript')
if path.startswith("js/"):
return module_info("JavaScript")
if path.startswith('security/'):
return module_info('security')
if path.startswith("security/"):
return module_info("security")
if path.startswith('tools/profiler/'):
return module_info('Code Analysis and Debugging Tools')
if path.startswith("tools/profiler/"):
return module_info("Code Analysis and Debugging Tools")
return maxCommon['module']
return maxCommon["module"]
def module_info(moduleName):
for module in data:
if module['name'].lower() == moduleName.lower():
if module["name"].lower() == moduleName.lower():
return module
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Mozilla Modules')
parser.add_argument('-p', '--path', action='store', help='the path to the file')
parser.add_argument('-m', '--module', action='store', help='the module name')
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Mozilla Modules")
parser.add_argument("-p", "--path", action="store", help="the path to the file")
parser.add_argument("-m", "--module", action="store", help="the module name")
args = parser.parse_args()
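A small sketch of how these lookups behave, assuming the file is importable as libmozdata.modules; the paths are illustrative:

from libmozdata import modules

# Heuristic fallback: js/ paths map to the JavaScript module when no
# sourceDirs entry wins.
info = modules.module_from_path("js/src/jit/Ion.cpp")

# Case-insensitive lookup by module name.
build_config = modules.module_info("Build Config")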

File diff not shown because of its large size. Load diff

View file

@ -5,41 +5,39 @@
import enum
import functools
import logging
import json
import logging
from collections import OrderedDict
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import urlencode, urlparse
import hglib
import requests
HGMO_JSON_REV_URL_TEMPLATE = 'https://hg.mozilla.org/mozilla-central/json-rev/{}'
MOZILLA_PHABRICATOR_PROD = 'https://phabricator.services.mozilla.com/api/'
HGMO_JSON_REV_URL_TEMPLATE = "https://hg.mozilla.org/mozilla-central/json-rev/{}"
MOZILLA_PHABRICATOR_PROD = "https://phabricator.services.mozilla.com/api/"
logger = logging.getLogger(__name__)
class BuildState(enum.Enum):
Work = 'work'
Pass = 'pass'
Fail = 'fail'
Work = "work"
Pass = "pass"
Fail = "fail"
class UnitResultState(enum.Enum):
Pass = 'pass'
Fail = 'fail'
Skip = 'skip'
Broken = 'broken'
Unsound = 'unsound'
Pass = "pass"
Fail = "fail"
Skip = "skip"
Broken = "broken"
Unsound = "unsound"
class ArtifactType(enum.Enum):
Host = 'host'
WorkingCopy = 'working-copy'
File = 'file'
Uri = 'uri'
Host = "host"
WorkingCopy = "working-copy"
File = "file"
Uri = "uri"
@functools.lru_cache(maxsize=2048)
@ -51,9 +49,9 @@ def revision_exists_on_central(revision):
def revision_available(repo, revision):
'''
"""
Check if a revision is available on a Mercurial repo
'''
"""
try:
repo.identify(revision)
return True
@ -62,66 +60,82 @@ def revision_available(repo, revision):
def as_list(name, value, value_type):
'''
"""
Helper to convert Phabricator inputs to list
Supports single and multiple values, checking their type
'''
"""
if isinstance(value, value_type):
return [value, ]
return [value]
elif isinstance(value, list):
assert all(map(lambda v: isinstance(v, value_type), value)), \
'All values in {} should be of type {}'.format(name, value_type)
assert all(
map(lambda v: isinstance(v, value_type), value)
), "All values in {} should be of type {}".format(name, value_type)
return value
else:
raise Exception('{0} must be a {1} or a list of {1}'.format(name, value_type))
raise Exception("{0} must be a {1} or a list of {1}".format(name, value_type))
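For instance, the helper normalizes both calls below to [5] and raises for a mismatched type:

as_list("diff_id", 5, int)    # -> [5]
as_list("diff_id", [5], int)  # -> [5]
as_list("diff_id", "5", int)  # raises Exception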
# Descriptions of the fields are available at
# https://phabricator.services.mozilla.com/conduit/method/harbormaster.sendmessage/,
# in the "Lint Results" paragraph.
class LintResult(dict):
def __init__(self, name, code, severity, path, line=None, char=None, description=None):
self['name'] = name
self['code'] = code
self['severity'] = severity
self['path'] = path
def __init__(
self, name, code, severity, path, line=None, char=None, description=None
):
self["name"] = name
self["code"] = code
self["severity"] = severity
self["path"] = path
if line is not None:
self['line'] = line
self["line"] = line
if char is not None:
self['char'] = char
self["char"] = char
if description is not None:
self['description'] = description
self["description"] = description
self.validates()
def validates(self):
'''
"""
Check the input is a lint issue compatible with Phabricator
'''
"""
# Check required strings
for key in ('name', 'code', 'severity', 'path'):
assert isinstance(self[key], str), '{} should be a string'.format(key)
for key in ("name", "code", "severity", "path"):
assert isinstance(self[key], str), "{} should be a string".format(key)
# Check the severity is a valid value
assert self['severity'] in ('advice', 'autofix', 'warning', 'error', 'disabled'), \
'Invalid severity value: {}'.format(self['severity'])
assert self["severity"] in (
"advice",
"autofix",
"warning",
"error",
"disabled",
), "Invalid severity value: {}".format(self["severity"])
# Check optional integers
for key in ('line', 'char'):
for key in ("line", "char"):
value = self.get(key)
if value:
assert isinstance(value, int), '{} should be an int'.format(key)
assert isinstance(value, int), "{} should be an int".format(key)
return True
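A hypothetical lint issue built with this class (all field values are illustrative); validates() runs from the constructor:

issue = LintResult(
    name="clang-tidy",
    code="modernize-use-nullptr",
    severity="warning",
    path="dom/base/nsDocument.cpp",  # illustrative path
    line=42,
    description="use nullptr instead of NULL",
)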
class UnitResult(dict):
def __init__(self, name, result, **kwargs):
self['name'] = name
assert isinstance(result, UnitResultState), 'result must be a UnitResultState'
self['result'] = result.value
self["name"] = name
assert isinstance(result, UnitResultState), "result must be a UnitResultState"
self["result"] = result.value
for key in ('namespace', 'engine', 'duration', 'path', 'coverage', 'details', 'format'):
for key in (
"namespace",
"engine",
"duration",
"path",
"coverage",
"details",
"format",
):
value = kwargs.get(key)
if value is not None:
self[key] = value
@ -129,25 +143,26 @@ class UnitResult(dict):
self.validates()
def validates(self):
'''
"""
Check the input is a unit result compatible with Phabricator
'''
"""
# Check name
assert isinstance(self['name'], str), 'name should be a string'
assert isinstance(self["name"], str), "name should be a string"
# Check special optional types
if 'duration' in self:
assert isinstance(self['duration'], (float, int)), \
'Duration should be an int or float'
if 'coverage' in self:
assert isinstance(self['coverage'], dict), 'Coverage should be a dict'
if 'format' in self:
assert self['format'] in ('text', 'remarkup'), 'Invalid format value'
if "duration" in self:
assert isinstance(
self["duration"], (float, int)
), "Duration should be an int or float"
if "coverage" in self:
assert isinstance(self["coverage"], dict), "Coverage should be a dict"
if "format" in self:
assert self["format"] in ("text", "remarkup"), "Invalid format value"
# Check optional strings
for key in ('namespace', 'engine', 'path', 'details'):
for key in ("namespace", "engine", "path", "details"):
if key in self:
assert isinstance(self[key], str), '{} should be a string'.format(key)
assert isinstance(self[key], str), "{} should be a string".format(key)
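And a hypothetical unit test outcome; result must be a UnitResultState member, and the optional fields are type-checked as shown above:

outcome = UnitResult(
    name="test_startup",  # illustrative test name
    result=UnitResultState.Pass,
    duration=1.5,
    format="text",
)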
class PhabricatorRevisionNotFoundException(Exception):
@ -155,175 +170,178 @@ class PhabricatorRevisionNotFoundException(Exception):
class ConduitError(Exception):
'''
"""
Exception to be raised when Phabricator returns an error response.
'''
"""
def __init__(self, msg, error_code=None, error_info=None):
super(ConduitError, self).__init__(msg)
self.error_code = error_code
self.error_info = error_info
logger.warn('Conduit API error {} : {}'.format(
self.error_code,
self.error_info or 'unknown'
))
logger.warn(
"Conduit API error {} : {}".format(
self.error_code, self.error_info or "unknown"
)
)
@classmethod
def raise_if_error(cls, response_body):
'''
"""
Raise a ConduitError if the provided response_body was an error.
'''
if response_body['error_code'] is not None:
"""
if response_body["error_code"] is not None:
raise cls(
response_body.get('error_info'),
error_code=response_body.get('error_code'),
error_info=response_body.get('error_info')
response_body.get("error_info"),
error_code=response_body.get("error_code"),
error_info=response_body.get("error_info"),
)
class PhabricatorAPI(object):
'''
"""
Phabricator Rest API client
'''
"""
def __init__(self, api_key, url=MOZILLA_PHABRICATOR_PROD):
self.api_key = api_key
self.url = url
assert self.url.endswith('/api/'), \
'Phabricator API must end with /api/'
assert self.url.endswith("/api/"), "Phabricator API must end with /api/"
# Test authentication
self.user = self.request('user.whoami')
logger.info('Authenticated on {} as {}'.format(self.url, self.user['realName']))
self.user = self.request("user.whoami")
logger.info("Authenticated on {} as {}".format(self.url, self.user["realName"]))
@property
def hostname(self):
parts = urlparse(self.url)
return parts.netloc
def search_diffs(self, diff_phid=None, diff_id=None, revision_phid=None, output_cursor=False, **params):
'''
def search_diffs(
self,
diff_phid=None,
diff_id=None,
revision_phid=None,
output_cursor=False,
**params
):
"""
Find details of differential diffs from a Differential diff or revision
Multiple diffs can be returned (when using revision_phid)
'''
"""
constraints = {}
if diff_phid is not None:
constraints['phids'] = as_list('diff_phid', diff_phid, str)
constraints["phids"] = as_list("diff_phid", diff_phid, str)
if diff_id is not None:
constraints['ids'] = as_list('diff_id', diff_id, int)
constraints["ids"] = as_list("diff_id", diff_id, int)
if revision_phid is not None:
constraints['revisionPHIDs'] = as_list('revision_phid', revision_phid, str)
out = self.request('differential.diff.search', constraints=constraints, **params)
constraints["revisionPHIDs"] = as_list("revision_phid", revision_phid, str)
out = self.request(
"differential.diff.search", constraints=constraints, **params
)
def _clean(diff):
# Make all fields easily accessible
if 'fields' in diff and isinstance(diff['fields'], dict):
diff.update(diff['fields'])
del diff['fields']
if "fields" in diff and isinstance(diff["fields"], dict):
diff.update(diff["fields"])
del diff["fields"]
# Lookup base revision in refs
diff['refs'] = {
ref['type']: ref
for ref in diff['refs']
}
diff["refs"] = {ref["type"]: ref for ref in diff["refs"]}
try:
diff['baseRevision'] = diff['refs']['base']['identifier']
diff["baseRevision"] = diff["refs"]["base"]["identifier"]
except KeyError:
diff['baseRevision'] = None
diff["baseRevision"] = None
return diff
diffs = list(map(_clean, out['data']))
diffs = list(map(_clean, out["data"]))
if output_cursor is True:
return diffs, out['cursor']
return diffs, out["cursor"]
return diffs
def load_raw_diff(self, diff_id):
'''
"""
Load the raw diff content
'''
return self.request(
'differential.getrawdiff',
diffID=diff_id,
)
"""
return self.request("differential.getrawdiff", diffID=diff_id)
def load_revision(self, rev_phid=None, rev_id=None, **params):
'''
"""
Find details of a differential revision
'''
assert (rev_phid is not None) ^ (rev_id is not None), 'One and only one of rev_phid or rev_id should be passed'
"""
assert (rev_phid is not None) ^ (
rev_id is not None
), "One and only one of rev_phid or rev_id should be passed"
constraints = {}
if rev_id is not None:
constraints['ids'] = [rev_id, ]
constraints["ids"] = [rev_id]
if rev_phid is not None:
constraints['phids'] = [rev_phid, ]
constraints["phids"] = [rev_phid]
out = self.request(
'differential.revision.search',
constraints=constraints,
**params
"differential.revision.search", constraints=constraints, **params
)
data = out['data']
data = out["data"]
if len(data) != 1:
raise PhabricatorRevisionNotFoundException()
return data[0]
def list_repositories(self):
'''
"""
List available repositories
'''
out = self.request('diffusion.repository.search')
return out['data']
"""
out = self.request("diffusion.repository.search")
return out["data"]
def list_comments(self, revision_phid):
'''
"""
List and format existing inline comments for a revision
'''
"""
transactions = self.request(
'transaction.search',
objectIdentifier=revision_phid,
"transaction.search", objectIdentifier=revision_phid
)
return [
{
'diffID': transaction['fields']['diff']['id'],
'filePath': transaction['fields']['path'],
'lineNumber': transaction['fields']['line'],
'lineLength': transaction['fields']['length'],
'content': comment['content']['raw'],
"diffID": transaction["fields"]["diff"]["id"],
"filePath": transaction["fields"]["path"],
"lineNumber": transaction["fields"]["line"],
"lineLength": transaction["fields"]["length"],
"content": comment["content"]["raw"],
}
for transaction in transactions['data']
for comment in transaction['comments']
if transaction['type'] == 'inline' and transaction['authorPHID'] == self.user['phid']
for transaction in transactions["data"]
for comment in transaction["comments"]
if transaction["type"] == "inline"
and transaction["authorPHID"] == self.user["phid"]
]
def comment(self, revision_id, message):
'''
"""
Comment on a Differential revision
Using a frozen method, as new transactions do not
seem to support publishing inline comments
'''
"""
return self.request(
'differential.createcomment',
"differential.createcomment",
revision_id=revision_id,
message=message,
attach_inlines=1,
)
def load_parents(self, revision_phid):
'''
"""
Recursively load parents from a stack of revision
'''
parents, phids = [], [revision_phid, ]
"""
parents, phids = [], [revision_phid]
while phids:
phid = phids.pop()
out = self.request(
'edge.search',
types=['revision.parent', ],
sourcePHIDs=[phid, ]
"edge.search", types=["revision.parent"], sourcePHIDs=[phid]
)
for element in out['data']:
rev = element['destinationPHID']
for element in out["data"]:
rev = element["destinationPHID"]
if rev in parents:
break
@ -333,118 +351,113 @@ class PhabricatorAPI(object):
return parents
def load_or_create_build_autotarget(self, object_phid, target_keys):
'''
"""
Retrieve or create a build autotarget.
'''
"""
res = self.request(
'harbormaster.queryautotargets',
"harbormaster.queryautotargets",
objectPHID=object_phid,
targetKeys=target_keys
targetKeys=target_keys,
)
return res['targetMap']
return res["targetMap"]
def search_buildable(self, object_phid=None, buildable_phid=None):
'''
"""
Search HarborMaster buildables linked to an object (diff, revision, ...)
'''
assert (object_phid is not None) or (buildable_phid is not None), \
'Specify object_phid or buildable_phid'
"""
assert (object_phid is not None) or (
buildable_phid is not None
), "Specify object_phid or buildable_phid"
constraints = {}
if object_phid is not None:
constraints['objectPHIDs'] = [object_phid, ]
constraints["objectPHIDs"] = [object_phid]
if buildable_phid is not None:
constraints['phids'] = [buildable_phid, ]
out = self.request(
'harbormaster.buildable.search',
constraints=constraints,
)
return out['data']
constraints["phids"] = [buildable_phid]
out = self.request("harbormaster.buildable.search", constraints=constraints)
return out["data"]
def search_build(self, build_phid=None, buildable_phid=None, plans=[]):
'''
"""
Search HarborMaster build for a buildable
Supports HarborMaster Build Plan filtering
'''
assert (build_phid is not None) or (buildable_phid is not None), \
'Specify build_phid or buildable_phid'
"""
assert (build_phid is not None) or (
buildable_phid is not None
), "Specify build_phid or buildable_phid"
constraints = {}
if build_phid is not None:
constraints['phids'] = [build_phid, ]
constraints["phids"] = [build_phid]
if buildable_phid is not None:
constraints['buildables'] = [buildable_phid, ]
constraints["buildables"] = [buildable_phid]
if plans:
constraints['plans'] = plans
out = self.request(
'harbormaster.build.search',
constraints=constraints,
)
return out['data']
constraints["plans"] = plans
out = self.request("harbormaster.build.search", constraints=constraints)
return out["data"]
def search_build_target(self, build_phid=None, build_target_phid=None):
'''
"""
Search HarborMaster build targets for a build
'''
assert (build_phid is not None) or (build_target_phid is not None), \
'Specify build_phid or build_target_phid'
"""
assert (build_phid is not None) or (
build_target_phid is not None
), "Specify build_phid or build_target_phid"
constraints = {}
if build_phid is not None:
constraints['buildPHIDs'] = [build_phid, ]
constraints["buildPHIDs"] = [build_phid]
if build_target_phid is not None:
constraints['phids'] = [build_target_phid, ]
constraints["phids"] = [build_target_phid]
out = self.request(
'harbormaster.target.search',
constraints=constraints,
)
return out['data']
out = self.request("harbormaster.target.search", constraints=constraints)
return out["data"]
def find_diff_build(self, object_phid, build_plan_phid):
'''
"""
Find a specific build and its targets for a Diff and an HarborMaster build plan
'''
"""
assert isinstance(object_phid, str)
assert object_phid[0:10] in ('PHID-DIFF-', 'PHID-DREV-')
assert build_plan_phid.startswith('PHID-HMCP-')
assert object_phid[0:10] in ("PHID-DIFF-", "PHID-DREV-")
assert build_plan_phid.startswith("PHID-HMCP-")
# First find the buildable for this diff
buildables = self.search_buildable(object_phid=object_phid)
assert len(buildables) == 1
buildable = buildables[0]
logger.info('Found HarborMaster buildable id={id} phid={phid}'.format(**buildable))
logger.info(
"Found HarborMaster buildable id={id} phid={phid}".format(**buildable)
)
# Then find the build in that buildable & plan
builds = self.search_build(
buildable_phid=buildable['phid'],
plans=[build_plan_phid, ]
buildable_phid=buildable["phid"], plans=[build_plan_phid]
)
assert len(builds) == 1
build = builds[0]
logger.info('Found HarborMaster build id={id} phid={phid}'.format(**build))
logger.info("Found HarborMaster build id={id} phid={phid}".format(**build))
# Finally look for the build targets
targets = self.search_build_target(build_phid=build['phid'])
logger.info('Found {} HarborMaster build targets'.format(len(targets)))
targets = self.search_build_target(build_phid=build["phid"])
logger.info("Found {} HarborMaster build targets".format(len(targets)))
return build, targets
def find_target_buildable(self, build_target_phid):
'''
"""
Find a Phabricator buildable from its build target
'''
"""
assert isinstance(build_target_phid, str)
assert build_target_phid.startswith('PHID-HMBT-')
assert build_target_phid.startswith("PHID-HMBT-")
# First lookup the target
targets = self.search_build_target(build_target_phid=build_target_phid)
assert len(targets) == 1, 'Build target not found'
build_phid = targets[0]['fields']['buildPHID']
logger.info('Found HarborMaster build {}'.format(build_phid))
assert len(targets) == 1, "Build target not found"
build_phid = targets[0]["fields"]["buildPHID"]
logger.info("Found HarborMaster build {}".format(build_phid))
# Then lookup the build
builds = self.search_build(build_phid=build_phid)
assert len(builds) == 1
buildable_phid = builds[0]['fields']['buildablePHID']
logger.info('Found HarborMaster buildable {}'.format(buildable_phid))
buildable_phid = builds[0]["fields"]["buildablePHID"]
logger.info("Found HarborMaster buildable {}".format(buildable_phid))
# Finally load the buildable
buildables = self.search_buildable(buildable_phid=buildable_phid)
@ -452,55 +465,59 @@ class PhabricatorAPI(object):
return buildables[0]
def update_build_target(self, build_target_phid, state, unit=[], lint=[]):
'''
"""
Update unit test / linting data for a given build target.
'''
assert all(map(lambda i: isinstance(i, LintResult), lint)), \
'Only support LintResult instances'
assert all(map(lambda i: isinstance(i, UnitResult), unit)), \
'Only support UnitResult instances'
"""
assert all(
map(lambda i: isinstance(i, LintResult), lint)
), "Only support LintResult instances"
assert all(
map(lambda i: isinstance(i, UnitResult), unit)
), "Only support UnitResult instances"
assert isinstance(state, BuildState)
return self.request(
'harbormaster.sendmessage',
"harbormaster.sendmessage",
buildTargetPHID=build_target_phid,
type=state.value,
unit=unit,
lint=lint,
)
def create_harbormaster_artifact(self, build_target_phid, artifact_type, key, payload):
'''
def create_harbormaster_artifact(
self, build_target_phid, artifact_type, key, payload
):
"""
Create an artifact on HarborMaster
'''
"""
assert isinstance(artifact_type, ArtifactType)
assert isinstance(payload, dict)
return self.request(
'harbormaster.createartifact',
"harbormaster.createartifact",
buildTargetPHID=build_target_phid,
artifactType=artifact_type.value,
artifactKey=key,
artifactData=payload,
)
def create_harbormaster_uri(self, build_target_phid, artifact_key, name, uri, external=True):
'''
def create_harbormaster_uri(
self, build_target_phid, artifact_key, name, uri, external=True
):
"""
Helper to create a URI Harbormaster Artifact
'''
"""
out = self.create_harbormaster_artifact(
build_target_phid=build_target_phid,
artifact_type=ArtifactType.Uri,
key=artifact_key,
payload={
'uri': uri,
'name': name,
'ui.external': external,
},
payload={"uri": uri, "name": name, "ui.external": external},
)
logger.info(
"Created HarborMaster link on {} : {}".format(build_target_phid, uri)
)
logger.info('Created HarborMaster link on {} : {}'.format(build_target_phid, uri))
return out
def upload_coverage_results(self, object_phid, coverage_data):
'''
"""
Upload code coverage results to a Phabricator object.
`coverage_data` is an object in the format:
@ -517,135 +534,125 @@ class PhabricatorAPI(object):
- N means "not executable";
- C means "covered";
- X means that no data is available about that line.
'''
"""
# TODO: We are temporarily using arcanist.unit, but we should switch to something
# different after https://bugzilla.mozilla.org/show_bug.cgi?id=1487843 is resolved.
res = self.load_or_create_build_autotarget(object_phid, ['arcanist.unit'])
build_target_phid = res['arcanist.unit']
res = self.load_or_create_build_autotarget(object_phid, ["arcanist.unit"])
build_target_phid = res["arcanist.unit"]
self.update_build_target(
build_target_phid,
BuildState.Pass,
unit=[
UnitResult(
name='Aggregate coverage information',
name="Aggregate coverage information",
result=UnitResultState.Pass,
coverage=coverage_data,
)
]
],
)
def upload_lint_results(self, object_phid, state, lint_data):
'''
"""
Upload linting/static analysis results to a Phabricator object.
`type` is either "pass" if no errors were found, "fail" otherwise.
`lint_data` is an array of LintResult objects.
'''
"""
assert isinstance(state, BuildState)
# TODO: We are temporarily using arcanist.lint, but we should switch to something
# different after https://bugzilla.mozilla.org/show_bug.cgi?id=1487843 is resolved.
res = self.load_or_create_build_autotarget(object_phid, ['arcanist.lint'])
build_target_phid = res['arcanist.lint']
res = self.load_or_create_build_autotarget(object_phid, ["arcanist.lint"])
build_target_phid = res["arcanist.lint"]
self.update_build_target(
build_target_phid,
state,
lint=lint_data,
)
self.update_build_target(build_target_phid, state, lint=lint_data)
def search_projects(self, slugs=None, **params):
'''
"""
Search Phabricator projects descriptions
'''
"""
constraints = {}
if slugs:
constraints['slugs'] = slugs
out = self.request(
'project.search',
constraints=constraints,
**params
)
return out['data']
constraints["slugs"] = slugs
out = self.request("project.search", constraints=constraints, **params)
return out["data"]
def request(self, path, **payload):
'''
"""
Send a request to Phabricator API
'''
"""
# Add api token to payload
payload['__conduit__'] = {
'token': self.api_key,
}
payload["__conduit__"] = {"token": self.api_key}
# Run POST request on api
response = requests.post(
self.url + path,
data=urlencode({
'params': json.dumps(payload),
'output': 'json'
}),
data=urlencode({"params": json.dumps(payload), "output": "json"}),
)
# Check response
data = response.json()
assert response.ok
assert 'error_code' in data
assert "error_code" in data
ConduitError.raise_if_error(data)
# Outputs result
assert 'result' in data
return data['result']
assert "result" in data
return data["result"]
def load_patches_stack(self, repo, diff, default_revision='central'):
'''
def load_patches_stack(self, repo, diff, default_revision="central"):
"""
Load full stack of patches from Phabricator into a mercurial repository:
* uses a diff dict from search_diffs
* setup repo to base revision from Mozilla Central
* Apply previous needed patches from Phabricator
'''
"""
assert isinstance(repo, hglib.client.hgclient)
assert isinstance(diff, dict)
assert 'phid' in diff
assert 'id' in diff
assert 'revisionPHID' in diff
assert 'baseRevision' in diff
assert "phid" in diff
assert "id" in diff
assert "revisionPHID" in diff
assert "baseRevision" in diff
# Diff PHIDs from our patch to its base
patches = OrderedDict()
patches[diff['phid']] = diff['id']
patches[diff["phid"]] = diff["id"]
parents = self.load_parents(diff['revisionPHID'])
parents = self.load_parents(diff["revisionPHID"])
hg_base = None
if parents:
# Load all parent diffs
for parent in parents:
logger.info('Loading parent diff {}'.format(parent))
logger.info("Loading parent diff {}".format(parent))
# Sort parent diffs by their id to load the most recent patch
parent_diffs = sorted(
self.search_diffs(revision_phid=parent),
key=lambda x: x['id'],
self.search_diffs(revision_phid=parent), key=lambda x: x["id"]
)
last_diff = parent_diffs[-1]
patches[last_diff['phid']] = last_diff['id']
patches[last_diff["phid"]] = last_diff["id"]
# Use parent until a base revision is available in the repository
# This is needed to support stacks of patches with already merged patches
diff_base = last_diff['baseRevision']
diff_base = last_diff["baseRevision"]
if revision_available(repo, diff_base):
logger.info('Found a parent with landed revision {}, stopping stack here'.format(diff_base))
logger.info(
"Found a parent with landed revision {}, stopping stack here".format(
diff_base
)
)
hg_base = diff_base
break
else:
# Use base revision from top diff
hg_base = diff['baseRevision']
hg_base = diff["baseRevision"]
# When base revision is missing, update to default revision
if hg_base is None or not revision_available(repo, hg_base):
logger.warning('Missing base revision {} from Phabricator'.format(hg_base))
logger.warning("Missing base revision {} from Phabricator".format(hg_base))
hg_base = default_revision
# Load all patches from their numerical ID
@ -654,18 +661,15 @@ class PhabricatorAPI(object):
# Update the repo to base revision
try:
logger.info('Updating repo to revision {}'.format(hg_base))
repo.update(
rev=hg_base,
clean=True,
)
logger.info("Updating repo to revision {}".format(hg_base))
repo.update(rev=hg_base, clean=True)
except hglib.error.CommandError:
raise Exception('Failed to update to revision {}'.format(hg_base))
raise Exception("Failed to update to revision {}".format(hg_base))
# Get current revision using full informations tuple from hglib
# Get current revision using full information tuple from hglib
revision = repo.identify(id=True).strip()
revision = repo.log(revision, limit=1)[0]
logger.info('Updated repo to revision {}'.format(revision.node))
logger.info("Updated repo to revision {}".format(revision.node))
# Outputs base revision and patches from the bottom one up to the target
return (revision, list(reversed(patches.items())))
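A hedged end-to-end sketch of this client; the token and revision id are placeholders, and since the constructor issues a user.whoami call, network access to Phabricator is assumed:

from libmozdata.phabricator import PhabricatorAPI

api = PhabricatorAPI(api_key="api-xxxxxxxxxxxxxxxx")  # placeholder token

# Resolve a revision, list its diffs, then fetch the newest raw patch.
revision = api.load_revision(rev_id=1234)  # illustrative id
diffs = api.search_diffs(revision_phid=revision["phid"])
raw = api.load_raw_diff(max(d["id"] for d in diffs))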

View file

@ -2,22 +2,23 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import six
import functools
import re
from datetime import timedelta
from .connection import (Connection, Query)
from . import utils
from . import config
import six
from . import config, utils
from .connection import Connection, Query
class Redash(Connection):
"""re:dash connection: https://sql.telemetry.mozilla.org
"""
RE_DASH_URL = config.get('Re:dash', 'URL', 'https://sql.telemetry.mozilla.org')
API_URL = RE_DASH_URL + '/api/queries'
TOKEN = config.get('Re:dash', 'token', '')
RE_DASH_URL = config.get("Re:dash", "URL", "https://sql.telemetry.mozilla.org")
API_URL = RE_DASH_URL + "/api/queries"
TOKEN = config.get("Re:dash", "token", "")
def __init__(self, queries):
"""Constructor
@ -29,7 +30,7 @@ class Redash(Connection):
def get_header(self):
header = super(Redash, self).get_header()
header['Authorization'] = 'Key %s' % self.get_apikey()
header["Authorization"] = "Key %s" % self.get_apikey()
return header
@staticmethod
@ -45,8 +46,8 @@ class Redash(Connection):
@staticmethod
def __get_rows(channel, versions, rows):
if channel == 'beta':
pat = re.compile(r'([0-9]+\.0)b[0-9]+')
if channel == "beta":
pat = re.compile(r"([0-9]+\.0)b[0-9]+")
_versions = set()
for v in versions:
m = pat.match(v)
@ -56,7 +57,7 @@ class Redash(Connection):
_versions = set(versions)
majors = set()
pat_major = re.compile(r'([0-9]+)')
pat_major = re.compile(r"([0-9]+)")
for v in versions:
m = pat_major.match(v)
if m:
@ -64,11 +65,11 @@ class Redash(Connection):
_rows = []
for row in rows:
if row['channel'] == channel:
if 'build_version' not in row:
if row["channel"] == channel:
if "build_version" not in row:
continue
v = row['build_version']
v = row["build_version"]
if not v:
continue
@ -94,13 +95,27 @@ class Redash(Connection):
"""
data = {}
if isinstance(query_ids, six.string_types):
url = Redash.API_URL + '/' + query_ids + '/results.json'
Redash(Query(url, None, functools.partial(Redash.default_handler, query_ids), data)).wait()
url = Redash.API_URL + "/" + query_ids + "/results.json"
Redash(
Query(
url,
None,
functools.partial(Redash.default_handler, query_ids),
data,
)
).wait()
else:
queries = []
url = Redash.API_URL + '/%s/results.json'
url = Redash.API_URL + "/%s/results.json"
for query_id in query_ids:
queries.append(Query(url % query_id, None, functools.partial(Redash.default_handler, query_id), data))
queries.append(
Query(
url % query_id,
None,
functools.partial(Redash.default_handler, query_id),
data,
)
)
Redash(queries=queries).wait()
return data
@ -119,10 +134,10 @@ class Redash(Connection):
Returns:
dict: containing result in json for each query
"""
qid = '387' if product == 'FennecAndroid' else '346'
qid = "387" if product == "FennecAndroid" else "346"
khours = Redash.get(qid)
rows = khours[qid]['query_result']['data']['rows']
rows = khours[qid]["query_result"]["data"]["rows"]
res = {}
start_date = utils.get_date_ymd(start_date)
@ -131,14 +146,14 @@ class Redash(Connection):
# init the data
duration = (end_date - start_date).days
for i in range(duration + 1):
res[start_date + timedelta(i)] = 0.
res[start_date + timedelta(i)] = 0.0
rows = Redash.__get_rows(channel, versions, rows)
for row in rows:
d = utils.get_date_ymd(row['activity_date'])
d = utils.get_date_ymd(row["activity_date"])
if start_date <= d <= end_date:
res[d] += row['usage_khours']
res[d] += row["usage_khours"]
return res
@ -156,16 +171,12 @@ class Redash(Connection):
Returns:
dict: containing result in json for each query
"""
qid = '400' if product == 'FennecAndroid' else '399'
qid = "400" if product == "FennecAndroid" else "399"
crashes = Redash.get(qid)
rows = crashes[qid]['query_result']['data']['rows']
rows = crashes[qid]["query_result"]["data"]["rows"]
res = {}
stats = {'m+c': 0.,
'main': 0.,
'content': 0.,
'plugin': 0.,
'all': 0.}
stats = {"m+c": 0.0, "main": 0.0, "content": 0.0, "plugin": 0.0, "all": 0.0}
start_date = utils.get_date_ymd(start_date)
end_date = utils.get_date_ymd(end_date)
@ -178,13 +189,13 @@ class Redash(Connection):
rows = Redash.__get_rows(channel, versions, rows)
for row in rows:
d = utils.get_date_ymd(row['date'])
d = utils.get_date_ymd(row["date"])
if d >= start_date and d <= end_date:
stats = res[d]
stats['m+c'] += row['main'] + row['content']
stats['main'] += row['main']
stats['content'] += row['content']
stats['plugin'] += row['plugin'] + row['gmplugin']
stats['all'] += row['total']
stats["m+c"] += row["main"] + row["content"]
stats["main"] += row["main"]
stats["content"] += row["content"]
stats["plugin"] += row["plugin"] + row["gmplugin"]
stats["all"] += row["total"]
return res
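A short sketch of the raw access pattern used by the helpers above, assuming a Re:dash API token is configured; query id 346 is the desktop usage-hours query referenced earlier:

from libmozdata.redash import Redash

data = Redash.get("346")
rows = data["346"]["query_result"]["data"]["rows"]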

View file

@ -3,28 +3,28 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import requests
from . import utils
from .wiki_parser import InvalidWiki, WikiParser
CALENDAR_URL = 'https://wiki.mozilla.org/Release_Management/Calendar'
CALENDAR_URL = "https://wiki.mozilla.org/Release_Management/Calendar"
_CALENDAR = None
_ALL = None
def _get_sub_versions(s):
s = s.strip()
s = s.split('.')
return [int(v.split(' ')[0]) for v in s]
s = s.split(".")
return [int(v.split(" ")[0]) for v in s]
def get_versions(s):
fx = 'Firefox '
fx = "Firefox "
if not s.startswith(fx):
raise InvalidWiki('Invalid version format, expect: \"Firefox ...\"')
raise InvalidWiki('Invalid version format, expect: "Firefox ..."')
N = len(fx)
version = s[N:]
versions = version.split(';')
versions = version.split(";")
return [_get_sub_versions(v) for v in versions]
@ -33,36 +33,36 @@ def get_calendar():
if _CALENDAR is not None:
return _CALENDAR
html = requests.get(CALENDAR_URL).text.encode('ascii', errors='ignore')
html = requests.get(CALENDAR_URL).text.encode("ascii", errors="ignore")
parser = WikiParser(tables=[0])
try:
parser.feed(html)
except StopIteration:
table = parser.get_tables()[0]
if [
'Quarter',
'Soft Freeze',
'Merge Date',
'Central',
'Beta',
'Release Date',
'Release',
'ESR',
"Quarter",
"Soft Freeze",
"Merge Date",
"Central",
"Beta",
"Release Date",
"Release",
"ESR",
] != table[0]:
raise InvalidWiki('Column headers are wrong')
raise InvalidWiki("Column headers are wrong")
_CALENDAR = []
for row in table[1:]:
row = row[1:]
_CALENDAR.append(
{
'soft freeze': utils.get_date_ymd(row[0]),
'merge': utils.get_date_ymd(row[1]),
'central': get_versions(row[2])[0][0],
'beta': get_versions(row[3])[0][0],
'release date': utils.get_date_ymd(row[4]),
'release': get_versions(row[5])[0][0],
'esr': get_versions(row[6]),
"soft freeze": utils.get_date_ymd(row[0]),
"merge": utils.get_date_ymd(row[1]),
"central": get_versions(row[2])[0][0],
"beta": get_versions(row[3])[0][0],
"release date": utils.get_date_ymd(row[4]),
"release": get_versions(row[5])[0][0],
"esr": get_versions(row[6]),
}
)
return _CALENDAR
@ -70,10 +70,10 @@ def get_calendar():
def get_next_release_date():
cal = get_calendar()
now = utils.get_date_ymd('today')
now = utils.get_date_ymd("today")
for c in cal:
if now < c['release date']:
return c['release date']
if now < c["release date"]:
return c["release date"]
return None
@ -82,7 +82,7 @@ def get_all():
if _ALL is not None:
return _ALL
html = requests.get(CALENDAR_URL).text.encode('ascii', errors='ignore')
html = requests.get(CALENDAR_URL).text.encode("ascii", errors="ignore")
parser = WikiParser(tables=list(range(0, 10)))
try:
parser.feed(html)
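Taken together, a minimal sketch of this calendar module, assuming it is importable as libmozdata.release_calendar and that the wiki page is reachable:

from libmozdata import release_calendar as rc

cal = rc.get_calendar()                # list of dicts keyed as built above
next_date = rc.get_next_release_date()

# The version parser handles multi-version cells:
rc.get_versions("Firefox 69; 60.9")    # -> [[69], [60, 9]]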

View file

@ -3,17 +3,17 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import requests
from . import utils
from .wiki_parser import InvalidWiki, WikiParser
from .release_calendar import get_versions
from .wiki_parser import InvalidWiki, WikiParser
OWNERS_URL = 'https://wiki.mozilla.org/Release_Management/Release_owners'
OWNERS_URL = "https://wiki.mozilla.org/Release_Management/Release_owners"
_OWNERS = None
def _get_list_people(s):
return [x.strip() for x in s.split(',')]
return [x.strip() for x in s.split(",")]
def get_owners():
@ -21,22 +21,22 @@ def get_owners():
if _OWNERS is not None:
return _OWNERS
html = requests.get(OWNERS_URL).text.encode('ascii', errors='ignore')
html = requests.get(OWNERS_URL).text.encode("ascii", errors="ignore")
parser = WikiParser(tables=[0])
try:
parser.feed(html)
except StopIteration:
table = parser.get_tables()[0]
if [
'Firefox Version',
'Owner',
'Secondary',
'Engineering REO',
'Release Duty',
'Corresponding ESR',
'Release Date',
"Firefox Version",
"Owner",
"Secondary",
"Engineering REO",
"Release Duty",
"Corresponding ESR",
"Release Date",
] != table[0]:
raise InvalidWiki('Column headers are wrong')
raise InvalidWiki("Column headers are wrong")
_OWNERS = []
for row in table[1:]:
@ -48,13 +48,13 @@ def get_owners():
_OWNERS.append(
{
'version': get_versions(row[0])[0][0],
'owner': row[1],
'secondary': row[2],
'engineering reo': row[3],
'release duty': _get_list_people(row[4]),
'corresponding esr': row[5],
'release date': release_date,
"version": get_versions(row[0])[0][0],
"owner": row[1],
"secondary": row[2],
"engineering reo": row[3],
"release duty": _get_list_people(row[4]),
"corresponding esr": row[5],
"release date": release_date,
}
)
return _OWNERS
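A matching sketch for the owners table, assuming the module path libmozdata.release_owners:

from libmozdata import release_owners as ro

for entry in ro.get_owners():
    print(entry["version"], entry["owner"], entry["release date"])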

View file

@ -3,18 +3,18 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import six
from .connection import (Connection, Query)
from . import utils
from . import config
from . import config, utils
from .connection import Connection, Query
class Socorro(Connection):
"""Socorro connection: https://crash-stats.mozilla.com
"""
CRASH_STATS_URL = config.get('Socorro', 'URL', 'https://crash-stats.mozilla.com')
API_URL = CRASH_STATS_URL + '/api'
TOKEN = config.get('Socorro', 'token', '')
CRASH_STATS_URL = config.get("Socorro", "URL", "https://crash-stats.mozilla.com")
API_URL = CRASH_STATS_URL + "/api"
TOKEN = config.get("Socorro", "token", "")
def __init__(self, queries, **kwargs):
"""Constructor
@ -26,7 +26,7 @@ class Socorro(Connection):
def get_header(self):
header = super(Socorro, self).get_header()
header['Auth-Token'] = self.get_apikey()
header["Auth-Token"] = self.get_apikey()
return header
@ -34,11 +34,13 @@ class SuperSearch(Socorro):
"""SuperSearch: https://crash-stats.mozilla.com/search/?product=&_dont_run=1
"""
URL = Socorro.API_URL + '/SuperSearch/'
URL_UNREDACTED = Socorro.API_URL + '/SuperSearchUnredacted/'
WEB_URL = Socorro.CRASH_STATS_URL + '/search/'
URL = Socorro.API_URL + "/SuperSearch/"
URL_UNREDACTED = Socorro.API_URL + "/SuperSearchUnredacted/"
WEB_URL = Socorro.CRASH_STATS_URL + "/search/"
def __init__(self, params=None, handler=None, handlerdata=None, queries=None, **kwargs):
def __init__(
self, params=None, handler=None, handlerdata=None, queries=None, **kwargs
):
"""Constructor
Args:
@ -52,23 +54,32 @@ class SuperSearch(Socorro):
else:
url = SuperSearch.URL
unredacted = False
if '_facets' in params:
facets = params['_facets']
if 'url' in facets or 'email' in facets:
if "_facets" in params:
facets = params["_facets"]
if "url" in facets or "email" in facets:
url = SuperSearch.URL_UNREDACTED
unredacted = True
if not unredacted and '_columns' in params:
columns = params['_columns']
if 'url' in columns or 'email' in columns:
if not unredacted and "_columns" in params:
columns = params["_columns"]
if "url" in columns or "email" in columns:
url = SuperSearch.URL_UNREDACTED
if not unredacted:
for k, v in params.items():
if 'url' in k or 'email' in k or ((isinstance(v, list) or isinstance(v, six.string_types)) and ('url' in v or 'email' in v)):
if (
"url" in k
or "email" in k
or (
(isinstance(v, list) or isinstance(v, six.string_types))
and ("url" in v or "email" in v)
)
):
url = SuperSearch.URL_UNREDACTED
unredacted = True
break
super(SuperSearch, self).__init__(Query(url, params, handler, handlerdata), **kwargs)
super(SuperSearch, self).__init__(
Query(url, params, handler, handlerdata), **kwargs
)
@staticmethod
def get_link(params):
@ -89,13 +100,13 @@ class SuperSearch(Socorro):
if end:
_end = utils.get_date_ymd(end)
today = utils.get_date_ymd('today')
today = utils.get_date_ymd("today")
if _end > today:
search_date = ['>=' + _start]
search_date = [">=" + _start]
else:
search_date = ['>=' + _start, '<' + utils.get_date_str(_end)]
search_date = [">=" + _start, "<" + utils.get_date_str(_end)]
else:
search_date = ['>=' + _start]
search_date = [">=" + _start]
return search_date
@ -104,9 +115,11 @@ class ProcessedCrash(Socorro):
"""ProcessedCrash: https://crash-stats.mozilla.com/api/#ProcessedCrash
"""
URL = Socorro.API_URL + '/ProcessedCrash/'
URL = Socorro.API_URL + "/ProcessedCrash/"
def __init__(self, params=None, handler=None, handlerdata=None, queries=None, **kwargs):
def __init__(
self, params=None, handler=None, handlerdata=None, queries=None, **kwargs
):
"""Constructor
Args:
@ -118,7 +131,9 @@ class ProcessedCrash(Socorro):
if queries:
super(ProcessedCrash, self).__init__(queries, **kwargs)
else:
super(ProcessedCrash, self).__init__(Query(ProcessedCrash.URL, params, handler, handlerdata), **kwargs)
super(ProcessedCrash, self).__init__(
Query(ProcessedCrash.URL, params, handler, handlerdata), **kwargs
)
@staticmethod
def default_handler(json, data):
@ -142,22 +157,30 @@ class ProcessedCrash(Socorro):
"""
data = {}
__base = {'crash_id': None,
'datatype': 'processed'}
__base = {"crash_id": None, "datatype": "processed"}
if isinstance(crashids, six.string_types):
__base['crash_id'] = crashids
__base["crash_id"] = crashids
_dict = {}
data[crashids] = _dict
ProcessedCrash(params=__base, handler=ProcessedCrash.default_handler, handlerdata=_dict).wait()
ProcessedCrash(
params=__base, handler=ProcessedCrash.default_handler, handlerdata=_dict
).wait()
else:
queries = []
for crashid in crashids:
cparams = __base.copy()
cparams['crash_id'] = crashid
cparams["crash_id"] = crashid
_dict = {}
data[crashid] = _dict
queries.append(Query(ProcessedCrash.URL, cparams, ProcessedCrash.default_handler, _dict))
queries.append(
Query(
ProcessedCrash.URL,
cparams,
ProcessedCrash.default_handler,
_dict,
)
)
ProcessedCrash(queries=queries).wait()
return data
@ -167,9 +190,11 @@ class Bugs(Socorro):
"""Bugs: https://crash-stats.mozilla.com/api/#Bugs
"""
URL = Socorro.API_URL + '/Bugs/'
URL = Socorro.API_URL + "/Bugs/"
def __init__(self, params=None, handler=None, handlerdata=None, queries=None, **kwargs):
def __init__(
self, params=None, handler=None, handlerdata=None, queries=None, **kwargs
):
"""Constructor
Args:
@ -181,7 +206,9 @@ class Bugs(Socorro):
if queries:
super(Bugs, self).__init__(queries, **kwargs)
else:
super(Bugs, self).__init__(Query(Bugs.URL, params, handler, handlerdata), **kwargs)
super(Bugs, self).__init__(
Query(Bugs.URL, params, handler, handlerdata), **kwargs
)
@staticmethod
def get_bugs(signatures):
@ -193,21 +220,28 @@ class Bugs(Socorro):
Returns:
dict: the bugs for each signature
"""
def default_handler(json, data):
if json['total']:
for hit in json['hits']:
signature = hit['signature']
if json["total"]:
for hit in json["hits"]:
signature = hit["signature"]
if signature in data:
data[signature].add(hit['id'])
data[signature].add(hit["id"])
if isinstance(signatures, six.string_types):
data = {signatures: set()}
Bugs(params={'signatures': signatures}, handler=default_handler, handlerdata=data).wait()
Bugs(
params={"signatures": signatures},
handler=default_handler,
handlerdata=data,
).wait()
else:
data = {s: set() for s in signatures}
queries = []
for sgns in Connection.chunks(signatures, 10):
queries.append(Query(Bugs.URL, {'signatures': sgns}, default_handler, data))
queries.append(
Query(Bugs.URL, {"signatures": sgns}, default_handler, data)
)
Bugs(queries=queries).wait()
for k, v in data.items():
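For illustration, fetching the Bugzilla ids attached to a crash signature; the signature string is illustrative, and a Socorro API token may be required:

from libmozdata.socorro import Bugs

bugs = Bugs.get_bugs("OOM | small")  # maps the signature to its bug ids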

View file

@ -2,20 +2,20 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import six
import operator
import calendar
from datetime import (datetime, date, timedelta)
from dateutil.relativedelta import relativedelta
import math
import operator
import os.path
import random
from datetime import date, datetime, timedelta
import dateutil.parser
import pytz
import six
from dateutil.relativedelta import relativedelta
from requests.utils import quote
import os.path
__pacific = pytz.timezone('US/Pacific')
__pacific = pytz.timezone("US/Pacific")
def get_best(stats):
@ -43,7 +43,7 @@ def get_timestamp(dt):
int: the corresponding timestamp
"""
if isinstance(dt, six.string_types):
dt = datetime.utcnow() if dt == 'now' else get_date_ymd(dt)
dt = datetime.utcnow() if dt == "now" else get_date_ymd(dt)
return int(calendar.timegm(dt.timetuple()))
@ -73,15 +73,17 @@ def get_date_ymd(dt):
if isinstance(dt, datetime):
return as_utc(dt)
if dt == 'today':
if dt == "today":
today = datetime.utcnow()
return pytz.utc.localize(datetime(today.year, today.month, today.day))
elif dt == 'tomorrow':
elif dt == "tomorrow":
tomorrow = datetime.utcnow() + timedelta(1)
return pytz.utc.localize(datetime(tomorrow.year, tomorrow.month, tomorrow.day))
elif dt == 'yesterday':
elif dt == "yesterday":
yesterday = datetime.utcnow() - timedelta(1)
return pytz.utc.localize(datetime(yesterday.year, yesterday.month, yesterday.day))
return pytz.utc.localize(
datetime(yesterday.year, yesterday.month, yesterday.day)
)
return as_utc(dateutil.parser.parse(dt))
@ -103,7 +105,7 @@ def get_date_str(ymd):
Returns:
str: the date as a string 'Year-month-day'
"""
return ymd.strftime('%Y-%m-%d')
return ymd.strftime("%Y-%m-%d")
def get_date(_date, delta=None):
@ -140,7 +142,7 @@ def is64(cpu_name):
Returns:
bool: True if 64 is in the name
"""
return '64' in cpu_name
return "64" in cpu_name
def percent(x):
@ -166,7 +168,7 @@ def simple_percent(x):
"""
if math.floor(x) == x:
x = int(x)
return str(x) + '%'
return str(x) + "%"
def get_sample(data, fraction):
@ -203,7 +205,9 @@ def get_date_from_buildid(bid):
minute = int(bid[10:12])
second = int(bid[12:14])
return __pacific.localize(datetime(year, month, day, hour, minute, second)).astimezone(pytz.utc)
return __pacific.localize(
datetime(year, month, day, hour, minute, second)
).astimezone(pytz.utc)
def get_buildid_from_date(d):
@ -215,7 +219,7 @@ def get_buildid_from_date(d):
Returns:
str: the build_id
"""
return d.astimezone(__pacific).strftime('%Y%m%d%H%M%S')
return d.astimezone(__pacific).strftime("%Y%m%d%H%M%S")
def as_utc(d):
@ -244,7 +248,11 @@ def get_moz_date(d):
Returns:
datetime.datetime: the localized date
"""
return pytz.timezone('US/Pacific').localize(dateutil.parser.parse(d)).astimezone(pytz.utc)
return (
pytz.timezone("US/Pacific")
.localize(dateutil.parser.parse(d))
.astimezone(pytz.utc)
)
def rate(x, y):
@ -257,18 +265,18 @@ def rate(x, y):
Returns:
float: x / y or Nan if y == 0
"""
return float(x) / float(y) if y else float('nan')
return float(x) / float(y) if y else float("nan")
def get_guttenberg_death():
return get_date_ymd('1468-02-03T00:00:00Z')
return get_date_ymd("1468-02-03T00:00:00Z")
def signatures_parser(signatures):
_set = set()
if signatures:
signatures = map(lambda s: s.strip(' \t\r\n'), signatures.split('[@'))
signatures = map(lambda s: s[:-1].strip(' \t\r\n'), filter(None, signatures))
signatures = map(lambda s: s.strip(" \t\r\n"), signatures.split("[@"))
signatures = map(lambda s: s[:-1].strip(" \t\r\n"), filter(None, signatures))
for s in filter(None, signatures):
_set.add(s)
return list(_set)
@ -278,7 +286,10 @@ def get_monday_sunday(date):
iso = date.isocalendar()
delta_monday = iso[2] - 1
delta_sunday = 7 - iso[2]
return date - relativedelta(days=delta_monday), date + relativedelta(days=delta_sunday)
return (
date - relativedelta(days=delta_monday),
date + relativedelta(days=delta_sunday),
)
def mean_stddev(x):
@ -291,7 +302,7 @@ def mean_stddev(x):
def get_channels():
return ['nightly', 'aurora', 'beta', 'release', 'esr']
return ["nightly", "aurora", "beta", "release", "esr"]
def get_str_list(x):
@ -305,17 +316,32 @@ def get_str_list(x):
def get_x_fwed_for_str(s):
if isinstance(s, six.string_types):
return ', '.join(map(lambda x: x.strip(' \t'), s.split(',')))
return ", ".join(map(lambda x: x.strip(" \t"), s.split(",")))
else:
return ', '.join(map(lambda x: x.strip(' \t'), s))
return ", ".join(map(lambda x: x.strip(" \t"), s))
def get_params_for_url(params):
return '?' + '&'.join([quote(name) + '=' + quote(str(value)) if not isinstance(value, list) else '&'.join([quote(name) + '=' + quote(str(intValue)) for intValue in value]) for name, value in sorted(params.items(), key=lambda p: p[0]) if value is not None]) if params else ''
return (
"?"
+ "&".join(
[
quote(name) + "=" + quote(str(value))
if not isinstance(value, list)
else "&".join(
[quote(name) + "=" + quote(str(intValue)) for intValue in value]
)
for name, value in sorted(params.items(), key=lambda p: p[0])
if value is not None
]
)
if params
else ""
)
def get_url(url):
return url if url.endswith('/') else url + '/'
return url if url.endswith("/") else url + "/"
def get_language(path):
@ -326,24 +352,31 @@ def get_language(path):
name = os.path.basename(path)
extension = os.path.splitext(name)[1][1:]
langs = {
'Shell': ['sh'],
'Xml': ['xml', 'xst'],
'Html': ['html', 'xhtml'],
'Css': ['css'],
'Javascript': ['js', 'jsm'],
'Makefile': ['mk', 'Makefile', 'Makefile.am', 'Makefile.in', 'configure.in', 'autoconf.mk.in'],
'C++': ['cpp', 'hpp', 'hh'],
'C': ['c', 'h'],
'Java': ['java'],
'Font': ['ttf', 'ttf^headers^'],
'Tests': ['reftest.list', 'crashtests.list', ],
'Windows IDL': ['idl'],
'Mozilla XUL': ['xul'],
'Ini': ['ini'],
'License': ['LICENSE'],
'Python': ['py'],
'moz.build': ['moz.build'],
'Rust': ['rs'],
"Shell": ["sh"],
"Xml": ["xml", "xst"],
"Html": ["html", "xhtml"],
"Css": ["css"],
"Javascript": ["js", "jsm"],
"Makefile": [
"mk",
"Makefile",
"Makefile.am",
"Makefile.in",
"configure.in",
"autoconf.mk.in",
],
"C++": ["cpp", "hpp", "hh"],
"C": ["c", "h"],
"Java": ["java"],
"Font": ["ttf", "ttf^headers^"],
"Tests": ["reftest.list", "crashtests.list"],
"Windows IDL": ["idl"],
"Mozilla XUL": ["xul"],
"Ini": ["ini"],
"License": ["LICENSE"],
"Python": ["py"],
"moz.build": ["moz.build"],
"Rust": ["rs"],
}
for lang, names in langs.items():
if name in names or extension in names:
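As a worked example of the URL helper reformatted above, names are sorted and list values expand into repeated parameters; the build-id helpers round-trip through US/Pacific:

from libmozdata import utils

utils.get_params_for_url({"b": 1, "a": ["x", "y"]})
# -> '?a=x&a=y&b=1'

utils.get_buildid_from_date(utils.get_date_ymd("2019-06-11"))
# -> '20190610170000' (UTC midnight rendered in US/Pacific)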

View file

@ -2,11 +2,13 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from os.path import commonprefix
import re
from datetime import timedelta
from icalendar import Calendar
from os.path import commonprefix
import requests
from icalendar import Calendar
from . import utils
__versions = None
@ -14,18 +16,22 @@ __version_dates = None
__stability_version_dates = None
URL_VERSIONS = 'https://product-details.mozilla.org/1.0/firefox_versions.json'
URL_HISTORY = 'https://product-details.mozilla.org/1.0/firefox_history_major_releases.json'
URL_CALENDAR = 'https://www.google.com/calendar/ical/mozilla.com_2d37383433353432352d3939%40resource.calendar.google.com/public/basic.ics'
URL_STABILITY = 'https://product-details.mozilla.org/1.0/firefox_history_stability_releases.json'
URL_VERSIONS = "https://product-details.mozilla.org/1.0/firefox_versions.json"
URL_HISTORY = (
"https://product-details.mozilla.org/1.0/firefox_history_major_releases.json"
)
URL_CALENDAR = "https://www.google.com/calendar/ical/mozilla.com_2d37383433353432352d3939%40resource.calendar.google.com/public/basic.ics"
URL_STABILITY = (
"https://product-details.mozilla.org/1.0/firefox_history_stability_releases.json"
)
REGEX_EVENT = re.compile('Firefox ([0-9]+) Release', re.IGNORECASE)
REGEX_EVENT = re.compile("Firefox ([0-9]+) Release", re.IGNORECASE)
def __get_major(v):
if not v:
return
return int(v.split('.')[0])
return int(v.split(".")[0])
def __getVersions():
@ -34,23 +40,26 @@ def __getVersions():
Returns:
dict: versions for each channel
"""
def _clean_esr(esr):
if esr is None:
return
return esr.endswith('esr') and esr[:-3] or esr
return esr.endswith("esr") and esr[:-3] or esr
resp = requests.get(URL_VERSIONS)
data = resp.json()
nightly = data['FIREFOX_NIGHTLY']
esr_next = _clean_esr(data['FIREFOX_ESR_NEXT'])
esr = _clean_esr(data['FIREFOX_ESR'])
nightly = data["FIREFOX_NIGHTLY"]
esr_next = _clean_esr(data["FIREFOX_ESR_NEXT"])
esr = _clean_esr(data["FIREFOX_ESR"])
return {'release': data['LATEST_FIREFOX_VERSION'],
'beta': data['LATEST_FIREFOX_RELEASED_DEVEL_VERSION'],
'nightly': nightly,
'esr': esr_next or esr,
'esr_previous': esr_next is not None and esr or None}
return {
"release": data["LATEST_FIREFOX_VERSION"],
"beta": data["LATEST_FIREFOX_RELEASED_DEVEL_VERSION"],
"nightly": nightly,
"esr": esr_next or esr,
"esr_previous": esr_next is not None and esr or None,
}
def __getVersionDates():
@ -63,12 +72,14 @@ def __getVersionDates():
calendar = Calendar.from_ical(resp.content)
for component in calendar.walk():
if component.name == 'VEVENT':
match = REGEX_EVENT.search(component.get('summary'))
if component.name == "VEVENT":
match = REGEX_EVENT.search(component.get("summary"))
if match:
version = match.group(1) + '.0'
version = match.group(1) + ".0"
if version not in data:
data[version] = utils.get_moz_date(utils.get_date_str(component.decoded('dtstart')))
data[version] = utils.get_moz_date(
utils.get_date_str(component.decoded("dtstart"))
)
return data
@ -103,8 +114,14 @@ def __getMatchingVersion(version, versions_dates):
longest_match = []
longest_match_v = None
for v, d in versions_dates:
match = commonprefix([v.split('.'), str(version).split('.')])
if len(match) > 0 and (len(match) > len(longest_match) or (len(match) == len(longest_match) and int(v[-1]) <= int(longest_match_v[-1]))):
match = commonprefix([v.split("."), str(version).split(".")])
if len(match) > 0 and (
len(match) > len(longest_match)
or (
len(match) == len(longest_match)
and int(v[-1]) <= int(longest_match_v[-1])
)
):
longest_match = match
longest_match_v = v
date = d
@ -127,7 +144,9 @@ def getDate(version):
if not __stability_version_dates:
__stability_version_dates = __getStabilityVersionDates()
return __getMatchingVersion(version, list(__version_dates.items()) + list(__stability_version_dates.items()))
return __getMatchingVersion(
version, list(__version_dates.items()) + list(__stability_version_dates.items())
)
def __getCloserDate(date, versions_dates, negative=False):
@ -135,12 +154,10 @@ def __getCloserDate(date, versions_dates, negative=False):
return d - date
future_dates = [
(v, d)
for v, d in versions_dates
if negative or diff(d) > timedelta(0)
(v, d) for v, d in versions_dates if negative or diff(d) > timedelta(0)
]
if not future_dates:
raise Exception('No future release found')
raise Exception("No future release found")
return min(future_dates, key=lambda i: abs(diff(i[1])))
@ -159,4 +176,8 @@ def getCloserRelease(date, negative=False):
if not __stability_version_dates:
__stability_version_dates = __getStabilityVersionDates()
return __getCloserDate(date, list(__version_dates.items()) + list(__stability_version_dates.items()), negative)
return __getCloserDate(
date,
list(__version_dates.items()) + list(__stability_version_dates.items()),
negative,
)
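For context, __getMatchingVersion above compares dot-split version components with os.path.commonprefix (which accepts any list of sequences, not just paths), so a query version resolves to the date recorded for the longest shared component prefix, while getCloserRelease picks the release whose date minimizes the absolute distance to a given date. A minimal sketch of the prefix matching, using illustrative dates rather than real product-details data:

from datetime import date
from os.path import commonprefix

version_dates = {"66.0": date(2019, 3, 19), "66.0.1": date(2019, 3, 22)}

def matching_date(version, versions_dates):
    best, best_date = [], None
    for v, d in versions_dates.items():
        match = commonprefix([v.split("."), str(version).split(".")])
        if len(match) > len(best):
            best, best_date = match, d
    return best_date

# all three components of "66.0.1" match, beating the two shared with "66.0"
assert matching_date("66.0.1", version_dates) == date(2019, 3, 22)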

View file

@ -16,7 +16,7 @@ class InvalidWiki(Exception):
class Td(object):
def __init__(self, row_span):
super(Td, self).__init__()
self.data = ''
self.data = ""
self.row_span = int(row_span)
def set(self, data):
@ -66,7 +66,7 @@ class Table(object):
C = len(res[0])
for r in res:
if len(r) < C:
r.extend([''] * (C - len(r)))
r.extend([""] * (C - len(r)))
return res
@ -83,40 +83,40 @@ class WikiParser(HTMLParser):
def feed(self, data):
if not isinstance(data, str):
data = str(data, 'ascii')
data = str(data, "ascii")
HTMLParser.feed(self, data)
def handle_starttag(self, tag, attrs):
if tag == 'table':
if tag == "table":
self.table_counter += 1
if self.table_counter in self.tables_number:
self.table = Table()
if self.table is not None:
if tag == 'tr':
if tag == "tr":
self.table.add_row()
elif tag == 'td':
elif tag == "td":
attrs = dict(attrs)
self.table.add_cell(attrs.get('rowspan', 1))
self.td = ''
elif tag == 'th':
self.table.add_cell(attrs.get("rowspan", 1))
self.td = ""
elif tag == "th":
attrs = dict(attrs)
self.table.add_cell(attrs.get('rowspan', 1))
self.th = ''
self.table.add_cell(attrs.get("rowspan", 1))
self.th = ""
def handle_endtag(self, tag):
if self.table is not None:
if tag == 'table':
if tag == "table":
self.tables.append(self.table)
self.table = None
if self.table_counter == max(self.tables_number):
raise StopIteration()
elif tag == 'td':
elif tag == "td":
self.table.set(self.td)
self.td = None
elif tag == 'th':
elif tag == "th":
self.table.set(self.th)
self.th = None
if tag == 'html':
if tag == "html":
raise StopIteration()
def handle_data(self, data):
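As a usage sketch, the parser above walks selected <table> elements with HTMLParser, opening a row on <tr> and a cell on <td>/<th> (tracking rowspan through the Td helper), and Table.get() pads short rows with empty strings up to the width of the first row. A stripped-down, self-contained version without the rowspan bookkeeping (the class name is illustrative):

from html.parser import HTMLParser

class TinyTableParser(HTMLParser):
    def __init__(self):
        super().__init__()
        self.rows = []
        self.cell = None

    def handle_starttag(self, tag, attrs):
        if tag == "tr":
            self.rows.append([])
        elif tag in ("td", "th"):
            self.cell = ""

    def handle_endtag(self, tag):
        if tag in ("td", "th") and self.cell is not None:
            self.rows[-1].append(self.cell.strip())
            self.cell = None

    def handle_data(self, data):
        if self.cell is not None:
            self.cell += data

parser = TinyTableParser()
parser.feed("<table><tr><th>v</th><th>date</th></tr><tr><td>66.0</td></tr></table>")
width = len(parser.rows[0])
rows = [r + [""] * (width - len(r)) for r in parser.rows]
assert rows == [["v", "date"], ["66.0", ""]]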

View file

@ -8,33 +8,32 @@ import os
from setuptools import find_packages, setup
here = os.path.dirname(__file__)
def load_requirements(filename):
with open(os.path.join(here, filename)) as f:
return f.read().strip().split('\n')
return f.read().strip().split("\n")
with open(os.path.join(here, 'VERSION')) as f:
with open(os.path.join(here, "VERSION")) as f:
version = f.read().strip()
setup(
name='libmozdata',
name="libmozdata",
version=version,
description='Library to access and aggregate several Mozilla data sources.',
author='Mozilla Release Management',
author_email='release-mgmt@mozilla.com',
url='https://github.com/mozilla/libmozdata',
install_requires=load_requirements('requirements.txt'),
packages=find_packages(exclude=['*.tests', '*.tests.*', 'tests.*', 'tests']),
description="Library to access and aggregate several Mozilla data sources.",
author="Mozilla Release Management",
author_email="release-mgmt@mozilla.com",
url="https://github.com/mozilla/libmozdata",
install_requires=load_requirements("requirements.txt"),
packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
include_package_data=True,
zip_safe=False,
license='MPL2',
license="MPL2",
classifiers=[
'Operating System :: OS Independent',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Programming Language :: Python :: 3',
"Operating System :: OS Independent",
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
"Programming Language :: Python :: 3",
],
)
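A quick check of the helper above: load_requirements simply strips the file contents and splits on newlines, so requirements.txt must hold one requirement per line. A sketch of the same transformation on a literal string:

def load_requirements_from_text(text):
    return text.strip().split("\n")

assert load_requirements_from_text("requests\nsix\n") == ["requests", "six"]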

View file

@ -1,3 +1,3 @@
coverage
flake8
responses
pre-commit

View file

@ -1,12 +1,14 @@
import unittest
import os
import responses
import gzip
import pickle
import re
import hashlib
import logging
import os
import pickle
import re
import sys
import unittest
import responses
try:
from urllib.parse import urlparse, parse_qsl
from urllib.request import Request, urlopen
@ -18,7 +20,7 @@ except ImportError:
logger = logging.getLogger(__name__)
MOCKS_DIR = os.path.join(os.path.dirname(__file__), 'mocks')
MOCKS_DIR = os.path.join(os.path.dirname(__file__), "mocks")
class MockTestCase(unittest.TestCase):
@ -26,45 +28,42 @@ class MockTestCase(unittest.TestCase):
Mock responses from any webserver (through requests)
Register local responses when none are found
"""
mock_urls = []
def setUp(self):
# Setup mock callbacks
for mock_url in self.mock_urls:
url_re = re.compile(r'^{}'.format(mock_url))
url_re = re.compile(r"^{}".format(mock_url))
responses.add_callback(
responses.GET,
url_re,
callback=self.request_callback,
content_type='application/json',
content_type="application/json",
)
def request_callback(self, request):
logger.debug('Mock request {} {}'.format(request.method, request.url))
logger.debug("Mock request {} {}".format(request.method, request.url))
path = self.build_path(request.method, request.url)
if os.path.exists(path):
# Load local file
logger.info('Using mock file {}'.format(path))
with gzip.open(path, 'rb') as f:
logger.info("Using mock file {}".format(path))
with gzip.open(path, "rb") as f:
response = pickle.load(f)
else:
# Build from actual request
logger.info('Building mock file {}'.format(path))
logger.info("Building mock file {}".format(path))
response = self.real_request(request)
# Save in local file for future use
with gzip.open(path, 'wb') as f:
with gzip.open(path, "wb") as f:
# Use old pickle ascii protocol (default)
# to be compatible with Python 2
f.write(pickle.dumps(response, protocol=2))
return (
response['status'],
response['headers'],
response['body'],
)
return (response["status"], response["headers"], response["body"])
def build_path(self, method, url):
"""
@ -72,28 +71,30 @@ class MockTestCase(unittest.TestCase):
"""
# Build directory to request
out = urlparse(url)
parts = [
'{}_{}'.format(out.scheme, out.hostname),
]
parts += filter(None, out.path.split('/'))
parts = ["{}_{}".format(out.scheme, out.hostname)]
parts += filter(None, out.path.split("/"))
directory = os.path.join(MOCKS_DIR, *parts)
# Build sorted query filename
query = sorted(parse_qsl(out.query))
query = ['{}={}'.format(k, v.replace('/', '_')) for k, v in query]
query_str = '_'.join(query)
query = ["{}={}".format(k, v.replace("/", "_")) for k, v in query]
query_str = "_".join(query)
# Use hashes to avoid too long names
if len(query_str) > 150:
query_str = '{}_{}'.format(query_str[0:100], hashlib.md5(query_str.encode('utf-8')).hexdigest())
filename = '{}_{}.gz'.format(method, query_str)
query_str = "{}_{}".format(
query_str[0:100], hashlib.md5(query_str.encode("utf-8")).hexdigest()
)
filename = "{}_{}.gz".format(method, query_str)
# Build directory
if not os.path.isdir(directory):
try:
os.makedirs(directory)
except Exception as e:
logger.error('Concurrency error when building directories: {}'.format(e))
logger.error(
"Concurrency error when building directories: {}".format(e)
)
return os.path.join(directory, filename)
@ -106,28 +107,26 @@ class MockTestCase(unittest.TestCase):
# No gzip !
headers = dict([(k.lower(), v) for k, v in request.headers.items()])
if 'accept-encoding' in headers:
del headers['accept-encoding']
if "accept-encoding" in headers:
del headers["accept-encoding"]
# Method arg is not supported by Python 2
if sys.version_info >= (3, 0):
real_req = Request(request.url, request.body, headers=headers, method=request.method)
real_req = Request(
request.url, request.body, headers=headers, method=request.method
)
else:
real_req = Request(request.url, request.body, headers=headers)
try:
resp = urlopen(real_req)
except HTTPError as e:
logger.error('HTTP Error saved for {}: {}'.format(request.url, e))
return {
'status': e.code,
'headers': {},
'body': '',
}
logger.error("HTTP Error saved for {}: {}".format(request.url, e))
return {"status": e.code, "headers": {}, "body": ""}
return {
'status': resp.code,
"status": resp.code,
# TODO: fix cookie usage bug
# 'headers': dict(resp.getheaders()),
'headers': {},
'body': resp.read().decode('utf-8'),
"headers": {},
"body": resp.read().decode("utf-8"),
}
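In practice the record/replay flow above is driven from a test class: list the URL prefixes to intercept in mock_urls, activate responses, and request_callback either replays a gzipped pickle from tests/mocks/ or performs the real request once and records it for the next run. A hedged usage sketch (the test class and asserted status are illustrative):

import requests
import responses
from tests.auto_mock import MockTestCase

class ProductDetailsTest(MockTestCase):
    # every GET whose URL starts with this prefix goes through request_callback
    mock_urls = ["https://product-details.mozilla.org"]

    @responses.activate
    def test_versions(self):
        # the first run hits the network and records a mock file;
        # later runs replay the recorded status, headers and body
        r = requests.get(
            "https://product-details.mozilla.org/1.0/firefox_versions.json"
        )
        self.assertEqual(r.status_code, 200)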

Diff not shown because of its large size.

View file

@ -6,7 +6,6 @@ from libmozdata import buildhub
class BuidlhubTest(unittest.TestCase):
@responses.activate
def test_get_distinct_versions(self):
responses.add(

View file

@ -3,37 +3,37 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
from libmozdata.BZInfo import BZInfo
class BZInfoTest(unittest.TestCase):
def test_bzinfo(self):
info = BZInfo(12345).get()
self.assertIn('12345', info)
info = info['12345']
self.assertTrue(info['authorized'])
self.assertEqual(info['owner'], u'jefft@formerly-netscape.com.tld')
self.assertIn("12345", info)
info = info["12345"]
self.assertTrue(info["authorized"])
self.assertEqual(info["owner"], u"jefft@formerly-netscape.com.tld")
# self.assertIn(u'jefft@formerly-netscape.com.tld', info['commenters'])
self.assertEqual(info['component'], 'Backend')
self.assertEqual(info['product'], 'MailNews Core')
self.assertEqual(info["component"], "Backend")
self.assertEqual(info["product"], "MailNews Core")
def test_bzinfo_unauthorized(self):
bzi = BZInfo(1269839)
info = bzi.get()
self.assertIn('1269839', info)
info = info['1269839']
self.assertFalse(info['authorized'])
self.assertEqual(info['ownership'], [])
self.assertIn("1269839", info)
info = info["1269839"]
self.assertFalse(info["authorized"])
self.assertEqual(info["ownership"], [])
# self.assertEqual(info['commenters'], {})
self.assertEqual(info['reviewers'], set())
self.assertEqual(info["reviewers"], set())
self.assertIsNone(bzi.get_best_collaborator())
self.assertIsNone(bzi.get_best_component_product())
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()

View file

@ -2,9 +2,10 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
import sys
import os
import sys
import unittest
try:
from configparser import ConfigParser
except ImportError:
@ -12,146 +13,158 @@ except ImportError:
class ConfigTest(unittest.TestCase):
def tearDown(self):
try:
del sys.modules['libmozdata']
del sys.modules['libmozdata.config']
del sys.modules["libmozdata"]
del sys.modules["libmozdata.config"]
except KeyError:
pass
try:
os.remove('config.ini')
os.remove("config.ini")
except OSError:
pass
try:
os.remove('mozdata.ini')
os.remove("mozdata.ini")
except OSError:
pass
try:
os.remove(os.path.expanduser('~/.mozdata.ini'))
os.remove(os.path.expanduser("~/.mozdata.ini"))
except OSError:
pass
try:
os.rename('mozdata.ini.bak', 'mozdata.ini')
os.rename("mozdata.ini.bak", "mozdata.ini")
except OSError:
pass
try:
os.rename(os.path.expanduser('~/.mozdata.ini.bak'), os.path.expanduser('~/.mozdata.ini'))
os.rename(
os.path.expanduser("~/.mozdata.ini.bak"),
os.path.expanduser("~/.mozdata.ini"),
)
except OSError:
pass
def setUp(self):
try:
os.rename('mozdata.ini', 'mozdata.ini.bak')
os.rename("mozdata.ini", "mozdata.ini.bak")
except OSError:
pass
try:
os.rename(os.path.expanduser('~/.mozdata.ini'), os.path.expanduser('~/.mozdata.ini.bak'))
os.rename(
os.path.expanduser("~/.mozdata.ini"),
os.path.expanduser("~/.mozdata.ini.bak"),
)
except OSError:
pass
def test_config_doesnt_exist(self):
from libmozdata import config
self.assertIsNone(config.get('Section', 'Option'))
self.assertEqual(config.get('Section', 'Option', 'Default'), 'Default')
self.assertIsNone(config.get("Section", "Option"))
self.assertEqual(config.get("Section", "Option", "Default"), "Default")
def test_config_exists_in_cwd(self):
with open('mozdata.ini', 'w') as f:
with open("mozdata.ini", "w") as f:
custom_conf = ConfigParser()
custom_conf.add_section('Section')
custom_conf.set('Section', 'Option', 'Value')
custom_conf.set('Section', 'Option2', 'Value2')
custom_conf.add_section('Section2')
custom_conf.set('Section2', 'Option', 'Value3')
custom_conf.add_section("Section")
custom_conf.set("Section", "Option", "Value")
custom_conf.set("Section", "Option2", "Value2")
custom_conf.add_section("Section2")
custom_conf.set("Section2", "Option", "Value3")
custom_conf.write(f)
from libmozdata import config
self.assertEqual(config.get('Section', 'Option'), 'Value')
self.assertEqual(config.get('Section', 'Option', 'Default'), 'Value')
self.assertEqual(config.get('Section', 'Option2'), 'Value2')
self.assertEqual(config.get('Section', 'Option2', 'Default'), 'Value2')
self.assertEqual(config.get('Section2', 'Option'), 'Value3')
self.assertEqual(config.get('Section2', 'Option', 'Default'), 'Value3')
self.assertIsNone(config.get('Section', 'Option3'))
self.assertEqual(config.get('Section', 'Option3', 'Default'), 'Default')
self.assertEqual(config.get("Section", "Option"), "Value")
self.assertEqual(config.get("Section", "Option", "Default"), "Value")
self.assertEqual(config.get("Section", "Option2"), "Value2")
self.assertEqual(config.get("Section", "Option2", "Default"), "Value2")
self.assertEqual(config.get("Section2", "Option"), "Value3")
self.assertEqual(config.get("Section2", "Option", "Default"), "Value3")
self.assertIsNone(config.get("Section", "Option3"))
self.assertEqual(config.get("Section", "Option3", "Default"), "Default")
def test_config_get_with_type(self):
with open('mozdata.ini', 'w') as f:
with open("mozdata.ini", "w") as f:
custom_conf = ConfigParser()
custom_conf.add_section('Section')
custom_conf.set('Section', 'Option', 'Value')
custom_conf.set('Section', 'Option2', '123')
custom_conf.add_section('Section2')
custom_conf.set('Section2', 'Option', 'Value1, Value2, Value3')
custom_conf.add_section("Section")
custom_conf.set("Section", "Option", "Value")
custom_conf.set("Section", "Option2", "123")
custom_conf.add_section("Section2")
custom_conf.set("Section2", "Option", "Value1, Value2, Value3")
custom_conf.write(f)
from libmozdata import config
self.assertEqual(config.get('Section', 'Option'), 'Value')
self.assertEqual(config.get('Section', 'Option2', type=int), 123)
self.assertEqual(config.get('Section', 'Option2', type=str), '123')
self.assertEqual(config.get('Section2', 'Option', type=list), ['Value1', 'Value2', 'Value3'])
self.assertEqual(config.get('Section2', 'Option', type=set), {'Value1', 'Value2', 'Value3'})
self.assertEqual(config.get("Section", "Option"), "Value")
self.assertEqual(config.get("Section", "Option2", type=int), 123)
self.assertEqual(config.get("Section", "Option2", type=str), "123")
self.assertEqual(
config.get("Section2", "Option", type=list), ["Value1", "Value2", "Value3"]
)
self.assertEqual(
config.get("Section2", "Option", type=set), {"Value1", "Value2", "Value3"}
)
def test_config_exists_in_home(self):
with open(os.path.expanduser('~/.mozdata.ini'), 'w') as f:
with open(os.path.expanduser("~/.mozdata.ini"), "w") as f:
custom_conf = ConfigParser()
custom_conf.add_section('Section3')
custom_conf.set('Section3', 'Option5', 'Value8')
custom_conf.set('Section3', 'Option6', 'Value9')
custom_conf.add_section('Section4')
custom_conf.set('Section4', 'Option7', 'Value10')
custom_conf.add_section("Section3")
custom_conf.set("Section3", "Option5", "Value8")
custom_conf.set("Section3", "Option6", "Value9")
custom_conf.add_section("Section4")
custom_conf.set("Section4", "Option7", "Value10")
custom_conf.write(f)
from libmozdata import config
self.assertEqual(config.get('Section3', 'Option5'), 'Value8')
self.assertEqual(config.get('Section3', 'Option5', 'Default'), 'Value8')
self.assertEqual(config.get('Section3', 'Option6'), 'Value9')
self.assertEqual(config.get('Section3', 'Option6', 'Default'), 'Value9')
self.assertEqual(config.get('Section4', 'Option7'), 'Value10')
self.assertEqual(config.get('Section4', 'Option7', 'Default'), 'Value10')
self.assertIsNone(config.get('Section3', 'Option7'))
self.assertEqual(config.get('Section3', 'Option7', 'Default'), 'Default')
self.assertEqual(config.get("Section3", "Option5"), "Value8")
self.assertEqual(config.get("Section3", "Option5", "Default"), "Value8")
self.assertEqual(config.get("Section3", "Option6"), "Value9")
self.assertEqual(config.get("Section3", "Option6", "Default"), "Value9")
self.assertEqual(config.get("Section4", "Option7"), "Value10")
self.assertEqual(config.get("Section4", "Option7", "Default"), "Value10")
self.assertIsNone(config.get("Section3", "Option7"))
self.assertEqual(config.get("Section3", "Option7", "Default"), "Default")
def test_config_exists_in_custom_path(self):
with open('config.ini', 'w') as f:
with open("config.ini", "w") as f:
custom_conf = ConfigParser()
custom_conf.add_section('Section5')
custom_conf.set('Section5', 'Option7', 'Value11')
custom_conf.set('Section5', 'Option8', 'Value12')
custom_conf.add_section('Section6')
custom_conf.set('Section6', 'Option9', 'Value13')
custom_conf.add_section("Section5")
custom_conf.set("Section5", "Option7", "Value11")
custom_conf.set("Section5", "Option8", "Value12")
custom_conf.add_section("Section6")
custom_conf.set("Section6", "Option9", "Value13")
custom_conf.write(f)
from libmozdata import config
config.set_config(config.ConfigIni('config.ini'))
self.assertEqual(config.get('Section5', 'Option7'), 'Value11')
self.assertEqual(config.get('Section5', 'Option7', 'Default'), 'Value11')
self.assertEqual(config.get('Section5', 'Option8'), 'Value12')
self.assertEqual(config.get('Section5', 'Option8', 'Default'), 'Value12')
self.assertEqual(config.get('Section6', 'Option9'), 'Value13')
self.assertEqual(config.get('Section6', 'Option9', 'Default'), 'Value13')
self.assertIsNone(config.get('Section5', 'Option9'))
self.assertEqual(config.get('Section5', 'Option9', 'Default'), 'Default')
config.set_config(config.ConfigIni("config.ini"))
self.assertEqual(config.get("Section5", "Option7"), "Value11")
self.assertEqual(config.get("Section5", "Option7", "Default"), "Value11")
self.assertEqual(config.get("Section5", "Option8"), "Value12")
self.assertEqual(config.get("Section5", "Option8", "Default"), "Value12")
self.assertEqual(config.get("Section6", "Option9"), "Value13")
self.assertEqual(config.get("Section6", "Option9", "Default"), "Value13")
self.assertIsNone(config.get("Section5", "Option9"))
self.assertEqual(config.get("Section5", "Option9", "Default"), "Default")
class ConfigEnvTest(unittest.TestCase):
def test_config_env(self):
from libmozdata import config
os.environ['LIBMOZDATA_CFG_BUGZILLA_TOKEN'] = 'my_bugzilla_api_key'
os.environ["LIBMOZDATA_CFG_BUGZILLA_TOKEN"] = "my_bugzilla_api_key"
cfg = config.ConfigEnv()
self.assertEqual(cfg.get('Bugzilla', 'token', 'default'), 'my_bugzilla_api_key')
self.assertEqual(cfg.get('Section', 'Option', 'default'), 'default')
self.assertEqual(cfg.get("Bugzilla", "token", "default"), "my_bugzilla_api_key")
self.assertEqual(cfg.get("Section", "Option", "default"), "default")
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()
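Taken together, these tests pin down the lookup contract: config.get(section, option, default, type=...) reads mozdata.ini from the working directory or ~/.mozdata.ini, a custom file can be installed with config.set_config(config.ConfigIni(path)), and ConfigEnv resolves variables named LIBMOZDATA_CFG_<SECTION>_<OPTION>. A short sketch, assuming no mozdata.ini is present:

import os
from libmozdata import config

# with no configuration file found, the supplied default is returned
assert config.get("Section", "Option", "Default") == "Default"

# environment lookups follow the LIBMOZDATA_CFG_<SECTION>_<OPTION> pattern
os.environ["LIBMOZDATA_CFG_BUGZILLA_TOKEN"] = "my_bugzilla_api_key"
assert config.ConfigEnv().get("Bugzilla", "token", "default") == "my_bugzilla_api_key"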

View file

@ -3,21 +3,34 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
from libmozdata.connection import Query
class QueryTest(unittest.TestCase):
def test(self):
self.assertEqual(str(Query('https://www.mozilla.org/')), 'url: https://www.mozilla.org/')
self.assertEqual(
str(Query("https://www.mozilla.org/")), "url: https://www.mozilla.org/"
)
def test_args(self):
representation = str(Query('https://www.mozilla.org/', {
'var1': True,
'var2': ['value2', 'value3'],
'var3': None
}))
self.assertTrue(representation == 'url: https://www.mozilla.org/?var1=True&var2=value2&var2=value3' or representation == 'url: https://www.mozilla.org/?var2=value2&var2=value3&var1=True')
representation = str(
Query(
"https://www.mozilla.org/",
{"var1": True, "var2": ["value2", "value3"], "var3": None},
)
)
self.assertTrue(
representation
== "url: https://www.mozilla.org/?var1=True&var2=value2&var2=value3"
or representation
== "url: https://www.mozilla.org/?var2=value2&var2=value3&var1=True"
)
representation = str(Query('https://www.mozilla.org/', [{'var1': True}, {'var2': 'marco'}]))
self.assertEqual(representation, 'url: https://www.mozilla.org/?var1=True\nurl: https://www.mozilla.org/?var2=marco')
representation = str(
Query("https://www.mozilla.org/", [{"var1": True}, {"var2": "marco"}])
)
self.assertEqual(
representation,
"url: https://www.mozilla.org/?var1=True\nurl: https://www.mozilla.org/?var2=marco",
)
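The two accepted permutations above encode the Query contract: parameters with a None value are dropped, list values repeat the key, and dict ordering is not guaranteed. The same encoding can be reproduced with the standard library; a sketch that assumes nothing about libmozdata internals:

from urllib.parse import urlencode

params = {"var1": True, "var2": ["value2", "value3"], "var3": None}
kept = {k: v for k, v in params.items() if v is not None}
# doseq=True expands list values into repeated keys;
# on Python 3.7+ dict order is insertion order, so the result is deterministic
assert urlencode(kept, doseq=True) == "var1=True&var2=value2&var2=value3"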

View file

@ -3,70 +3,90 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
import six
from libmozdata.FileStats import FileStats
from libmozdata import utils
from libmozdata.FileStats import FileStats
class FileStatsTest(unittest.TestCase):
def test_filestats(self):
path = 'netwerk/protocol/http/nsHttpConnectionMgr.cpp'
path = "netwerk/protocol/http/nsHttpConnectionMgr.cpp"
info = FileStats(path).get_info()
self.assertIsNotNone(info)
self.assertEqual(info['path'], 'netwerk/protocol/http/nsHttpConnectionMgr.cpp')
self.assertEqual(info['module'], 'Necko')
six.assertCountEqual(self, info['components'], ['Core::Networking', 'Core::Networking: Cache', 'Core::Networking: Cookies', 'Core::Networking: FTP', 'Core::Networking: File', 'Core::Networking: HTTP', 'Core::Networking: JAR', 'Core::Networking: Websockets'])
self.assertGreater(len(info['owners']), 0)
self.assertGreater(len(info['peers']), 0)
self.assertEqual(info["path"], "netwerk/protocol/http/nsHttpConnectionMgr.cpp")
self.assertEqual(info["module"], "Necko")
six.assertCountEqual(
self,
info["components"],
[
"Core::Networking",
"Core::Networking: Cache",
"Core::Networking: Cookies",
"Core::Networking: FTP",
"Core::Networking: File",
"Core::Networking: HTTP",
"Core::Networking: JAR",
"Core::Networking: Websockets",
],
)
self.assertGreater(len(info["owners"]), 0)
self.assertGreater(len(info["peers"]), 0)
def test_filestats_no_bugs(self):
path = 'LEGAL'
path = "LEGAL"
info = FileStats(path).get_info()
self.assertEqual(info['components'], set())
self.assertIsNone(info['needinfo'])
self.assertEqual(info['path'], path)
self.assertEqual(len(info['guilty']['patches']), 1)
self.assertEqual(info['guilty']['main_author'], 'hg@mozilla.com')
self.assertEqual(info['guilty']['last_author'], 'hg@mozilla.com')
self.assertNotIn('bugs', info)
self.assertEqual(info["components"], set())
self.assertIsNone(info["needinfo"])
self.assertEqual(info["path"], path)
self.assertEqual(len(info["guilty"]["patches"]), 1)
self.assertEqual(info["guilty"]["main_author"], "hg@mozilla.com")
self.assertEqual(info["guilty"]["last_author"], "hg@mozilla.com")
self.assertNotIn("bugs", info)
def test_filestats_date(self):
path = 'LICENSE'
info = FileStats(path, utc_ts=utils.get_timestamp('today')).get_info()
self.assertEqual(info['components'], set())
self.assertIsNotNone(info['needinfo'])
self.assertEqual(info['path'], path)
self.assertIsNone(info['guilty'])
path = "LICENSE"
info = FileStats(path, utc_ts=utils.get_timestamp("today")).get_info()
self.assertEqual(info["components"], set())
self.assertIsNotNone(info["needinfo"])
self.assertEqual(info["path"], path)
self.assertIsNone(info["guilty"])
info = FileStats(path, utc_ts=utils.get_timestamp('2010-04-06')).get_info()
self.assertEqual(info['infered_component'], 'Core::General')
self.assertEqual(info['needinfo'], 'philringnalda@gmail.com')
self.assertEqual(info['path'], path)
self.assertEqual(len(info['guilty']['patches']), 1)
self.assertEqual(info['guilty']['main_author'], 'philringnalda@gmail.com')
self.assertEqual(info['guilty']['last_author'], 'philringnalda@gmail.com')
self.assertEqual(info['bugs'], 1)
info = FileStats(path, utc_ts=utils.get_timestamp("2010-04-06")).get_info()
self.assertEqual(info["infered_component"], "Core::General")
self.assertEqual(info["needinfo"], "philringnalda@gmail.com")
self.assertEqual(info["path"], path)
self.assertEqual(len(info["guilty"]["patches"]), 1)
self.assertEqual(info["guilty"]["main_author"], "philringnalda@gmail.com")
self.assertEqual(info["guilty"]["last_author"], "philringnalda@gmail.com")
self.assertEqual(info["bugs"], 1)
self.assertEqual(info, FileStats(path, utc_ts=utils.get_timestamp('2010-04-07')).get_info())
self.assertEqual(info, FileStats(path, utc_ts=utils.get_timestamp('2010-04-08')).get_info())
self.assertEqual(
info, FileStats(path, utc_ts=utils.get_timestamp("2010-04-07")).get_info()
)
self.assertEqual(
info, FileStats(path, utc_ts=utils.get_timestamp("2010-04-08")).get_info()
)
info = FileStats(path, utc_ts=utils.get_timestamp('2010-04-09')).get_static_info()
self.assertEqual(info['components'], set())
self.assertIsNone(info['needinfo'])
self.assertEqual(info['path'], path)
self.assertIsNone(info['guilty'])
self.assertNotIn('bugs', info)
info = FileStats(
path, utc_ts=utils.get_timestamp("2010-04-09")
).get_static_info()
self.assertEqual(info["components"], set())
self.assertIsNone(info["needinfo"])
self.assertEqual(info["path"], path)
self.assertIsNone(info["guilty"])
self.assertNotIn("bugs", info)
info = FileStats(path, utc_ts=utils.get_timestamp('2008-03-21')).get_info()
self.assertEqual(info['infered_component'], 'Core::General')
self.assertEqual(info['needinfo'], 'philringnalda@gmail.com')
self.assertEqual(info['path'], path)
self.assertEqual(len(info['guilty']['patches']), 1)
self.assertEqual(info['guilty']['main_author'], 'hg@mozilla.com')
self.assertEqual(info['guilty']['last_author'], 'hg@mozilla.com')
self.assertEqual(info['bugs'], 1)
info = FileStats(path, utc_ts=utils.get_timestamp("2008-03-21")).get_info()
self.assertEqual(info["infered_component"], "Core::General")
self.assertEqual(info["needinfo"], "philringnalda@gmail.com")
self.assertEqual(info["path"], path)
self.assertEqual(len(info["guilty"]["patches"]), 1)
self.assertEqual(info["guilty"]["main_author"], "hg@mozilla.com")
self.assertEqual(info["guilty"]["last_author"], "hg@mozilla.com")
self.assertEqual(info["bugs"], 1)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()

View file

@ -3,177 +3,203 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
import responses
from libmozdata import utils
from libmozdata.HGFileInfo import HGFileInfo
from libmozdata.hgmozilla import Mercurial
from libmozdata import utils
from tests.auto_mock import MockTestCase
import responses
class HGFileInfoTest(MockTestCase):
mock_urls = [
Mercurial.HG_URL,
]
mock_urls = [Mercurial.HG_URL]
@responses.activate
def test_hgfileinfo(self):
path = 'netwerk/protocol/http/nsHttpConnectionMgr.cpp'
path = "netwerk/protocol/http/nsHttpConnectionMgr.cpp"
hi = HGFileInfo(path)
fi = hi.get(path)
self.assertIn('authors', fi)
self.assertIsNotNone(fi['authors'])
self.assertIn('bugs', fi)
self.assertIsNotNone(fi['bugs'])
self.assertIn("authors", fi)
self.assertIsNotNone(fi["authors"])
self.assertIn("bugs", fi)
self.assertIsNotNone(fi["bugs"])
@responses.activate
def test_hgfileinfo_date(self):
path = 'LICENSE'
path = "LICENSE"
hi = HGFileInfo(path)
fi = hi.get(path)
self.assertEqual(len(fi['authors']), 2)
self.assertEqual(fi['authors']['philringnalda@gmail.com']['count'], 1)
self.assertEqual(len(fi['authors']['philringnalda@gmail.com']['reviewers']), 1)
self.assertEqual(fi['authors']['philringnalda@gmail.com']['reviewers']['gerv'], 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['count'], 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['reviewers'], {})
self.assertEqual(fi['bugs'], set(['547914']))
self.assertEqual(len(fi['patches']), 2)
self.assertEqual(fi['patches'][0]['user'], 'philringnalda@gmail.com')
self.assertEqual(fi['patches'][1]['user'], 'hg@mozilla.com')
self.assertEqual(len(fi["authors"]), 2)
self.assertEqual(fi["authors"]["philringnalda@gmail.com"]["count"], 1)
self.assertEqual(len(fi["authors"]["philringnalda@gmail.com"]["reviewers"]), 1)
self.assertEqual(
fi["authors"]["philringnalda@gmail.com"]["reviewers"]["gerv"], 1
)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["count"], 1)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["reviewers"], {})
self.assertEqual(fi["bugs"], set(["547914"]))
self.assertEqual(len(fi["patches"]), 2)
self.assertEqual(fi["patches"][0]["user"], "philringnalda@gmail.com")
self.assertEqual(fi["patches"][1]["user"], "hg@mozilla.com")
fi = hi.get(path, utils.get_timestamp('2009-01-01'))
self.assertEqual(len(fi['authors']), 1)
self.assertEqual(fi['authors']['philringnalda@gmail.com']['count'], 1)
self.assertEqual(len(fi['authors']['philringnalda@gmail.com']['reviewers']), 1)
self.assertEqual(fi['authors']['philringnalda@gmail.com']['reviewers']['gerv'], 1)
self.assertEqual(fi['bugs'], set(['547914']))
self.assertEqual(len(fi['patches']), 1)
self.assertEqual(fi['patches'][0]['user'], 'philringnalda@gmail.com')
fi = hi.get(path, utils.get_timestamp("2009-01-01"))
self.assertEqual(len(fi["authors"]), 1)
self.assertEqual(fi["authors"]["philringnalda@gmail.com"]["count"], 1)
self.assertEqual(len(fi["authors"]["philringnalda@gmail.com"]["reviewers"]), 1)
self.assertEqual(
fi["authors"]["philringnalda@gmail.com"]["reviewers"]["gerv"], 1
)
self.assertEqual(fi["bugs"], set(["547914"]))
self.assertEqual(len(fi["patches"]), 1)
self.assertEqual(fi["patches"][0]["user"], "philringnalda@gmail.com")
fi = hi.get(path, utils.get_timestamp('2008-01-01'), utils.get_timestamp('2009-01-01'))
self.assertEqual(fi['authors']['hg@mozilla.com']['count'], 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['reviewers'], {})
self.assertEqual(fi['bugs'], set())
self.assertEqual(len(fi['patches']), 1)
self.assertEqual(fi['patches'][0]['user'], 'hg@mozilla.com')
fi = hi.get(
path, utils.get_timestamp("2008-01-01"), utils.get_timestamp("2009-01-01")
)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["count"], 1)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["reviewers"], {})
self.assertEqual(fi["bugs"], set())
self.assertEqual(len(fi["patches"]), 1)
self.assertEqual(fi["patches"][0]["user"], "hg@mozilla.com")
fi = hi.get(path, utc_ts_to=utils.get_timestamp('2009-01-01'))
self.assertEqual(len(fi['authors']), 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['count'], 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['reviewers'], {})
self.assertEqual(fi['bugs'], set())
self.assertEqual(len(fi['patches']), 1)
self.assertEqual(fi['patches'][0]['user'], 'hg@mozilla.com')
fi = hi.get(path, utc_ts_to=utils.get_timestamp("2009-01-01"))
self.assertEqual(len(fi["authors"]), 1)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["count"], 1)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["reviewers"], {})
self.assertEqual(fi["bugs"], set())
self.assertEqual(len(fi["patches"]), 1)
self.assertEqual(fi["patches"][0]["user"], "hg@mozilla.com")
fi = hi.get(path, utils.get_timestamp('2006-01-01'), utils.get_timestamp('2007-01-01'))
self.assertEqual(fi['authors'], {})
self.assertEqual(fi['bugs'], set())
self.assertEqual(fi['patches'], [])
fi = hi.get(
path, utils.get_timestamp("2006-01-01"), utils.get_timestamp("2007-01-01")
)
self.assertEqual(fi["authors"], {})
self.assertEqual(fi["bugs"], set())
self.assertEqual(fi["patches"], [])
fi = hi.get(path, utils.get_timestamp('2008-01-01'), utils.get_timestamp('2012-01-01'))
self.assertEqual(len(fi['authors']), 2)
self.assertEqual(fi['authors']['philringnalda@gmail.com']['count'], 1)
self.assertEqual(len(fi['authors']['philringnalda@gmail.com']['reviewers']), 1)
self.assertEqual(fi['authors']['philringnalda@gmail.com']['reviewers']['gerv'], 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['count'], 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['reviewers'], {})
self.assertEqual(fi['bugs'], set(['547914']))
self.assertEqual(len(fi['patches']), 2)
self.assertEqual(fi['patches'][0]['user'], 'philringnalda@gmail.com')
self.assertEqual(fi['patches'][1]['user'], 'hg@mozilla.com')
fi = hi.get(
path, utils.get_timestamp("2008-01-01"), utils.get_timestamp("2012-01-01")
)
self.assertEqual(len(fi["authors"]), 2)
self.assertEqual(fi["authors"]["philringnalda@gmail.com"]["count"], 1)
self.assertEqual(len(fi["authors"]["philringnalda@gmail.com"]["reviewers"]), 1)
self.assertEqual(
fi["authors"]["philringnalda@gmail.com"]["reviewers"]["gerv"], 1
)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["count"], 1)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["reviewers"], {})
self.assertEqual(fi["bugs"], set(["547914"]))
self.assertEqual(len(fi["patches"]), 2)
self.assertEqual(fi["patches"][0]["user"], "philringnalda@gmail.com")
self.assertEqual(fi["patches"][1]["user"], "hg@mozilla.com")
@responses.activate
def test_hgfileinfo_creation_vs_push_date(self):
path = 'LICENSE'
hi = HGFileInfo(path, date_type='creation')
path = "LICENSE"
hi = HGFileInfo(path, date_type="creation")
fi = hi.get(path, utc_ts_to=utils.get_timestamp('2010-02-22'))
self.assertEqual(len(fi['authors']), 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['count'], 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['reviewers'], {})
self.assertEqual(fi['bugs'], set())
self.assertEqual(len(fi['patches']), 1)
self.assertEqual(fi['patches'][0]['user'], 'hg@mozilla.com')
fi = hi.get(path, utc_ts_to=utils.get_timestamp("2010-02-22"))
self.assertEqual(len(fi["authors"]), 1)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["count"], 1)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["reviewers"], {})
self.assertEqual(fi["bugs"], set())
self.assertEqual(len(fi["patches"]), 1)
self.assertEqual(fi["patches"][0]["user"], "hg@mozilla.com")
fi = hi.get(path, utc_ts_to=utils.get_timestamp('2010-02-24'))
self.assertEqual(len(fi['authors']), 2)
self.assertEqual(fi['authors']['philringnalda@gmail.com']['count'], 1)
self.assertEqual(len(fi['authors']['philringnalda@gmail.com']['reviewers']), 1)
self.assertEqual(fi['authors']['philringnalda@gmail.com']['reviewers']['gerv'], 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['count'], 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['reviewers'], {})
self.assertEqual(fi['bugs'], set(['547914']))
self.assertEqual(len(fi['patches']), 2)
self.assertEqual(fi['patches'][0]['user'], 'philringnalda@gmail.com')
self.assertEqual(fi['patches'][1]['user'], 'hg@mozilla.com')
fi = hi.get(path, utc_ts_to=utils.get_timestamp("2010-02-24"))
self.assertEqual(len(fi["authors"]), 2)
self.assertEqual(fi["authors"]["philringnalda@gmail.com"]["count"], 1)
self.assertEqual(len(fi["authors"]["philringnalda@gmail.com"]["reviewers"]), 1)
self.assertEqual(
fi["authors"]["philringnalda@gmail.com"]["reviewers"]["gerv"], 1
)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["count"], 1)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["reviewers"], {})
self.assertEqual(fi["bugs"], set(["547914"]))
self.assertEqual(len(fi["patches"]), 2)
self.assertEqual(fi["patches"][0]["user"], "philringnalda@gmail.com")
self.assertEqual(fi["patches"][1]["user"], "hg@mozilla.com")
hi = HGFileInfo(path, date_type='push')
hi = HGFileInfo(path, date_type="push")
fi = hi.get(path, utc_ts_to=utils.get_timestamp('2010-02-24'))
self.assertEqual(len(fi['authors']), 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['count'], 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['reviewers'], {})
self.assertEqual(fi['bugs'], set())
self.assertEqual(len(fi['patches']), 1)
self.assertEqual(fi['patches'][0]['user'], 'hg@mozilla.com')
fi = hi.get(path, utc_ts_to=utils.get_timestamp("2010-02-24"))
self.assertEqual(len(fi["authors"]), 1)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["count"], 1)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["reviewers"], {})
self.assertEqual(fi["bugs"], set())
self.assertEqual(len(fi["patches"]), 1)
self.assertEqual(fi["patches"][0]["user"], "hg@mozilla.com")
fi = hi.get(path, utc_ts_to=utils.get_timestamp('2010-04-07'))
self.assertEqual(len(fi['authors']), 2)
self.assertEqual(fi['authors']['philringnalda@gmail.com']['count'], 1)
self.assertEqual(len(fi['authors']['philringnalda@gmail.com']['reviewers']), 1)
self.assertEqual(fi['authors']['philringnalda@gmail.com']['reviewers']['gerv'], 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['count'], 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['reviewers'], {})
self.assertEqual(fi['bugs'], set(['547914']))
self.assertEqual(len(fi['patches']), 2)
self.assertEqual(fi['patches'][0]['user'], 'philringnalda@gmail.com')
self.assertEqual(fi['patches'][1]['user'], 'hg@mozilla.com')
fi = hi.get(path, utc_ts_to=utils.get_timestamp("2010-04-07"))
self.assertEqual(len(fi["authors"]), 2)
self.assertEqual(fi["authors"]["philringnalda@gmail.com"]["count"], 1)
self.assertEqual(len(fi["authors"]["philringnalda@gmail.com"]["reviewers"]), 1)
self.assertEqual(
fi["authors"]["philringnalda@gmail.com"]["reviewers"]["gerv"], 1
)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["count"], 1)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["reviewers"], {})
self.assertEqual(fi["bugs"], set(["547914"]))
self.assertEqual(len(fi["patches"]), 2)
self.assertEqual(fi["patches"][0]["user"], "philringnalda@gmail.com")
self.assertEqual(fi["patches"][1]["user"], "hg@mozilla.com")
@responses.activate
def test_hgfileinfo_author(self):
path = 'LICENSE'
path = "LICENSE"
hi = HGFileInfo(path)
fi = hi.get(path, authors=['hg@mozilla.com'])
self.assertEqual(fi['authors']['hg@mozilla.com']['count'], 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['reviewers'], {})
self.assertEqual(fi['bugs'], set())
self.assertEqual(len(fi['patches']), 1)
self.assertEqual(fi['patches'][0]['user'], 'hg@mozilla.com')
fi = hi.get(path, authors=["hg@mozilla.com"])
self.assertEqual(fi["authors"]["hg@mozilla.com"]["count"], 1)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["reviewers"], {})
self.assertEqual(fi["bugs"], set())
self.assertEqual(len(fi["patches"]), 1)
self.assertEqual(fi["patches"][0]["user"], "hg@mozilla.com")
self.assertEqual(fi, hi.get(path, utils.get_timestamp('2008-01-01'), utils.get_timestamp('2012-01-01'), authors=['hg@mozilla.com']))
self.assertEqual(
fi,
hi.get(
path,
utils.get_timestamp("2008-01-01"),
utils.get_timestamp("2012-01-01"),
authors=["hg@mozilla.com"],
),
)
fi = hi.get(path, authors=['hg@mozilla.com', 'philringnalda@gmail.com'])
self.assertEqual(len(fi['authors']), 2)
self.assertEqual(fi['authors']['philringnalda@gmail.com']['count'], 1)
self.assertEqual(len(fi['authors']['philringnalda@gmail.com']['reviewers']), 1)
self.assertEqual(fi['authors']['philringnalda@gmail.com']['reviewers']['gerv'], 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['count'], 1)
self.assertEqual(fi['authors']['hg@mozilla.com']['reviewers'], {})
self.assertEqual(fi['bugs'], set(['547914']))
self.assertEqual(len(fi['patches']), 2)
self.assertEqual(fi['patches'][0]['user'], 'philringnalda@gmail.com')
self.assertEqual(fi['patches'][1]['user'], 'hg@mozilla.com')
fi = hi.get(path, authors=["hg@mozilla.com", "philringnalda@gmail.com"])
self.assertEqual(len(fi["authors"]), 2)
self.assertEqual(fi["authors"]["philringnalda@gmail.com"]["count"], 1)
self.assertEqual(len(fi["authors"]["philringnalda@gmail.com"]["reviewers"]), 1)
self.assertEqual(
fi["authors"]["philringnalda@gmail.com"]["reviewers"]["gerv"], 1
)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["count"], 1)
self.assertEqual(fi["authors"]["hg@mozilla.com"]["reviewers"], {})
self.assertEqual(fi["bugs"], set(["547914"]))
self.assertEqual(len(fi["patches"]), 2)
self.assertEqual(fi["patches"][0]["user"], "philringnalda@gmail.com")
self.assertEqual(fi["patches"][1]["user"], "hg@mozilla.com")
@responses.activate
def test_hgfileinfo_multiple(self):
path1 = 'netwerk/protocol/http/nsHttpConnectionMgr.cpp'
path2 = 'LICENSE'
path1 = "netwerk/protocol/http/nsHttpConnectionMgr.cpp"
path2 = "LICENSE"
hi = HGFileInfo([path1, path2])
fi1 = hi.get(path1)
fi2 = hi.get(path2)
self.assertIn('authors', fi1)
self.assertIn('authors', fi2)
self.assertIsNotNone(fi1['authors'])
self.assertIsNotNone(fi2['authors'])
self.assertIn('bugs', fi1)
self.assertIn('bugs', fi2)
self.assertIsNotNone(fi1['bugs'])
self.assertIsNotNone(fi2['bugs'])
self.assertIn("authors", fi1)
self.assertIn("authors", fi2)
self.assertIsNotNone(fi1["authors"])
self.assertIsNotNone(fi2["authors"])
self.assertIn("bugs", fi1)
self.assertIn("bugs", fi2)
self.assertIsNotNone(fi1["bugs"])
self.assertIsNotNone(fi2["bugs"])
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()

View file

@ -3,6 +3,7 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
from libmozdata import hgmozilla
from libmozdata.connection import Query
@ -10,125 +11,150 @@ from libmozdata.connection import Query
class RevisionTest(unittest.TestCase):
def test_revision(self):
rev = hgmozilla.Revision.get_revision()
self.assertIn('pushid', rev)
self.assertIn('pushuser', rev)
self.assertIn('pushdate', rev)
self.assertIn('user', rev)
self.assertIn('branch', rev)
self.assertIn('date', rev)
self.assertIn('desc', rev)
self.assertIn("pushid", rev)
self.assertIn("pushuser", rev)
self.assertIn("pushdate", rev)
self.assertIn("user", rev)
self.assertIn("branch", rev)
self.assertIn("date", rev)
self.assertIn("desc", rev)
def test_revisions(self):
data1 = {
'first': {},
'second': {},
}
data1 = {"first": {}, "second": {}}
data2 = {}
def handler1(json, data):
if 'tip' in json['tags']:
data['first'].update(json)
if "tip" in json["tags"]:
data["first"].update(json)
else:
data['second'].update(json)
data["second"].update(json)
def handler2(json, data):
data.update(json)
hgmozilla.Revision(queries=[
Query(hgmozilla.Revision.get_url('nightly'), [{'node': 'default'}, {'node': '1584ba8c1b86'}], handler1, data1),
Query(hgmozilla.Revision.get_url('nightly'), {'node': 'default'}, handler2, data2),
]).wait()
hgmozilla.Revision(
queries=[
Query(
hgmozilla.Revision.get_url("nightly"),
[{"node": "default"}, {"node": "1584ba8c1b86"}],
handler1,
data1,
),
Query(
hgmozilla.Revision.get_url("nightly"),
{"node": "default"},
handler2,
data2,
),
]
).wait()
for rev in [data1['first'], data1['second'], data2]:
self.assertIn('pushid', rev)
self.assertIn('pushuser', rev)
self.assertIn('pushdate', rev)
self.assertIn('user', rev)
self.assertIn('branch', rev)
self.assertIn('date', rev)
self.assertIn('desc', rev)
self.assertIn('node', rev)
for rev in [data1["first"], data1["second"], data2]:
self.assertIn("pushid", rev)
self.assertIn("pushuser", rev)
self.assertIn("pushdate", rev)
self.assertIn("user", rev)
self.assertIn("branch", rev)
self.assertIn("date", rev)
self.assertIn("desc", rev)
self.assertIn("node", rev)
self.assertEqual(data1['second']['node'], '1584ba8c1b86f9c4de5ccda5241cef36e80f042c')
self.assertNotEqual(data1['first']['node'], data1['second']['node'])
self.assertEqual(data1['first']['node'], data2['node'])
self.assertEqual(
data1["second"]["node"], "1584ba8c1b86f9c4de5ccda5241cef36e80f042c"
)
self.assertNotEqual(data1["first"]["node"], data1["second"]["node"])
self.assertEqual(data1["first"]["node"], data2["node"])
class RawRevisionTest(unittest.TestCase):
def test_revision(self):
rev = hgmozilla.RawRevision.get_revision('central', '1584ba8c1b86')
self.assertIn('# Node ID 1584ba8c1b86f9c4de5ccda5241cef36e80f042c', rev)
rev = hgmozilla.RawRevision.get_revision("central", "1584ba8c1b86")
self.assertIn("# Node ID 1584ba8c1b86f9c4de5ccda5241cef36e80f042c", rev)
def test_revisions(self):
data1 = {
'first': None,
'second': None,
}
data2 = {
'first': None
}
data1 = {"first": None, "second": None}
data2 = {"first": None}
def handler1(response):
if '1584ba8c1b86' in response:
data1['first'] = response
elif 'f5578fdc50ef' in response:
data1['second'] = response
if "1584ba8c1b86" in response:
data1["first"] = response
elif "f5578fdc50ef" in response:
data1["second"] = response
def handler2(response):
data2['first'] = response
data2["first"] = response
hgmozilla.Revision(queries=[
Query(hgmozilla.RawRevision.get_url('nightly'), [{'node': 'f5578fdc50ef'}, {'node': '1584ba8c1b86'}], handler1),
Query(hgmozilla.RawRevision.get_url('nightly'), {'node': '1584ba8c1b86'}, handler2),
]).wait()
hgmozilla.Revision(
queries=[
Query(
hgmozilla.RawRevision.get_url("nightly"),
[{"node": "f5578fdc50ef"}, {"node": "1584ba8c1b86"}],
handler1,
),
Query(
hgmozilla.RawRevision.get_url("nightly"),
{"node": "1584ba8c1b86"},
handler2,
),
]
).wait()
self.assertIn('# Node ID 1584ba8c1b86f9c4de5ccda5241cef36e80f042c', data1['first'])
self.assertIn('# Node ID f5578fdc50ef11b7f12451c88297f327abb0e9da', data1['second'])
self.assertIn('# Node ID 1584ba8c1b86f9c4de5ccda5241cef36e80f042c', data2['first'])
self.assertIn(
"# Node ID 1584ba8c1b86f9c4de5ccda5241cef36e80f042c", data1["first"]
)
self.assertIn(
"# Node ID f5578fdc50ef11b7f12451c88297f327abb0e9da", data1["second"]
)
self.assertIn(
"# Node ID 1584ba8c1b86f9c4de5ccda5241cef36e80f042c", data2["first"]
)
class FileInfoTest(unittest.TestCase):
def test_fileinfo(self):
path = 'netwerk/protocol/http/nsHttpConnectionMgr.cpp'
path = "netwerk/protocol/http/nsHttpConnectionMgr.cpp"
info = hgmozilla.FileInfo.get(path)
self.assertIsNotNone(info)
self.assertIsNotNone(info['netwerk/protocol/http/nsHttpConnectionMgr.cpp'])
self.assertIsNotNone(info["netwerk/protocol/http/nsHttpConnectionMgr.cpp"])
def test_fileinfo_multiple_files(self):
paths = ['netwerk/protocol/http/nsHttpConnectionMgr.cpp', 'netwerk/protocol/http/nsHttpConnectionMgr.h']
paths = [
"netwerk/protocol/http/nsHttpConnectionMgr.cpp",
"netwerk/protocol/http/nsHttpConnectionMgr.h",
]
info = hgmozilla.FileInfo.get(paths)
self.assertIsNotNone(info)
self.assertIsNotNone(info['netwerk/protocol/http/nsHttpConnectionMgr.cpp'])
self.assertIsNotNone(info['netwerk/protocol/http/nsHttpConnectionMgr.h'])
self.assertIsNotNone(info["netwerk/protocol/http/nsHttpConnectionMgr.cpp"])
self.assertIsNotNone(info["netwerk/protocol/http/nsHttpConnectionMgr.h"])
def test_fileinfo_release_channel(self):
path = 'netwerk/protocol/http/nsHttpConnectionMgr.cpp'
info = hgmozilla.FileInfo.get(path, 'release')
path = "netwerk/protocol/http/nsHttpConnectionMgr.cpp"
info = hgmozilla.FileInfo.get(path, "release")
self.assertIsNotNone(info)
self.assertIsNotNone(info['netwerk/protocol/http/nsHttpConnectionMgr.cpp'])
self.assertIsNotNone(info["netwerk/protocol/http/nsHttpConnectionMgr.cpp"])
class AnnotateTest(unittest.TestCase):
def test_annotate(self):
path = 'netwerk/protocol/http/nsHttpConnectionMgr.cpp'
path = "netwerk/protocol/http/nsHttpConnectionMgr.cpp"
info = hgmozilla.Annotate.get(path)
self.assertIsNotNone(info)
self.assertTrue(path in info)
annotations = info[path]
self.assertIsNotNone(annotations)
self.assertTrue('abspath' in annotations)
self.assertEqual(annotations['abspath'], path)
self.assertTrue('annotate' in annotations)
self.assertTrue("abspath" in annotations)
self.assertEqual(annotations["abspath"], path)
self.assertTrue("annotate" in annotations)
def test_annotate_multiple_files(self):
paths = ['netwerk/protocol/http/nsHttpConnectionMgr.cpp',
'netwerk/protocol/http/nsHttpConnectionMgr.h']
paths = [
"netwerk/protocol/http/nsHttpConnectionMgr.cpp",
"netwerk/protocol/http/nsHttpConnectionMgr.h",
]
info = hgmozilla.Annotate.get(paths)
self.assertIsNotNone(info)
@ -137,22 +163,22 @@ class AnnotateTest(unittest.TestCase):
self.assertTrue(path in info)
annotations = info[path]
self.assertIsNotNone(annotations)
self.assertTrue('abspath' in annotations)
self.assertEqual(annotations['abspath'], path)
self.assertTrue('annotate' in annotations)
self.assertTrue("abspath" in annotations)
self.assertEqual(annotations["abspath"], path)
self.assertTrue("annotate" in annotations)
def test_annotate_release_channel(self):
path = 'netwerk/protocol/http/nsHttpConnectionMgr.cpp'
info = hgmozilla.Annotate.get(path, 'release')
path = "netwerk/protocol/http/nsHttpConnectionMgr.cpp"
info = hgmozilla.Annotate.get(path, "release")
self.assertIsNotNone(info)
self.assertTrue(path in info)
annotations = info[path]
self.assertIsNotNone(annotations)
self.assertTrue('abspath' in annotations)
self.assertEqual(annotations['abspath'], path)
self.assertTrue('annotate' in annotations)
self.assertTrue("abspath" in annotations)
self.assertEqual(annotations["abspath"], path)
self.assertTrue("annotate" in annotations)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()
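These tests exercise the callback protocol: each Query bundles a URL, one or more parameter dicts, a handler, and an optional accumulator, and .wait() blocks until every handler has received its decoded payload. A minimal sketch of the pattern, reusing a node from the tests above:

from libmozdata import hgmozilla
from libmozdata.connection import Query

data = {}

def handler(json, dest):
    # called once per parameter dict with the decoded JSON payload
    dest[json["node"]] = json["desc"]

hgmozilla.Revision(
    queries=[
        Query(
            hgmozilla.Revision.get_url("nightly"),
            {"node": "1584ba8c1b86"},
            handler,
            data,
        )
    ]
).wait()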

View file

@ -3,40 +3,74 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
from libmozdata import modules
class ModulesTest(unittest.TestCase):
def test_module_from_path(self):
self.assertEqual(modules.module_from_path('xpcom/threads/nsEnvironment.cpp')['name'], 'XPCOM')
self.assertEqual(modules.module_from_path('xpcom/strinFile')['name'], 'XPCOM')
self.assertEqual(modules.module_from_path('xpcom/tests/component/TestComponent.cpp')['name'], 'XPCOM')
self.assertEqual(modules.module_from_path('xpcom/base/nsCycleCollector.h')['name'], 'Cycle Collector')
self.assertEqual(modules.module_from_path('xpcom/string/nsString.cpp')['name'], 'String')
self.assertEqual(modules.module_from_path('xpcom/string/')['name'], 'String')
self.assertEqual(modules.module_from_path('xpcom/string')['name'], 'String')
self.assertEqual(modules.module_from_path('tools/cvs2hg-import.py')['name'], 'Build Config')
self.assertIsNone(modules.module_from_path('doesntexist'))
self.assertEqual(
modules.module_from_path("xpcom/threads/nsEnvironment.cpp")["name"], "XPCOM"
)
self.assertEqual(modules.module_from_path("xpcom/strinFile")["name"], "XPCOM")
self.assertEqual(
modules.module_from_path("xpcom/tests/component/TestComponent.cpp")["name"],
"XPCOM",
)
self.assertEqual(
modules.module_from_path("xpcom/base/nsCycleCollector.h")["name"],
"Cycle Collector",
)
self.assertEqual(
modules.module_from_path("xpcom/string/nsString.cpp")["name"], "String"
)
self.assertEqual(modules.module_from_path("xpcom/string/")["name"], "String")
self.assertEqual(modules.module_from_path("xpcom/string")["name"], "String")
self.assertEqual(
modules.module_from_path("tools/cvs2hg-import.py")["name"], "Build Config"
)
self.assertIsNone(modules.module_from_path("doesntexist"))
# Test heuristics
self.assertEqual(modules.module_from_path('old-configure.in')['name'], 'Build Config')
self.assertEqual(modules.module_from_path('python/mach/mach/dispatcher.py')['name'], 'Build Config')
self.assertEqual(
modules.module_from_path("old-configure.in")["name"], "Build Config"
)
self.assertEqual(
modules.module_from_path("python/mach/mach/dispatcher.py")["name"],
"Build Config",
)
self.assertEqual(modules.module_from_path('js/public/GCPolicyAPI.h')['name'], 'JavaScript')
self.assertEqual(
modules.module_from_path("js/public/GCPolicyAPI.h")["name"], "JavaScript"
)
self.assertEqual(modules.module_from_path('security/certverifier/CertVerifier.cpp')['name'], 'security')
self.assertEqual(modules.module_from_path('security/pkix/lib/pkixnames.cpp')['name'], 'security')
self.assertEqual(modules.module_from_path('security/manager/')['name'], 'Security - Mozilla PSM Glue')
self.assertEqual(
modules.module_from_path("security/certverifier/CertVerifier.cpp")["name"],
"security",
)
self.assertEqual(
modules.module_from_path("security/pkix/lib/pkixnames.cpp")["name"],
"security",
)
self.assertEqual(
modules.module_from_path("security/manager/")["name"],
"Security - Mozilla PSM Glue",
)
self.assertEqual(modules.module_from_path('tools/profiler/core/platform.h')['name'], 'Code Analysis and Debugging Tools')
self.assertEqual(modules.module_from_path('tools/update-packaging/')['name'], 'Build and Release Tools')
self.assertEqual(
modules.module_from_path("tools/profiler/core/platform.h")["name"],
"Code Analysis and Debugging Tools",
)
self.assertEqual(
modules.module_from_path("tools/update-packaging/")["name"],
"Build and Release Tools",
)
def test_module_info(self):
self.assertEqual(modules.module_info('XPCOM')['name'], 'XPCOM')
self.assertEqual(modules.module_info('xpcom')['name'], 'XPCOM')
self.assertIsNone(modules.module_info('DoesntExist'))
self.assertEqual(modules.module_info("XPCOM")["name"], "XPCOM")
self.assertEqual(modules.module_info("xpcom")["name"], "XPCOM")
self.assertIsNone(modules.module_info("DoesntExist"))
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()

Diff not shown because of its large size.

View file

@ -2,11 +2,10 @@ import unittest
class PhabricatorTest(unittest.TestCase):
def test_import(self):
'''
"""
Simply import the library to check that all requirements are available
'''
"""
from libmozdata.phabricator import PhabricatorAPI # noqa
assert True

View file

@ -4,9 +4,10 @@
import unittest
from datetime import timedelta
from libmozdata.redash import Redash
import libmozdata.utils as utils
import libmozdata.versions
from libmozdata.redash import Redash
class RedashTest(unittest.TestCase):
@ -16,15 +17,17 @@ class RedashTest(unittest.TestCase):
versions = libmozdata.versions.get()
end_date = utils.get_date_ymd('yesterday')
end_date = utils.get_date_ymd("yesterday")
start_date = utils.get_date_ymd(end_date - timedelta(10))
for channel in ['release', 'beta', 'nightly']:
khours = Redash.get_khours(start_date, end_date, channel, [versions[channel]], 'Firefox')
for channel in ["release", "beta", "nightly"]:
khours = Redash.get_khours(
start_date, end_date, channel, [versions[channel]], "Firefox"
)
self.assertEqual(len(khours), 11)
for i in range(11):
self.assertIn(start_date + timedelta(i), khours)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()

View file

@ -3,6 +3,7 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
from libmozdata import release_calendar as rc
@ -12,5 +13,5 @@ class ReleaseCalendarTest(unittest.TestCase):
self.assertIsNotNone(cal)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()

View file

@ -3,6 +3,7 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
from libmozdata import release_owners as ro
@ -12,5 +13,5 @@ class ReleaseOwnersTest(unittest.TestCase):
self.assertIsNotNone(owners)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()

View file

@ -3,35 +3,42 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
from libmozdata import socorro
class SuperSearchTest(unittest.TestCase):
def test_search(self):
data = {}
socorro.SuperSearch(params={'product': 'Firefox',
'signature': '~OOM',
'_columns': ['uuid', 'build_id'],
'_results_number': 0,
'_facets': ['product']},
handler=lambda j, d: d.update(j),
handlerdata=data).wait()
socorro.SuperSearch(
params={
"product": "Firefox",
"signature": "~OOM",
"_columns": ["uuid", "build_id"],
"_results_number": 0,
"_facets": ["product"],
},
handler=lambda j, d: d.update(j),
handlerdata=data,
).wait()
self.assertIsNotNone(data)
class ProcessedCrashTest(unittest.TestCase):
def test_processed(self):
uuid = []
socorro.SuperSearch(params={'product': 'Firefox',
'signature': '~OOM',
'_columns': ['uuid'],
'_results_number': 2,
'_facets': ['product']},
handler=lambda j, d: d.extend([j['hits'][0]['uuid'], j['hits'][1]['uuid']]),
handlerdata=uuid).wait()
socorro.SuperSearch(
params={
"product": "Firefox",
"signature": "~OOM",
"_columns": ["uuid"],
"_results_number": 2,
"_facets": ["product"],
},
handler=lambda j, d: d.extend([j["hits"][0]["uuid"], j["hits"][1]["uuid"]]),
handlerdata=uuid,
).wait()
self.assertEqual(len(uuid), 2)
processed = socorro.ProcessedCrash.get_processed(uuid)
@@ -39,20 +46,28 @@ class ProcessedCrashTest(unittest.TestCase):
class BugsTest(unittest.TestCase):
def test_bugs(self):
signature = []
socorro.SuperSearch(params={'product': 'Firefox',
'signature': '~OOM',
'_results_number': 0,
'_facets': ['signature']},
handler=lambda j, d: d.extend([j['facets']['signature'][0]['term'], j['facets']['signature'][1]['term']]),
handlerdata=signature).wait()
socorro.SuperSearch(
params={
"product": "Firefox",
"signature": "~OOM",
"_results_number": 0,
"_facets": ["signature"],
},
handler=lambda j, d: d.extend(
[
j["facets"]["signature"][0]["term"],
j["facets"]["signature"][1]["term"],
]
),
handlerdata=signature,
).wait()
self.assertEqual(len(signature), 2)
bugs = socorro.Bugs.get_bugs(signature)
self.assertIsNotNone(bugs)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()
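All three tests share the same asynchronous pattern: SuperSearch fires the query, the handler receives the decoded JSON plus a caller-supplied accumulator, and .wait() blocks until the response has been processed. A condensed sketch of that pattern, grounded in the calls above:

from libmozdata import socorro

signatures = []
socorro.SuperSearch(
    params={
        "product": "Firefox",
        "signature": "~OOM",
        "_results_number": 0,
        "_facets": ["signature"],
    },
    # The handler mutates handlerdata in place; its return value is ignored.
    handler=lambda json, data: data.extend(
        facet["term"] for facet in json["facets"]["signature"]
    ),
    handlerdata=signatures,
).wait()
print(signatures)  # the OOM-related signatures, one term per facet bucket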

View file

@@ -2,66 +2,78 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
import datetime
from dateutil.tz import tzutc
import math
from libmozdata import utils
import time
import unittest
from dateutil.tz import tzutc
from libmozdata import utils
class UtilsTest(unittest.TestCase):
def test_get_best(self):
self.assertIsNone(utils.get_best(None))
self.assertIsNone(utils.get_best({}))
self.assertEqual(utils.get_best({'key1': 7, 'key2': 99, 'key3': 4}), 'key2')
self.assertEqual(utils.get_best({"key1": 7, "key2": 99, "key3": 4}), "key2")
def test_get_timestamp(self):
date = '1991-04-16'
date = "1991-04-16"
self.assertEqual(utils.get_timestamp(date), 671760000)
self.assertEqual(utils.get_timestamp(datetime.datetime.strptime(date, '%Y-%m-%d')), 671760000)
self.assertGreater(utils.get_timestamp('today'), utils.get_timestamp(date))
ts1 = utils.get_timestamp('now')
self.assertEqual(
utils.get_timestamp(datetime.datetime.strptime(date, "%Y-%m-%d")), 671760000
)
self.assertGreater(utils.get_timestamp("today"), utils.get_timestamp(date))
ts1 = utils.get_timestamp("now")
time.sleep(1.01)
ts2 = utils.get_timestamp('now')
ts2 = utils.get_timestamp("now")
self.assertGreater(ts2, ts1)
def test_get_date_ymd(self):
self.assertIsNotNone(utils.get_date_ymd('today'))
self.assertIsNotNone(utils.get_date_ymd('yesterday'))
self.assertIsNotNone(utils.get_date_ymd('tomorrow'))
self.assertTrue(utils.get_date_ymd('yesterday') < utils.get_date_ymd('today') < utils.get_date_ymd('tomorrow'))
date = utils.as_utc(datetime.datetime.strptime('1991-04-16', '%Y-%m-%d'))
self.assertEqual(utils.get_date_ymd('1991/04/16'), date)
self.assertEqual(utils.get_date_ymd('1991-04-16'), date)
self.assertEqual(utils.get_date_ymd('1991 04 16'), date)
self.assertEqual(utils.get_date_ymd('04/16/1991'), date)
self.assertEqual(utils.get_date_ymd('16/04/1991'), date)
self.assertEqual(utils.get_date_ymd('1991-04-16 12:00:00'), utils.as_utc(datetime.datetime(1991, 4, 16, 12, 0)))
self.assertIsNotNone(utils.get_date_ymd("today"))
self.assertIsNotNone(utils.get_date_ymd("yesterday"))
self.assertIsNotNone(utils.get_date_ymd("tomorrow"))
self.assertTrue(
utils.get_date_ymd("yesterday")
< utils.get_date_ymd("today")
< utils.get_date_ymd("tomorrow")
)
date = utils.as_utc(datetime.datetime.strptime("1991-04-16", "%Y-%m-%d"))
self.assertEqual(utils.get_date_ymd("1991/04/16"), date)
self.assertEqual(utils.get_date_ymd("1991-04-16"), date)
self.assertEqual(utils.get_date_ymd("1991 04 16"), date)
self.assertEqual(utils.get_date_ymd("04/16/1991"), date)
self.assertEqual(utils.get_date_ymd("16/04/1991"), date)
self.assertEqual(
utils.get_date_ymd("1991-04-16 12:00:00"),
utils.as_utc(datetime.datetime(1991, 4, 16, 12, 0)),
)
with self.assertRaises(Exception):
utils.get_date_ymd('')
utils.get_date_ymd("")
with self.assertRaises(Exception):
utils.get_date_ymd('marco')
utils.get_date_ymd("marco")
def test_get_today(self):
self.assertIsNotNone(utils.get_today())
def test_get_date_str(self):
date = '1991-04-16'
self.assertEqual(utils.get_date_str(datetime.datetime.strptime(date, '%Y-%m-%d')), date)
date = "1991-04-16"
self.assertEqual(
utils.get_date_str(datetime.datetime.strptime(date, "%Y-%m-%d")), date
)
def test_get_date(self):
self.assertEqual(utils.get_date('1991/04/16'), '1991-04-16')
self.assertEqual(utils.get_date('1991/04/16', 1), '1991-04-15')
self.assertEqual(utils.get_date("1991/04/16"), "1991-04-16")
self.assertEqual(utils.get_date("1991/04/16", 1), "1991-04-15")
def test_get_now_timestamp(self):
date = '1991-04-16'
date = "1991-04-16"
self.assertGreater(utils.get_now_timestamp(), utils.get_timestamp(date))
def test_date_from_timestamp(self):
date = '1975-03-16'
date = "1975-03-16"
dt = utils.get_date_ymd(date)
ts = utils.get_timestamp(dt)
self.assertEqual(dt, datetime.datetime(1975, 3, 16, tzinfo=tzutc()))
@@ -70,19 +82,19 @@ class UtilsTest(unittest.TestCase):
self.assertEqual(new_dt, dt)
def test_is64(self):
self.assertTrue(utils.is64('64bit'))
self.assertTrue(utils.is64('A 64 bit machine'))
self.assertFalse(utils.is64('A 32 bit machine'))
self.assertTrue(utils.is64("64bit"))
self.assertTrue(utils.is64("A 64 bit machine"))
self.assertFalse(utils.is64("A 32 bit machine"))
def test_percent(self):
self.assertEqual(utils.percent(0.23), '23%')
self.assertEqual(utils.percent(1), '100%')
self.assertEqual(utils.percent(1.5), '150%')
self.assertEqual(utils.percent(0.23), "23%")
self.assertEqual(utils.percent(1), "100%")
self.assertEqual(utils.percent(1.5), "150%")
def test_simple_percent(self):
self.assertEqual(utils.simple_percent(3), '3%')
self.assertEqual(utils.simple_percent(3.0), '3%')
self.assertEqual(utils.simple_percent(3.5), '3.5%')
self.assertEqual(utils.simple_percent(3), "3%")
self.assertEqual(utils.simple_percent(3.0), "3%")
self.assertEqual(utils.simple_percent(3.5), "3.5%")
def test_get_sample(self):
arr = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
@@ -93,8 +105,14 @@ class UtilsTest(unittest.TestCase):
self.assertEqual(len(utils.get_sample(arr, 0.1)), 1)
def test_get_date_from_buildid(self):
self.assertEqual(utils.get_date_from_buildid('20160407164938'), datetime.datetime(2016, 4, 7, 23, 49, 38, tzinfo=tzutc()))
self.assertEqual(utils.get_date_from_buildid(20160407164938), datetime.datetime(2016, 4, 7, 23, 49, 38, tzinfo=tzutc()))
self.assertEqual(
utils.get_date_from_buildid("20160407164938"),
datetime.datetime(2016, 4, 7, 23, 49, 38, tzinfo=tzutc()),
)
self.assertEqual(
utils.get_date_from_buildid(20160407164938),
datetime.datetime(2016, 4, 7, 23, 49, 38, tzinfo=tzutc()),
)
def test_rate(self):
self.assertEqual(utils.rate(1.0, 2.0), 0.5)
@@ -107,47 +125,74 @@ class UtilsTest(unittest.TestCase):
import glob
# Bugzilla bug
out = parse('This is about bUg 12345. What a great bug.')
self.assertEqual(out, '<div class="no-header">This is about <a href="https://bugzilla.mozilla.org/12345" target="_blank">Bug 12345</a>. What a great bug.</div>')
out = parse("This is about bUg 12345. What a great bug.")
self.assertEqual(
out,
'<div class="no-header">This is about <a href="https://bugzilla.mozilla.org/12345" target="_blank">Bug 12345</a>. What a great bug.</div>',
)
# Simple link
out = parse('http://mozilla.org')
self.assertEqual(out, '<div class="no-header"><a href="http://mozilla.org" target="_blank">http://mozilla.org</a></div>')
out = parse("http://mozilla.org")
self.assertEqual(
out,
'<div class="no-header"><a href="http://mozilla.org" target="_blank">http://mozilla.org</a></div>',
)
# Complex link
out = parse('https://developer.mozilla.org/en-US/docs/Web/API/Media_Streams_API/Constraints#Result')
self.assertEqual(out, '<div class="no-header"><a href="https://developer.mozilla.org/en-US/docs/Web/API/Media_Streams_API/Constraints#Result" target="_blank">https://developer.mozilla.org/en-US/docs/Web/API/Media_Streams_API/Constraints#Result</a></div>')
out = parse(
"https://developer.mozilla.org/en-US/docs/Web/API/Media_Streams_API/Constraints#Result"
)
self.assertEqual(
out,
'<div class="no-header"><a href="https://developer.mozilla.org/en-US/docs/Web/API/Media_Streams_API/Constraints#Result" target="_blank">https://developer.mozilla.org/en-US/docs/Web/API/Media_Streams_API/Constraints#Result</a></div>',
)
# Html escaped
out = parse('Bug on <select/> element')
self.assertEqual(out, '<div class="no-header">Bug on &lt;select/&gt; element</div>')
out = parse("Bug on <select/> element")
self.assertEqual(
out, '<div class="no-header">Bug on &lt;select/&gt; element</div>'
)
# Risky "risks and why"
out = parse('[Risks and why]: Medium.')
self.assertEqual(out, '<h1 class="risks-and-why risky">Risks and why</h1><div class="risks-and-why risky">Medium.</div>')
out = parse("[Risks and why]: Medium.")
self.assertEqual(
out,
'<h1 class="risks-and-why risky">Risks and why</h1><div class="risks-and-why risky">Medium.</div>',
)
# Risky string change
out = parse('[String/UUID change made/needed]: yes, we need a change')
self.assertEqual(out, '<h1 class="string-uuid-change risky">String/UUID change made/needed</h1><div class="string-uuid-change risky">yes, we need a change</div>')
out = parse('[String/UUID change made/needed]: N/A')
self.assertEqual(out, '<h1 class="string-uuid-change">String/UUID change made/needed</h1><div class="string-uuid-change">N/A</div>') # not risky
out = parse("[String/UUID change made/needed]: yes, we need a change")
self.assertEqual(
out,
'<h1 class="string-uuid-change risky">String/UUID change made/needed</h1><div class="string-uuid-change risky">yes, we need a change</div>',
)
out = parse("[String/UUID change made/needed]: N/A")
self.assertEqual(
out,
'<h1 class="string-uuid-change">String/UUID change made/needed</h1><div class="string-uuid-change">N/A</div>',
) # not risky
# Risky test coverage
out = parse('[Describe test coverage new/current, TreeHerder]: none')
self.assertEqual(out, '<h1 class="describe-test-coverage risky">Describe test coverage new/current, TreeHerder</h1><div class="describe-test-coverage risky">none</div>')
out = parse("[Describe test coverage new/current, TreeHerder]: none")
self.assertEqual(
out,
'<h1 class="describe-test-coverage risky">Describe test coverage new/current, TreeHerder</h1><div class="describe-test-coverage risky">none</div>',
)
# Full comments
for text_path in glob.glob('tests/uplift/*.txt'):
with open(text_path, 'r') as text:
for text_path in glob.glob("tests/uplift/*.txt"):
with open(text_path, "r") as text:
out = parse(text.read())
html_path = text_path[:-4] + '.html'
with open(html_path, 'r') as html:
html_path = text_path[:-4] + ".html"
with open(html_path, "r") as html:
self.assertEqual(out, html.read())
def test_get_params_for_url(self):
params = {'a': 1, 'abc': 2, 'efgh': 3, 'bcd': [4, 5, 6]}
self.assertEqual(utils.get_params_for_url(params), '?a=1&abc=2&bcd=4&bcd=5&bcd=6&efgh=3')
params = {"a": 1, "abc": 2, "efgh": 3, "bcd": [4, 5, 6]}
self.assertEqual(
utils.get_params_for_url(params), "?a=1&abc=2&bcd=4&bcd=5&bcd=6&efgh=3"
)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()
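Taken together, the assertions above double as documentation for the date helpers. A compact tour, with expected values copied from the tests:

import datetime

from libmozdata import utils

d = utils.get_date_ymd("1991/04/16")  # tz-aware UTC datetime
assert d == utils.get_date_ymd("16/04/1991")  # several input formats accepted
assert utils.get_timestamp("1991-04-16") == 671760000
assert utils.get_date("1991/04/16", 1) == "1991-04-15"  # offset counts days back
assert utils.get_date_str(datetime.datetime(1991, 4, 16)) == "1991-04-16"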

View file

@@ -2,23 +2,24 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
import datetime
import unittest
from contextlib import contextmanager
import responses
from dateutil.tz import tzutc
import libmozdata.utils as utils
import libmozdata.versions as versions
from contextlib import contextmanager
import responses
class VersionsTest(unittest.TestCase):
def cleanup(self):
"""
Restore versions from cache after this test
Otherwise other tests will use the last loaded version
"""
versions.__dict__['__versions'] = None
versions.__dict__["__versions"] = None
def tearDown(self):
self.cleanup()
@@ -46,82 +47,145 @@ class VersionsTest(unittest.TestCase):
def test_versions(self):
v = versions.get(base=True)
self.assertTrue(v['esr'] <= v['release'] <= v['beta'] <= v['nightly'])
self.assertTrue(v["esr"] <= v["release"] <= v["beta"] <= v["nightly"])
def test_version_dates(self):
self.assertEqual(versions.getMajorDate(46), datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()))
self.assertEqual(versions.getMajorDate('46'), datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()))
self.assertEqual(versions.getMajorDate('46.0'), datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()))
self.assertEqual(versions.getMajorDate('46.0.1'), datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()))
self.assertEqual(versions.getMajorDate('1'), datetime.datetime(2004, 11, 9, 8, 0, tzinfo=tzutc()))
self.assertEqual(versions.getMajorDate('1.0'), datetime.datetime(2004, 11, 9, 8, 0, tzinfo=tzutc()))
self.assertEqual(versions.getMajorDate('1.5'), datetime.datetime(2005, 11, 29, 8, 0, tzinfo=tzutc()))
self.assertEqual(versions.getMajorDate('14'), datetime.datetime(2012, 7, 17, 7, 0, tzinfo=tzutc()))
self.assertEqual(versions.getMajorDate('14.0'), datetime.datetime(2012, 7, 17, 7, 0, tzinfo=tzutc()))
self.assertEqual(versions.getMajorDate('14.0.1'), datetime.datetime(2012, 7, 17, 7, 0, tzinfo=tzutc()))
self.assertEqual(versions.getMajorDate('33'), datetime.datetime(2014, 10, 14, 7, 0, tzinfo=tzutc()))
self.assertEqual(versions.getMajorDate('33.0'), datetime.datetime(2014, 10, 14, 7, 0, tzinfo=tzutc()))
self.assertEqual(versions.getMajorDate('33.1'), datetime.datetime(2014, 11, 10, 8, 0, tzinfo=tzutc()))
self.assertEqual(
versions.getMajorDate(46),
datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()),
)
self.assertEqual(
versions.getMajorDate("46"),
datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()),
)
self.assertEqual(
versions.getMajorDate("46.0"),
datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()),
)
self.assertEqual(
versions.getMajorDate("46.0.1"),
datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()),
)
self.assertEqual(
versions.getMajorDate("1"),
datetime.datetime(2004, 11, 9, 8, 0, tzinfo=tzutc()),
)
self.assertEqual(
versions.getMajorDate("1.0"),
datetime.datetime(2004, 11, 9, 8, 0, tzinfo=tzutc()),
)
self.assertEqual(
versions.getMajorDate("1.5"),
datetime.datetime(2005, 11, 29, 8, 0, tzinfo=tzutc()),
)
self.assertEqual(
versions.getMajorDate("14"),
datetime.datetime(2012, 7, 17, 7, 0, tzinfo=tzutc()),
)
self.assertEqual(
versions.getMajorDate("14.0"),
datetime.datetime(2012, 7, 17, 7, 0, tzinfo=tzutc()),
)
self.assertEqual(
versions.getMajorDate("14.0.1"),
datetime.datetime(2012, 7, 17, 7, 0, tzinfo=tzutc()),
)
self.assertEqual(
versions.getMajorDate("33"),
datetime.datetime(2014, 10, 14, 7, 0, tzinfo=tzutc()),
)
self.assertEqual(
versions.getMajorDate("33.0"),
datetime.datetime(2014, 10, 14, 7, 0, tzinfo=tzutc()),
)
self.assertEqual(
versions.getMajorDate("33.1"),
datetime.datetime(2014, 11, 10, 8, 0, tzinfo=tzutc()),
)
self.assertEqual(versions.getMajorDate('46'), versions.getDate('46'))
self.assertEqual(versions.getMajorDate('46.0'), versions.getDate('46.0'))
self.assertNotEqual(versions.getMajorDate('48.0'), versions.getDate('48.0.1'))
self.assertEqual(versions.getDate('48.0.1'), datetime.datetime(2016, 8, 18, 7, 0, tzinfo=tzutc()))
self.assertEqual(versions.getDate('48.0.2'), datetime.datetime(2016, 8, 24, 7, 0, tzinfo=tzutc()))
self.assertEqual(versions.getMajorDate("46"), versions.getDate("46"))
self.assertEqual(versions.getMajorDate("46.0"), versions.getDate("46.0"))
self.assertNotEqual(versions.getMajorDate("48.0"), versions.getDate("48.0.1"))
self.assertEqual(
versions.getDate("48.0.1"),
datetime.datetime(2016, 8, 18, 7, 0, tzinfo=tzutc()),
)
self.assertEqual(
versions.getDate("48.0.2"),
datetime.datetime(2016, 8, 24, 7, 0, tzinfo=tzutc()),
)
v = versions.get(base=True)
if versions.getMajorDate(v['nightly']) is not None:
self.assertTrue(versions.getMajorDate(v['release']) <= versions.getMajorDate(v['beta']) <= versions.getMajorDate(v['nightly']))
elif versions.getMajorDate(v['beta']) is not None:
self.assertTrue(versions.getMajorDate(v['release']) <= versions.getMajorDate(v['beta']))
if versions.getMajorDate(v["nightly"]) is not None:
self.assertTrue(
versions.getMajorDate(v["release"])
<= versions.getMajorDate(v["beta"])
<= versions.getMajorDate(v["nightly"])
)
elif versions.getMajorDate(v["beta"]) is not None:
self.assertTrue(
versions.getMajorDate(v["release"]) <= versions.getMajorDate(v["beta"])
)
date = utils.get_date_ymd('2011-08-24T14:52:52Z')
self.assertEqual(date - versions.getMajorDate('7'), datetime.timedelta(-34, 28372))
date = utils.get_date_ymd("2011-08-24T14:52:52Z")
self.assertEqual(
date - versions.getMajorDate("7"), datetime.timedelta(-34, 28372)
)
self.assertEqual(versions.getCloserMajorRelease(date), ('7.0', datetime.datetime(2011, 9, 27, 7, 0, tzinfo=tzutc())))
self.assertEqual(versions.getCloserMajorRelease(date, negative=True), ('6.0', datetime.datetime(2011, 8, 16, 7, 0, tzinfo=tzutc())))
self.assertEqual(versions.getCloserMajorRelease(date, negative=False), ('7.0', datetime.datetime(2011, 9, 27, 7, 0, tzinfo=tzutc())))
self.assertEqual(
versions.getCloserMajorRelease(date),
("7.0", datetime.datetime(2011, 9, 27, 7, 0, tzinfo=tzutc())),
)
self.assertEqual(
versions.getCloserMajorRelease(date, negative=True),
("6.0", datetime.datetime(2011, 8, 16, 7, 0, tzinfo=tzutc())),
)
self.assertEqual(
versions.getCloserMajorRelease(date, negative=False),
("7.0", datetime.datetime(2011, 9, 27, 7, 0, tzinfo=tzutc())),
)
date = utils.get_date_ymd('2016-08-19')
self.assertEqual(versions.getCloserRelease(date), ('48.0.2', datetime.datetime(2016, 8, 24, 7, 0, tzinfo=tzutc())))
self.assertEqual(versions.getCloserRelease(date, negative=True), ('48.0.1', datetime.datetime(2016, 8, 18, 7, 0, tzinfo=tzutc())))
self.assertEqual(versions.getCloserRelease(date, negative=False), ('48.0.2', datetime.datetime(2016, 8, 24, 7, 0, tzinfo=tzutc())))
date = utils.get_date_ymd("2016-08-19")
self.assertEqual(
versions.getCloserRelease(date),
("48.0.2", datetime.datetime(2016, 8, 24, 7, 0, tzinfo=tzutc())),
)
self.assertEqual(
versions.getCloserRelease(date, negative=True),
("48.0.1", datetime.datetime(2016, 8, 18, 7, 0, tzinfo=tzutc())),
)
self.assertEqual(
versions.getCloserRelease(date, negative=False),
("48.0.2", datetime.datetime(2016, 8, 24, 7, 0, tzinfo=tzutc())),
)
def test_dual_esr(self):
# Check esr & esr previous
with self.setup_versions(
nightly="55.0a1",
devel="54.0b6",
stable="53.0.2",
esr="45.9.0esr",
esr_next="52.1.1esr"):
nightly="55.0a1",
devel="54.0b6",
stable="53.0.2",
esr="45.9.0esr",
esr_next="52.1.1esr",
):
v = versions.get(base=True)
self.assertDictEqual(v, {
'nightly': 55,
'beta': 54,
'release': 53,
'esr': 52,
'esr_previous': 45,
})
self.assertDictEqual(
v, {"nightly": 55, "beta": 54, "release": 53, "esr": 52, "esr_previous": 45}
)
def test_unique_esr(self):
# Check no esr previous is present
with self.setup_versions(
nightly="55.0a1",
devel="54.0b6",
stable="53.0.2",
esr="52.1.1esr"):
nightly="55.0a1", devel="54.0b6", stable="53.0.2", esr="52.1.1esr"
):
v = versions.get(base=True)
self.assertDictEqual(v, {
'nightly': 55,
'beta': 54,
'release': 53,
'esr': 52,
'esr_previous': None,
})
self.assertDictEqual(
v,
{"nightly": 55, "beta": 54, "release": 53, "esr": 52, "esr_previous": None},
)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()
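For reference, the helpers exercised above in direct use; the dict keys and the Firefox 46 date are taken from the assertions, while current version numbers will of course drift over time:

import datetime

from dateutil.tz import tzutc

import libmozdata.versions as versions

v = versions.get(base=True)  # major numbers per channel, incl. esr/esr_previous
print(v["release"], v["beta"], v["nightly"])
# Release date of Firefox 46, as asserted in the test above:
assert versions.getMajorDate("46") == datetime.datetime(
    2016, 4, 26, 7, 0, tzinfo=tzutc()
)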

File diff suppressed because it is too large. Load diff

View file

@@ -1,2 +0,0 @@
[flake8]
ignore = E501