bug1019528 - use of cachecontrol

Parent: 2142460729
Commit: 06577e9cc2
.gitignore
@@ -1,3 +1,5 @@
 mozcommitbuilder.egg-info/*
 dist/*
 *.pyc
+*.swp
+.DS_Store
mozregression/build_data.py
@@ -7,7 +7,7 @@ import threading
 import datetime
 
 from mozregression import errors
-from mozregression.utils import url_links
+from mozregression.utils import url_links, get_http_session
 
 
 class BuildData(object):
@@ -182,6 +182,7 @@ class BuildData(object):
             if isinstance(exc, requests.HTTPError):
                 if nb_try < max_try:
                     self._logger.warning("Got HTTPError - retrying")
+                    self._logger.warning(exc)
                     must_raise = False
             if must_raise:
                 raise errors.DownloadError(
@@ -243,7 +244,7 @@ class BuildFolderInfoFetcher(object):
         is found.
         """
         data = {}
-        response = requests.get(url)
+        response = get_http_session().get(url)
         for line in response.text.splitlines():
             if '/rev/' in line:
                 repository, changeset = line.split('/rev/')
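The '/rev/' split kept as context above works because hg build info files record the source as "<repository-url>/rev/<changeset>" on a single line. A standalone illustration (the URL is an invented example):

    # 'line' mimics one line of an hg build info file; the URL is made up.
    line = "https://hg.mozilla.org/mozilla-central/rev/abcdef123456"
    repository, changeset = line.split('/rev/')
    print(repository)  # https://hg.mozilla.org/mozilla-central
    print(changeset)   # abcdef123456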
mozregression/inboundfinder.py
@@ -1,12 +1,12 @@
 import mozinfo
 import sys
 from optparse import OptionParser
-import requests
-import copy
+import limitedfilecache
+from mozlog.structured import get_default_logger
 
 from mozregression.build_data import InboundBuildData, BuildFolderInfoFetcher
-from mozregression.utils import url_links
+from mozregression.utils import url_links, get_http_session, set_http_cache_session, one_gigabyte
 
 def get_repo_url(path='integration', inbound_branch='mozilla-inbound'):
     return "https://hg.mozilla.org/%s/%s" % (path, inbound_branch)
@@ -31,7 +31,7 @@ class PushLogsFinder(object):
         """
         Returns pushlog json objects (python dicts) sorted by date.
         """
-        response = requests.get(self.pushlog_url())
+        response = get_http_session().get(self.pushlog_url())
         response.raise_for_status()
         # sort pushlogs by date
         return sorted(response.json().itervalues(),
@@ -85,7 +85,7 @@ class BuildsFinder(object):
         return InboundBuildData(data, info_fetcher, raw_revisions)
 
 
-def cli(args=sys.argv[1:]):
+def get_build_finder(args):
     from mozregression.fetch_configs import create_config
     parser = OptionParser()
     parser.add_option("--start-rev", dest="start_rev", help="start revision")
@@ -101,6 +101,8 @@ def cli(args=sys.argv[1:]):
     parser.add_option("--inbound-branch", dest="inbound_branch",
                       help="inbound branch name on ftp.mozilla.org",
                       metavar="[tracemonkey|mozilla-1.9.2]", default=None)
+    parser.add_option("--http-cache-dir", dest="http_cache_dir",
+                      help="the directory for caching http requests")
 
     options, args = parser.parse_args(args)
     if not options.start_rev or not options.end_rev:
@@ -108,8 +110,16 @@ def cli(args=sys.argv[1:]):
 
     fetch_config = create_config(options.app, options.os, options.bits)
 
-    build_finder = BuildsFinder(fetch_config)
+    cacheSession = limitedfilecache.get_cache(
+        options.http_cache_dir, one_gigabyte,
+        logger=get_default_logger('Limited File Cache'))
+
+    set_http_cache_session(cacheSession)
+
+    return BuildsFinder(fetch_config)
+
+def cli(args=sys.argv[1:]):
+    build_finder = get_build_finder(args)
     revisions = build_finder.get_build_infos(options.start_rev,
                                              options.end_rev,
                                              range=60*60*12)
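The hunk above splits the old cli() into get_build_finder(args) plus a thin cli() wrapper, which is what lets the tests added later in this commit build a BuildsFinder without running a full bisection. A minimal, self-contained sketch of the same split, using made-up names (get_greeter is not mozregression code):

    import sys
    from optparse import OptionParser

    def get_greeter(args):
        # all parsing and object construction lives here, where a test can call it
        parser = OptionParser()
        parser.add_option("--name", dest="name", default="world")
        options, _ = parser.parse_args(args)
        return lambda: "hello, %s" % options.name

    def cli(args=sys.argv[1:]):
        # the entry point only wires the pieces together
        greeter = get_greeter(args)
        print(greeter())

    if __name__ == '__main__':
        cli()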
mozregression/limitedfilecache.py (new file)
@@ -0,0 +1,50 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+import requests
+import warnings
+
+from cachecontrol import CacheControl
+from cachecontrol.caches import FileCache
+
+def get_cache(directory, max_bytes, logger):
+    forever = True if directory else False
+    if forever:
+        fc = LimitedFileCache(directory, forever=forever,
+                              max_bytes=max_bytes, logger=logger)
+        return CacheControl(requests.session(), cache=fc)
+    else:
+        # not forever so just cache within this run
+        return CacheControl(requests.session())
+
+class LimitedFileCache(FileCache):
+    def __init__(self, directory, forever=False, filemode=0o0600,
+                 dirmode=0o0700, max_bytes=1000000000, logger=warnings):
+        FileCache.__init__(self, directory, forever, filemode, dirmode)
+        self.max_bytes = max_bytes
+        self.curr_bytes = 0
+        self.logger = logger
+
+    def set(self, key, value):
+        new_bytes = sys.getsizeof(value)
+        total = (self.curr_bytes + new_bytes)
+        if total > self.max_bytes:
+            message = "Tried adding %d bytes but %d bytes are currently saved" \
+                      " in the cache and the max_bytes is set to %d.\n" % \
+                      (new_bytes, self.curr_bytes, self.max_bytes)
+            self.logger.warn(message)
+            return
+
+        FileCache.set(self, key, value)
+
+        self.curr_bytes += new_bytes
+
+    def delete(self, key):
+        value = self.get(key)
+        FileCache.delete(self, key)
+        removed_bytes = sys.getsizeof(value)
+        if not self.forever:
+            self.curr_bytes -= removed_bytes
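A short usage sketch of the new module (assuming it is importable as mozregression.limitedfilecache, as the tests below do; the cache directory path is an arbitrary example). Passing a directory gives a disk-backed cache whose entries persist across runs, capped by the sys.getsizeof-based byte count (tracked per process only); passing None falls back to a per-run in-memory cache:

    from mozregression import limitedfilecache
    from mozregression import utils

    # install a disk-backed, size-capped cache session;
    # '/tmp/mozregression-cache' is just an example path
    session = limitedfilecache.get_cache('/tmp/mozregression-cache',
                                         utils.one_gigabyte, logger=None)
    utils.set_http_cache_session(session)

    # every helper that calls utils.get_http_session() now goes through the cache
    response = utils.get_http_session().get('https://hg.mozilla.org/mozilla-central')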
mozregression/regression.py
@@ -9,12 +9,14 @@ import mozinfo
 import sys
 import math
 from argparse import ArgumentParser
+import limitedfilecache
 from mozlog.structured import commandline, get_default_logger
 
 from mozregression import errors
 from mozregression import __version__
 from mozregression.utils import (parse_date, date_of_release, format_date,
-                                 parse_bits)
+                                 parse_bits, set_http_cache_session,
+                                 one_gigabyte)
 from mozregression.inboundfinder import get_repo_url, BuildsFinder
 from mozregression.build_data import NightlyBuildData
 from mozregression.fetch_configs import create_config
@@ -130,7 +132,7 @@ class Bisector(object):
                           len(revisions_left),
                           compute_steps_left(len(revisions_left))))
 
-    def bisect_inbound(self, inbound_revisions=None):
+    def prepare_bisect(self, inbound_revisions=None):
         self.found_repo = get_repo_url(inbound_branch=self.fetch_config.inbound_branch)
 
         if inbound_revisions is None:
@@ -165,10 +167,13 @@ class Bisector(object):
         build_url = inbound_revisions[mid]['build_url']
         persist_prefix='%s-%s-' % (inbound_revisions[mid]['timestamp'],
                                    self.fetch_config.inbound_branch)
-        launcher = create_launcher(self.fetch_config.app_name,
+        return create_launcher(self.fetch_config.app_name,
                                    build_url,
                                    persist=self.options.persist,
                                    persist_prefix=persist_prefix)
+
+    def bisect_inbound(self, inbound_revisions=None):
+        launcher = self.prepare_bisect(inbound_revisions)
         launcher.start()
 
         verdict = self._get_verdict('inbound', offer_skip=False)
@@ -442,15 +447,21 @@ def parse_args():
     parser.add_argument("--persist",
                         help="the directory in which files are to persist")
 
+    parser.add_argument("--http-cache-dir", dest="http_cache_dir",
+                        help="the directory for caching http requests")
+
     commandline.add_logging_group(parser)
     options = parser.parse_args()
     options.bits = parse_bits(options.bits)
     return options
 
 
-def cli():
-    default_bad_date = str(datetime.date.today())
-    default_good_date = "2009-01-01"
+def get_default_dates():
+    return (str(datetime.date.today()), "2009-01-01")
+
+
+def get_app():
+    (default_bad_date, default_good_date) = get_default_dates()
     options = parse_args()
     logger = commandline.setup_logging("mozregression", options, {"mach": sys.stdout})
@@ -461,6 +472,12 @@ def cli():
     # can go to inbound from nightly.
     fetch_config.set_inbound_branch(options.inbound_branch)
 
+    cacheSession = limitedfilecache.get_cache(
+        options.http_cache_dir, one_gigabyte,
+        logger=get_default_logger('Limited File Cache'))
+
+    set_http_cache_session(cacheSession)
+
     if options.inbound:
         if not fetch_config.is_inbound():
             sys.exit('Unable to bissect inbound for `%s`' % fetch_config.app_name)
@@ -470,7 +487,7 @@ def cli():
         bisector = Bisector(fetch_config, options,
                             last_good_revision=options.last_good_revision,
                             first_bad_revision=options.first_bad_revision)
-        app = bisector.bisect_inbound
+        return bisector.bisect_inbound
     else:
         # TODO: currently every fetch_config is nightly aware. Shoud we test
         # for this to be sure here ?
@@ -504,7 +521,10 @@ def cli():
               % (options.good_date, options.good_release))
 
     bisector = Bisector(fetch_config, options)
-    app = bisector.bisect_nightlies
+    return bisector.bisect_nightlies
+
+def cli():
+    app = get_app()
     try:
         app()
     except KeyboardInterrupt:
mozregression/utils.py
@@ -9,7 +9,9 @@ import sys
 from BeautifulSoup import BeautifulSoup
 import mozinfo
 import requests
+import limitedfilecache
 
+from mozlog.structured import get_default_logger
 from mozregression import errors
 
 class ClassRegistry(object):
@@ -28,6 +30,16 @@ class ClassRegistry(object):
     def get(self, name):
         return self._classes[name]
 
+CACHE_SESSION = None
+one_gigabyte = 1000000000
+
+def set_http_cache_session(a_cache_session):
+    global CACHE_SESSION
+    CACHE_SESSION = a_cache_session
+
+def get_http_session():
+    return CACHE_SESSION or requests
+
 def format_date(date):
     return date.strftime('%Y-%m-%d')
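The two helpers just added are the heart of the change: much of the rest of this commit swaps direct requests.get() calls for get_http_session().get(). A standalone sketch of the pattern (requests only, nothing mozregression-specific):

    import requests

    CACHE_SESSION = None

    def set_http_cache_session(a_cache_session):
        global CACHE_SESSION
        CACHE_SESSION = a_cache_session

    def get_http_session():
        # the requests module itself exposes get()/post()/..., so it can stand
        # in for a Session object when no cached session has been installed
        return CACHE_SESSION or requests

    set_http_cache_session(requests.Session())   # install a real session
    assert isinstance(get_http_session(), requests.Session)
    set_http_cache_session(None)                 # uninstall; falls back to the module
    assert get_http_session() is requests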
@@ -62,7 +74,7 @@ def download_url(url, dest=None):
     chunk_size = 16 * 1024
     bytes_so_far = 0.0
     tmp_file = dest + ".part"
-    request = requests.get(url, stream=True)
+    request = get_http_session().get(url, stream=True)
     total_size = int(request.headers['Content-length'].strip())
     if dest is None:
         dest = os.path.basename(url)
@@ -84,7 +96,7 @@ def download_url(url, dest=None):
 
 
 def url_links(url, regex=None, auth=None):
-    response = requests.get(url, auth=auth)
+    response = get_http_session().get(url, auth=auth)
     response.raise_for_status()
 
     soup = BeautifulSoup(response.text)
setup.py
@@ -26,8 +26,7 @@ setup(name="mozregression",
       moznightly = mozregression.runnightly:cli
       """,
       platforms =['Any'],
-      install_requires = ['httplib2 >= 0.6.0',
-                          'mozcommitbuilder >= 0.4.10',
+      install_requires = ['mozcommitbuilder >= 0.4.10',
                           'mozfile >= 0.1',
                           'mozprofile >= 0.4',
                           'mozrunner >= 6.5',
@@ -36,7 +35,9 @@ setup(name="mozregression",
                           'mozinfo >= 0.4',
                           'mozlog >= 2.7',
                           'mozversion >= 0.5',
-                          'requests >= 2.1',
+                          'requests >= 2.5.0',
+                          'cachecontrol >= 0.10.2',
+                          'lockfile >= 0.10.2', # used in conjunction with cachecontrol
                           'futures >= 2.1.6',
                           'mozdevice >= 0.43'
                          ],
(new test file, name not shown)
@@ -0,0 +1,50 @@
+import unittest
+from mock import patch, Mock
+from mozregression import errors
+from mozregression.regression import Bisector
+from mozregression.fetch_configs import FirefoxConfig
+from mozregression import launchers
+from test_build_data import MyBuildData
+
+class MyBisector(Bisector):
+    def __init__(self, fetch_config, options,
+                 last_good_revision=None, first_bad_revision=None):
+        self.last_good_revision = last_good_revision
+        self.first_bad_revision = first_bad_revision
+        self.fetch_config = fetch_config
+        self.options = options
+        self.http_cache_dir = options.http_cache_dir
+        self._logger = Mock(info=lambda a: None)
+
+class TestBisector(unittest.TestCase):
+    def setUp(self):
+        self.fetch_config = Mock(inbound_branch='my-inbound-branch')
+        self.persist = None
+        self.options = Mock(persist=self.persist)
+        self.bisector = MyBisector(self.fetch_config, self.options,
+                                   '17de0f463944', '035a951fc24a')
+        self.build_data = MyBuildData(range(20))
+
+    @staticmethod
+    def fake_create_launcher(name, url, persist=None, persist_prefix=''):
+        return (name, url, persist, persist_prefix)
+
+    @patch('mozregression.regression.create_launcher')
+    @patch('mozregression.build_data.BuildData.__getitem__')
+    @patch('mozregression.inboundfinder.BuildsFinder.get_build_infos')
+    def test_prepare_bisect(self, get_build_infos, getitem, create_launcher):
+        get_build_infos.return_value = self.build_data
+
+        expected_base = 'http://some_url'
+        expected_full_url = '%s-%s-' % (expected_base,
+                                        self.fetch_config.inbound_branch)
+        mid_mock = Mock(__getitem__= lambda a, b: expected_base)
+        getitem.return_value = mid_mock
+
+        create_launcher.side_effect = TestBisector.fake_create_launcher
+
+        self.assertEqual(self.bisector.prepare_bisect()[1:],
+                         (expected_base, self.persist, expected_full_url))
+
+if __name__ == '__main__':
+    unittest.main()
(tests for inboundfinder)
@@ -1,6 +1,8 @@
 import unittest
 from mock import patch, Mock
+import requests
 from mozregression import inboundfinder, errors
+from mozregression import utils
 
 class TestPushLogsFinder(unittest.TestCase):
     def test_pushlog_url(self):
@@ -28,3 +30,23 @@ class TestPushLogsFinder(unittest.TestCase):
             {'date': 5},
             {'date': 12},
         ])
+
+class TestInboundFinder(unittest.TestCase):
+    def setUp(self):
+        self.addCleanup(utils.set_http_cache_session, None)
+
+    def test_exit_on_omitted_start_and_end_rev(self):
+        with self.assertRaises(SystemExit) as se:
+            actual = inboundfinder.get_build_finder([])
+
+        self.assertEqual(se.exception.code,
+                         'start revision and end revision must be specified')
+
+    def test_get_app(self):
+        argv = ['inboundfinder', '--start-rev=1', '--end-rev=2',]
+        actual = inboundfinder.get_build_finder(argv)
+        self.assertTrue(isinstance(actual, inboundfinder.BuildsFinder))
+
+        # default is to use a cache session
+        a_session = utils.get_http_session()
+        self.assertTrue(isinstance(a_session, requests.Session))
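One detail worth noting in these tests: setUp registers utils.set_http_cache_session(None) with addCleanup, which runs after each test, so the module-global session installed by get_build_finder never leaks into other tests. A self-contained illustration of the idiom (GLOBAL and set_global are invented for the example):

    import unittest

    GLOBAL = None

    def set_global(value):
        global GLOBAL
        GLOBAL = value

    class Example(unittest.TestCase):
        def setUp(self):
            # cleanups run after every test, even one that fails
            self.addCleanup(set_global, None)

        def test_sets_global(self):
            set_global(object())
            self.assertIsNotNone(GLOBAL)

    if __name__ == '__main__':
        unittest.main()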
(new test file, name not shown)
@@ -0,0 +1,33 @@
+import unittest
+from mock import patch, Mock
+import datetime
+import tempfile
+import shutil
+import os
+import requests
+from mozregression.regression import get_app, Bisector, get_default_dates
+from mozregression import utils
+
+class TestRegression(unittest.TestCase):
+    def setUp(self):
+        self.addCleanup(utils.set_http_cache_session, None)
+
+    def verity_options(self, bisector, bad_date, good_date, app='firefox'):
+        self.assertEquals(bisector.options.app, app)
+        self.assertEquals(bisector.options.bad_date, bad_date)
+        self.assertEquals(bisector.options.good_date, good_date)
+
+    @patch('sys.argv')
+    def test_get_app(self, argv):
+        argv = ['mozregression',]
+        actual = get_app()
+        actual_self = actual.__self__
+        self.assertTrue(isinstance(actual_self, Bisector))
+
+        # default dates are used with empty args
+        (bad_date, good_date) = get_default_dates()
+        self.verity_options(actual_self, bad_date, good_date)
+
+        # default is to use a cache session
+        a_session = utils.get_http_session()
+        self.assertTrue(isinstance(a_session, requests.Session))
(tests for utils)
@@ -4,6 +4,9 @@ import datetime
 import tempfile
 import shutil
 import os
+import requests
+from cachecontrol import CacheControl
+import mozregression.limitedfilecache
 from mozregression import utils, errors
 
 class TestUrlLinks(unittest.TestCase):
@@ -11,7 +14,7 @@ class TestUrlLinks(unittest.TestCase):
     def test_url_no_links(self, get):
         get.return_value = Mock(text='')
         self.assertEquals(utils.url_links(''), [])
 
     @patch('requests.get')
     def test_url_with_links(self, get):
         get.return_value = Mock(text="""
@@ -20,7 +23,8 @@ class TestUrlLinks(unittest.TestCase):
             <a href="thing2/">thing2</a>
             </body>
             """)
-        self.assertEquals(utils.url_links(''), ['thing/', 'thing2/'])
+        self.assertEquals(utils.url_links(''),
+                          ['thing/', 'thing2/'])
 
     @patch('requests.get')
     def test_url_with_links_regex(self, get):
@@ -30,7 +34,9 @@ class TestUrlLinks(unittest.TestCase):
             <a href="thing2/">thing2</a>
             </body>
             """)
-        self.assertEquals(utils.url_links('', regex="thing2.*"), ['thing2/'])
+        self.assertEquals(
+            utils.url_links('', regex="thing2.*"),
+            ['thing2/'])
 
 class TestParseDate(unittest.TestCase):
     def test_valid_date(self):
@@ -38,7 +44,8 @@ class TestParseDate(unittest.TestCase):
         self.assertEquals(date, datetime.date(2014, 7, 5))
 
     def test_invalid_date(self):
-        self.assertRaises(errors.DateFormatError, utils.parse_date, "invalid_format")
+        self.assertRaises(errors.DateFormatError, utils.parse_date,
+                          "invalid_format")
 
 class TestParseBits(unittest.TestCase):
     @patch('mozregression.utils.mozinfo')
@@ -113,5 +120,34 @@ class TestRelease(unittest.TestCase):
         self.assertEquals(date, None)
 
 
+class TestHTTPCache(unittest.TestCase):
+    def setUp(self):
+        self.addCleanup(utils.set_http_cache_session, None)
+
+    def make_cache(self):
+        return mozregression.limitedfilecache.get_cache(
+            '/fakedir', utils.one_gigabyte, logger=None)
+
+    def test_basic(self):
+        self.assertEquals(utils.get_http_session(), requests)
+
+    def test_none_returns_requests(self):
+        utils.set_http_cache_session(self.make_cache())
+        utils.set_http_cache_session(None)
+        self.assertEquals(utils.get_http_session(), requests)
+
+    def test_get_http_session(self):
+        utils.set_http_cache_session(self.make_cache())
+        a_session = utils.get_http_session()
+
+        # verify session exists
+        self.assertTrue(isinstance(a_session, requests.Session))
+
+        # turns out CacheControl is just a function not a class
+        # so it makes verifying that we're actually using it
+        # a little messy
+        for k, v in a_session.adapters.items():
+            self.assertTrue(isinstance(v.cache, mozregression.limitedfilecache.LimitedFileCache))
+
+if __name__ == '__main__':
+    unittest.main()
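As the comment in test_get_http_session notes, CacheControl() is a wrapper function: it returns the requests.Session it was given after mounting a caching adapter for 'http://' and 'https://', and the cache object is reachable through each adapter's .cache attribute, which is exactly what the test loop inspects. A sketch of that structure (behaviour as of the cachecontrol 0.10.x line this commit pins; DictCache is cachecontrol's built-in in-memory cache, and details may differ in other versions):

    import requests
    from cachecontrol import CacheControl
    from cachecontrol.cache import DictCache

    sess = CacheControl(requests.Session(), cache=DictCache())
    for prefix, adapter in sess.adapters.items():
        # expected: a CacheControlAdapter for 'http://' and 'https://',
        # each holding the DictCache instance on .cache
        print("%s -> %s(cache=%s)" % (prefix, type(adapter).__name__,
                                      type(adapter.cache).__name__))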