Several style and import improvements, including removal of Python 2.6 support (#296)

Authored by Timur Timirkhanov 2015-06-04 20:01:42 +06:00, committed by Henrik Skupin
Parent 41f50ade4d
Commit 6115077aac
19 changed files with 129 additions and 108 deletions


@@ -1,12 +1,15 @@
language: python
python:
- "2.6"
- "2.7"
install:
- pip install pep8
- pip install pylama
before_script:
- "pep8 mozdownload"
- pylama mozdownload
script: ./run_tests.py
notifications:
email:
- dev-automation@lists.mozilla.org


@@ -2,4 +2,20 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from scraper import *
from .scraper import (Scraper,
                      DailyScraper,
                      DirectScraper,
                      ReleaseScraper,
                      ReleaseCandidateScraper,
                      TinderboxScraper,
                      TryScraper,
                      )

__all__ = [Scraper,
           DailyScraper,
           DirectScraper,
           ReleaseScraper,
           ReleaseCandidateScraper,
           TinderboxScraper,
           TryScraper,
           ]
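For orientation only (not part of the commit): with the explicit exports above, downstream code imports the scraper classes directly from the package instead of relying on a star import. A minimal usage sketch, assuming network access to Mozilla's archive; the destination path is a placeholder:

    from mozdownload import ReleaseScraper

    # Download the latest en-US win32 Firefox release to a placeholder directory.
    scraper = ReleaseScraper(destination='/tmp/builds', version='latest',
                             platform='win32')
    scraper.download()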

mozdownload/errors.py (new file, 29 lines)

@@ -0,0 +1,29 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


class NotSupportedError(Exception):
    """Exception for a build not being supported"""
    def __init__(self, message):
        Exception.__init__(self, message)


class NotFoundError(Exception):
    """Exception for a resource not being found (e.g. no logs)"""
    def __init__(self, message, location):
        self.location = location
        Exception.__init__(self, ': '.join([message, location]))


class NotImplementedError(Exception):
    """Exception for a feature which is not implemented yet"""
    def __init__(self, message):
        Exception.__init__(self, message)


class TimeoutError(Exception):
    """Exception for a download exceeding the allocated timeout"""
    def __init__(self):
        self.message = 'The download exceeded the allocated timeout'
        Exception.__init__(self, self.message)
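A short sketch (not from the diff) of how the centralized exceptions in the new errors module are meant to be consumed; the helper function and the path below are hypothetical:

    import mozdownload.errors as errors


    def first_matching_entry(path, entries):
        # Mirrors the pattern used throughout scraper.py: raise NotFoundError
        # with a human-readable message plus the location that was searched.
        if not entries:
            raise errors.NotFoundError('No entries found', path)
        return entries[0]


    try:
        first_matching_entry('firefox/nightly/latest-mozilla-central/', [])
    except errors.NotFoundError as e:
        # The searched location is kept on the exception for logging and retries.
        print('Build not found: %s (location: %s)' % (str(e), e.location))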


@@ -17,12 +17,14 @@ from urlparse import urlparse
import mozinfo
import mozlog
import progressbar as pb
import errors
from parser import DirectoryParser
from timezones import PacificTimezone
from utils import urljoin
import progressbar as pb
version = pkg_resources.require("mozdownload")[0].version
@@ -66,32 +68,6 @@ MULTI_LOCALE_APPLICATIONS = ('b2g', 'fennec')
APPLICATION_TO_FTP_DIRECTORY = {'fennec': 'mobile'}
class NotSupportedError(Exception):
"""Exception for a build not being supported"""
def __init__(self, message):
Exception.__init__(self, message)
class NotFoundError(Exception):
"""Exception for a resource not being found (e.g. no logs)"""
def __init__(self, message, location):
self.location = location
Exception.__init__(self, ': '.join([message, location]))
class NotImplementedError(Exception):
"""Exception for a feature which is not implemented yet"""
def __init__(self, message):
Exception.__init__(self, message)
class TimeoutError(Exception):
"""Exception for a download exceeding the allocated timeout"""
def __init__(self):
self.message = 'The download exceeded the allocated timeout'
Exception.__init__(self, self.message)
class Scraper(object):
"""Generic class to download an application from the Mozilla server"""
@@ -152,7 +128,7 @@ class Scraper(object):
try:
self.get_build_info()
break
except (NotFoundError, requests.exceptions.RequestException), e:
except (errors.NotFoundError, requests.exceptions.RequestException), e:
if self.retry_attempts > 0:
# Log only if multiple attempts are requested
self.logger.warning("Build not found: '%s'" % e.message)
@@ -165,7 +141,7 @@ class Scraper(object):
if hasattr(e, 'response') and \
e.response.status_code == 404:
message = "Specified build has not been found"
raise NotFoundError(message, e.response.url)
raise errors.NotFoundError(message, e.response.url)
else:
raise
@@ -183,7 +159,7 @@ class Scraper(object):
authentication=self.authentication,
timeout=self.timeout_network)
if not parser.entries:
raise NotFoundError('No entries found', self.path)
raise errors.NotFoundError('No entries found', self.path)
# Download the first matched directory entry
pattern = re.compile(self.binary_regex, re.IGNORECASE)
@@ -195,9 +171,9 @@ class Scraper(object):
# No match, continue with next entry
continue
else:
raise NotFoundError("Binary not found in folder",
self.path)
except (NotFoundError, requests.exceptions.RequestException), e:
raise errors.NotFoundError("Binary not found in folder",
self.path)
except (errors.NotFoundError, requests.exceptions.RequestException), e:
if self.retry_attempts > 0:
# Log only if multiple attempts are requested
self.logger.warning("Build not found: '%s'" % e.message)
@@ -210,7 +186,7 @@ class Scraper(object):
if hasattr(e, 'response') and \
e.response.status_code == 404:
message = "Specified build has not been found"
raise NotFoundError(message, self.path)
raise errors.NotFoundError(message, self.path)
else:
raise
@@ -220,7 +196,7 @@ class Scraper(object):
def binary_regex(self):
"""Return the regex for the binary filename"""
raise NotImplementedError(sys._getframe(0).f_code.co_name)
raise errors.NotImplementedError(sys._getframe(0).f_code.co_name)
@property
def final_url(self):
@@ -238,7 +214,7 @@ class Scraper(object):
def path_regex(self):
"""Return the regex for the path to the build"""
raise NotImplementedError(sys._getframe(0).f_code.co_name)
raise errors.NotImplementedError(sys._getframe(0).f_code.co_name)
@property
def platform_regex(self):
@@ -268,7 +244,7 @@ class Scraper(object):
def build_filename(self, binary):
"""Return the proposed filename with extension for the binary"""
raise NotImplementedError(sys._getframe(0).f_code.co_name)
raise errors.NotImplementedError(sys._getframe(0).f_code.co_name)
def detect_platform(self):
"""Detect the current platform"""
@@ -346,12 +322,12 @@ class Scraper(object):
t1 = total_seconds(datetime.now() - start_time)
if self.timeout_download and \
t1 >= self.timeout_download:
raise TimeoutError
raise errors.TimeoutError
if log_level <= mozlog.INFO and content_length:
pbar.finish()
break
except (requests.exceptions.RequestException, TimeoutError), e:
except (requests.exceptions.RequestException, errors.TimeoutError), e:
if tmp_file and os.path.isfile(tmp_file):
os.remove(tmp_file)
if self.retry_attempts > 0:
@@ -436,7 +412,7 @@ class DailyScraper(Scraper):
if not parser.entries:
message = 'Status file for %s build cannot be found' % \
self.platform_regex
raise NotFoundError(message, url)
raise errors.NotFoundError(message, url)
# Read status file for the platform, retrieve build id,
# and convert to a date
@@ -503,7 +479,7 @@ class DailyScraper(Scraper):
date_format = '%Y-%m-%d-%H-%M-%S' if has_time else '%Y-%m-%d'
message = 'Folder for builds on %s has not been found' % \
self.date.strftime(date_format)
raise NotFoundError(message, url)
raise errors.NotFoundError(message, url)
# If no index has been given, set it to the last build of the day.
self.show_matching_builds(parser.entries)
@@ -579,8 +555,8 @@ class DailyScraper(Scraper):
return path
except:
folder = urljoin(self.base_url, self.monthly_build_list_regex)
raise NotFoundError("Specified sub folder cannot be found",
folder)
raise errors.NotFoundError("Specified sub folder cannot be found",
folder)
class DirectScraper(Scraper):
@@ -694,7 +670,7 @@ class ReleaseCandidateScraper(ReleaseScraper):
if not parser.entries:
message = 'Folder for specific candidate builds at %s has not' \
'been found' % url
raise NotFoundError(message, url)
raise errors.NotFoundError(message, url)
self.show_matching_builds(parser.entries)
@@ -752,7 +728,7 @@ class ReleaseCandidateScraper(ReleaseScraper):
try:
# Try to download the signed candidate build
Scraper.download(self)
except NotFoundError, e:
except errors.NotFoundError, e:
self.logger.exception(str(e))
@@ -919,7 +895,7 @@ class TinderboxScraper(Scraper):
if not parser.entries:
message = 'No builds have been found'
raise NotFoundError(message, url)
raise errors.NotFoundError(message, url)
self.show_matching_builds(parser.entries)
@@ -1030,7 +1006,7 @@ class TryScraper(Scraper):
parser.entries = parser.filter('.*-%s$' % self.changeset)
if not parser.entries:
raise NotFoundError('No builds have been found', url)
raise errors.NotFoundError('No builds have been found', url)
self.show_matching_builds(parser.entries)
@@ -1262,10 +1238,10 @@ def cli():
if options.application == 'b2g' and \
options.type in ('candidate', 'release'):
error_msg = "%s build is not yet supported for B2G" % options.type
raise NotSupportedError(error_msg)
raise errors.NotSupportedError(error_msg)
if options.application == 'fennec' and options.type != 'daily':
error_msg = "%s build is not yet supported for fennec" % options.type
raise NotSupportedError(error_msg)
raise errors.NotSupportedError(error_msg)
if options.url:
build = DirectScraper(options.url, **kwargs)
else:

pylama.ini (new file, 6 lines)

@@ -0,0 +1,6 @@
[pylama]
format = mccabe,pep8,pep257,pyflakes,pylint
ignore = C901
[pylama:pep8]
max_line_length = 100
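For illustration only (not from the commit): `ignore = C901` silences mccabe's cyclomatic-complexity warning, and the pep8 override raises the allowed line length to 100 characters. A line like the hypothetical constant below therefore passes under this configuration but would be flagged as E501 by pep8's default limit of 79:

    # Longer than pep8's default 79 characters, but within the configured 100.
    DEFAULT_FILENAME = 'firefox-38.0.5-build3.en-US.win32.installer-with-a-rather-long-name.exe'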


@@ -7,25 +7,27 @@
import os
import unittest
import mozhttpd_base_test as mhttpd
import requests
import mozdownload
import mozdownload.errors as errors
from mozdownload.scraper import PLATFORM_FRAGMENTS
from mozdownload.utils import create_md5, urljoin
import mozhttpd_base_test as mhttpd
class BaseScraperTest(mhttpd.MozHttpdBaseTest):
class TestBaseScraper(mhttpd.MozHttpdBaseTest):
"""Testing the basic functionality of the Base Scraper Class"""
def test_platform_regex(self):
"""Test for correct platform_regex output"""
for key in mozdownload.PLATFORM_FRAGMENTS:
for key in PLATFORM_FRAGMENTS:
scraper = mozdownload.Scraper(destination=self.temp_dir,
version=None,
platform=key)
self.assertEqual(scraper.platform_regex,
mozdownload.PLATFORM_FRAGMENTS[key])
PLATFORM_FRAGMENTS[key])
def test_download(self):
"""Test download method"""
@@ -71,9 +73,9 @@ class BaseScraperTest(mhttpd.MozHttpdBaseTest):
scraper = mozdownload.Scraper(destination=self.temp_dir,
version=None, log_level='ERROR')
for attr in ['binary', 'binary_regex', 'path_regex']:
self.assertRaises(mozdownload.NotImplementedError, getattr,
self.assertRaises(errors.NotImplementedError, getattr,
scraper, attr)
self.assertRaises(mozdownload.NotImplementedError,
self.assertRaises(errors.NotImplementedError,
scraper.build_filename, 'invalid binary')
def test_authentication(self):


@@ -8,16 +8,17 @@ import os
import unittest
import mozfile
import mozhttpd_base_test as mhttpd
from mozprocess import processhandler
import mozhttpd_base_test as mhttpd
tests = [
# ReleaseScraper
{'options': ['-v', 'latest'],
'fname': 'firefox-latest.en-US.linux64.tar.bz2'},
# ReleaseCandidateScraper
{'options': ['-t', 'candidate', '-v', '21.0', '-p', 'win32',],
{'options': ['-t', 'candidate', '-v', '21.0', '-p', 'win32'],
'fname': 'firefox-21.0-build3.en-US.win32.exe'},
# DailyScraper
@@ -34,7 +35,7 @@ tests = [
]
class CorrectScraperTest(mhttpd.MozHttpdBaseTest):
class TestCorrectScraper(mhttpd.MozHttpdBaseTest):
"""Test mozdownload for correct choice of scraper"""
def test_scraper(self):
@@ -42,8 +43,8 @@ class CorrectScraperTest(mhttpd.MozHttpdBaseTest):
for entry in tests:
command = ['./mozdownload/scraper.py',
'--base_url=%s' % self.wdir,
'--destination=%s' % self.temp_dir]
'--base_url=%s' % self.wdir,
'--destination=%s' % self.temp_dir]
p = processhandler.ProcessHandler(command + entry['options'])
p.run()
p.wait()


@@ -4,12 +4,9 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import unittest
import urllib
from mozdownload import DailyScraper
from mozdownload.utils import urljoin
import mozhttpd_base_test as mhttpd
test_params = [
@@ -43,7 +40,7 @@ test_params = [
]
class DailyScraperTest_indices(mhttpd.MozHttpdBaseTest):
class TestDailyScraperIndices(mhttpd.MozHttpdBaseTest):
"""Test mozdownload daily scraper class"""
def test_build_indices(self):


@@ -334,12 +334,12 @@ fennec_tests = [
'target': '2015-06-12-00-40-06-mozilla-aurora-fennec-40.0a2.multi.android-arm.apk',
'target_url': 'mobile/nightly/2015/06/2015-06-12-00-40-06-mozilla-aurora-android-api-11/fennec-40.0a2.multi.android-arm.apk'
},
]
]
tests = firefox_tests + thunderbird_tests + b2g_tests + fennec_tests
class DailyScraperTest(mhttpd.MozHttpdBaseTest):
class TestDailyScraper(mhttpd.MozHttpdBaseTest):
"""Test mozdownload daily scraper class"""
def test_scraper(self):


@@ -4,14 +4,14 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import mozlog
import unittest
from mozdownload import DailyScraper, NotFoundError
from mozdownload.utils import urljoin
from mozdownload import DailyScraper
import mozdownload.errors as errors
import mozhttpd_base_test as mhttpd
# testing with an invalid branch parameter should raise an error
tests_with_invalid_branch = [
# -a firefox -t daily -p win32 --branch=invalid
@@ -24,14 +24,14 @@ tests_with_invalid_branch = [
tests = tests_with_invalid_branch
class TestDailyScraper(mhttpd.MozHttpdBaseTest):
class TestDailyScraperInvalidBranch(mhttpd.MozHttpdBaseTest):
"""test mozdownload DailyScraper class with invalid branch parameters"""
def test_invalid_branch(self):
"""Testing download scenarios with invalid branch parameters for DailyScraper"""
for entry in tests:
self.assertRaises(NotFoundError, DailyScraper,
self.assertRaises(errors.NotFoundError, DailyScraper,
destination=self.temp_dir,
version=None,
base_url=self.wdir,


@@ -4,14 +4,9 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import mozlog
import sys
import unittest
import urllib
from mozdownload import DailyScraper
from mozdownload.utils import urljoin
import mozhttpd_base_test as mhttpd
@@ -39,7 +34,7 @@ tests_with_invalid_date = [
tests = tests_with_invalid_date
class TestDailyScraper_invalidParameters(mhttpd.MozHttpdBaseTest):
class TestDailyScraperInvalidParameters(mhttpd.MozHttpdBaseTest):
"""test mozdownload DailyScraper class with invalid parameters"""
def test_scraper(self):


@@ -10,7 +10,8 @@ import unittest
import mozfile
from mozdownload import DirectScraper, NotImplementedError
from mozdownload import DirectScraper
import mozdownload.errors as errors
class TestDirectScraper(unittest.TestCase):
@@ -34,7 +35,7 @@ class TestDirectScraper(unittest.TestCase):
os.path.join(self.temp_dir, 'index.html'))
for attr in ['binary', 'binary_regex', 'path', 'path_regex']:
self.assertRaises(NotImplementedError, getattr, scraper, attr)
self.assertRaises(errors.NotImplementedError, getattr, scraper, attr)
scraper.download()
self.assertTrue(os.path.isfile(os.path.join(self.temp_dir,


@@ -7,8 +7,9 @@
import os
import unittest
from mozdownload import DirectoryParser
from mozdownload.parser import DirectoryParser
from mozdownload.utils import urljoin
import mozhttpd_base_test as mhttpd


@@ -12,6 +12,7 @@ from mozdownload import ReleaseCandidateScraper
from mozdownload.utils import urljoin
import mozhttpd_base_test as mhttpd
firefox_tests = [
# -p win32 -v 21.0
{'args': {'platform': 'win32',


@@ -4,14 +4,12 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import unittest
import urllib
from mozdownload import ReleaseCandidateScraper
from mozdownload.utils import urljoin
import mozhttpd_base_test as mhttpd
test_params = [
# -a firefox -p win32 -v 21.0 --build-number=1
{'args': {'build_number': '1',


@@ -10,8 +10,10 @@ import urllib
from mozdownload import ReleaseScraper
from mozdownload.utils import urljoin
import mozhttpd_base_test as mhttpd
firefox_tests = [
# -p win32 -v latest
{'args': {'platform': 'win32',


@@ -4,15 +4,9 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import mozlog
import sys
import unittest
import urllib
from mozdownload import TinderboxScraper
from mozdownload import NotFoundError
from mozdownload.utils import urljoin
import mozhttpd_base_test as mhttpd
@@ -25,30 +19,26 @@ tests_with_invalid_date = [
'branch': 'mozilla-central',
'date': '20140317030202',
'locale': 'pt-PT',
'platform': 'win32'}
},
'platform': 'win32'}},
# -p win32 --branch=mozilla-central --date=invalid
{'args': {'branch': 'mozilla-central',
'date': 'invalid',
'locale': 'pt-PT',
'platform': 'win32'}
},
'platform': 'win32'}},
# -p win64 --branch=mozilla-central --date=2013/07/02
{'args': {'branch': 'mozilla-central',
'date': '2013/07/02',
'platform': 'win64'},
},
'platform': 'win64'}},
# -p win32 --branch=mozilla-central --date=2013-March-15
{'args': {'branch': 'mozilla-central',
'date': '2013-March-15',
'platform': 'win32'},
}
'platform': 'win32'}}
]
tests = tests_with_invalid_date
class TestTinderboxScraper_invalidParameters(mhttpd.MozHttpdBaseTest):
class TestTinderboxScraperInvalidParameters(mhttpd.MozHttpdBaseTest):
"""test mozdownload TinderboxScraper class with invalid parameters"""
def test_scraper(self):


@@ -10,8 +10,10 @@ import urllib
from mozdownload import TinderboxScraper
from mozdownload.utils import urljoin
import mozhttpd_base_test as mhttpd
firefox_tests = [
# -p win32
{'args': {'platform': 'win32'},


@@ -4,31 +4,32 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from mozdownload import TryScraper
from mozdownload import NotFoundError
import unittest
import mozhttpd_base_test as mhttpd
from mozdownload import TryScraper
import mozdownload.errors as errors
tests_with_invalid_changeset = [
# -a firefox -p win32 --changeset abcd
{'args': {'application': 'firefox',
'changeset': 'abcd',
'platform': 'win32'}
},
'platform': 'win32'}},
]
tests = tests_with_invalid_changeset
class TestTryScraper_invalidParameters(mhttpd.MozHttpdBaseTest):
class TestTryScraperInvalidParameters(mhttpd.MozHttpdBaseTest):
"""test mozdownload TryScraper class with invalid parameters"""
def test_scraper(self):
"""Testing download scenarios with invalid parameters for TryScraper"""
for entry in tests:
self.assertRaises(NotFoundError, TryScraper,
self.assertRaises(errors.NotFoundError, TryScraper,
destination=self.temp_dir,
version=None,
base_url=self.wdir,