Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1344852: Enable flake8 rule W602: "deprecated form of raising exception". r=dexter
Parent: cf1045e125
Commit: 54f2ad4b81
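For context, flake8's W602 warning flags the Python 2-only comma form of raising an exception with an argument; that syntax was removed in Python 3. The sketch below is illustrative only (the message text is made up, not taken from the diff) and shows the mechanical rewrite this commit applies throughout the Telemetry parsers:

    # Deprecated form flagged by W602; a syntax error under Python 3.
    raise ValueError, "something went wrong"

    # Modern call form the diff converts to; valid in Python 2 and 3 alike.
    raise ValueError("something went wrong")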
@@ -1,5 +1,5 @@
 [flake8]
 # See http://pep8.readthedocs.io/en/latest/intro.html#configuration
-ignore = E121, E123, E126, E129, E133, E226, E241, E242, E704, W503, E402, E501, E202, W602, E127, W601
+ignore = E121, E123, E126, E129, E133, E226, E241, E242, E704, W503, E402, E501, E202, E127, W601
 max-line-length = 99
 filename = *.py, +.lint

@@ -39,7 +39,7 @@ def table_dispatch(kind, table, body):
     if kind in table:
         return body(table[kind])
     else:
-        raise BaseException, "don't know how to handle a histogram of kind %s" % kind
+        raise BaseException("don't know how to handle a histogram of kind %s" % kind)


 class DefinitionException(BaseException):
@@ -87,7 +87,7 @@ try:
             for name, whitelist in whitelists.iteritems():
                 whitelists[name] = set(whitelist)
         except ValueError, e:
-            raise BaseException, 'error parsing whitelist (%s)' % whitelist_path
+            raise BaseException('error parsing whitelist (%s)' % whitelist_path)
 except IOError:
     whitelists = None
     print 'Unable to parse whitelist (%s). Assuming all histograms are acceptable.' % whitelist_path
@@ -135,7 +135,7 @@ symbol that should guard C/C++ definitions associated with the histogram."""
                     'opt-out': 'DATASET_RELEASE_CHANNEL_OPTOUT'}
         value = definition.get('releaseChannelCollection', 'opt-in')
         if value not in datasets:
-            raise DefinitionException, "unknown release channel collection policy for " + name
+            raise DefinitionException("unknown release channel collection policy for " + name)
         self._dataset = "nsITelemetry::" + datasets[value]

     def name(self):
@@ -249,11 +249,11 @@ associated with the histogram. Returns None if no guarding is necessary."""

     def check_name(self, name):
         if '#' in name:
-            raise ValueError, '"#" not permitted for %s' % (name)
+            raise ValueError('"#" not permitted for %s' % (name))

         # Avoid C++ identifier conflicts between histogram enums and label enum names.
         if name.startswith("LABELS_"):
-            raise ValueError, "Histogram name '%s' can not start with LABELS_" % (name)
+            raise ValueError("Histogram name '%s' can not start with LABELS_" % (name))

         # To make it easier to generate C++ identifiers from this etc., we restrict
         # the histogram names to a strict pattern.
@@ -261,7 +261,7 @@ associated with the histogram. Returns None if no guarding is necessary."""
         if self._strict_type_checks:
             pattern = '^[a-z][a-z0-9_]+[a-z0-9]$'
             if not re.match(pattern, name, re.IGNORECASE):
-                raise ValueError, "Histogram name '%s' doesn't confirm to '%s'" % (name, pattern)
+                raise ValueError("Histogram name '%s' doesn't confirm to '%s'" % (name, pattern))

     def check_expiration(self, name, definition):
         field = 'expires_in_version'
@@ -273,7 +273,7 @@ associated with the histogram. Returns None if no guarding is necessary."""
         # We forbid new probes from using "expires_in_version" : "default" field/value pair.
         # Old ones that use this are added to the whitelist.
         if expiration == "default" and name not in whitelists['expiry_default']:
-            raise ValueError, 'New histogram "%s" cannot have "default" %s value.' % (name, field)
+            raise ValueError('New histogram "%s" cannot have "default" %s value.' % (name, field))

         if re.match(r'^[1-9][0-9]*$', expiration):
             expiration = expiration + ".0a1"
@@ -289,20 +289,20 @@ associated with the histogram. Returns None if no guarding is necessary."""

         invalid = filter(lambda l: len(l) > MAX_LABEL_LENGTH, labels)
         if len(invalid) > 0:
-            raise ValueError, 'Label values for %s exceed length limit of %d: %s' % \
-                (name, MAX_LABEL_LENGTH, ', '.join(invalid))
+            raise ValueError('Label values for %s exceed length limit of %d: %s' %
+                             (name, MAX_LABEL_LENGTH, ', '.join(invalid)))

         if len(labels) > MAX_LABEL_COUNT:
-            raise ValueError, 'Label count for %s exceeds limit of %d' % \
-                (name, MAX_LABEL_COUNT)
+            raise ValueError('Label count for %s exceeds limit of %d' %
+                             (name, MAX_LABEL_COUNT))

         # To make it easier to generate C++ identifiers from this etc., we restrict
         # the label values to a strict pattern.
         pattern = '^[a-z][a-z0-9_]+[a-z0-9]$'
         invalid = filter(lambda l: not re.match(pattern, l, re.IGNORECASE), labels)
         if len(invalid) > 0:
-            raise ValueError, 'Label values for %s are not matching pattern "%s": %s' % \
-                (name, pattern, ', '.join(invalid))
+            raise ValueError('Label values for %s are not matching pattern "%s": %s' %
+                             (name, pattern, ', '.join(invalid)))

     # Check for the presence of fields that old histograms are whitelisted for.
     def check_whitelistable_fields(self, name, definition):
@@ -318,10 +318,10 @@ associated with the histogram. Returns None if no guarding is necessary."""

         for field in ['alert_emails', 'bug_numbers']:
             if field not in definition and name not in whitelists[field]:
-                raise KeyError, 'New histogram "%s" must have a %s field.' % (name, field)
+                raise KeyError('New histogram "%s" must have a %s field.' % (name, field))
             if field in definition and name in whitelists[field]:
                 msg = 'Should remove histogram "%s" from the whitelist for "%s" in histogram-whitelists.json'
-                raise KeyError, msg % (name, field)
+                raise KeyError(msg % (name, field))

     def check_field_types(self, name, definition):
         # Define expected types for the histogram properties.
@@ -370,21 +370,21 @@ associated with the histogram. Returns None if no guarding is necessary."""
             if key not in definition:
                 continue
             if not isinstance(definition[key], key_type):
-                raise ValueError, ('value for key "{0}" in Histogram "{1}" '
-                                   'should be {2}').format(key, name, nice_type_name(key_type))
+                raise ValueError('value for key "{0}" in Histogram "{1}" should be {2}'
+                                 .format(key, name, nice_type_name(key_type)))

         for key, key_type in type_checked_list_fields.iteritems():
             if key not in definition:
                 continue
             if not all(isinstance(x, key_type) for x in definition[key]):
-                raise ValueError, ('all values for list "{0}" in Histogram "{1}" '
-                                   'should be {2}').format(key, name, nice_type_name(key_type))
+                raise ValueError('all values for list "{0}" in Histogram "{1}" should be {2}'
+                                 .format(key, name, nice_type_name(key_type)))

     @staticmethod
     def check_keys(name, definition, allowed_keys):
         for key in definition.iterkeys():
             if key not in allowed_keys:
-                raise KeyError, '%s not permitted for %s' % (key, name)
+                raise KeyError('%s not permitted for %s' % (key, name))

     def set_bucket_parameters(self, low, high, n_buckets):
         self._low = low
@@ -392,9 +392,9 @@ associated with the histogram. Returns None if no guarding is necessary."""
         self._n_buckets = n_buckets
         if whitelists is not None and self._n_buckets > 100 and type(self._n_buckets) is int:
             if self._name not in whitelists['n_buckets']:
-                raise KeyError, ('New histogram "%s" is not permitted to have more than 100 buckets. '
-                                 'Histograms with large numbers of buckets use disproportionately high amounts of resources. '
-                                 'Contact the Telemetry team (e.g. in #telemetry) if you think an exception ought to be made.' % self._name)
+                raise KeyError('New histogram "%s" is not permitted to have more than 100 buckets. '
+                               'Histograms with large numbers of buckets use disproportionately high amounts of resources. '
+                               'Contact the Telemetry team (e.g. in #telemetry) if you think an exception ought to be made.' % self._name)

     @staticmethod
     def boolean_flag_bucket_parameters(definition):
@@ -437,7 +437,7 @@ def from_Histograms_json(filename):
         try:
             histograms = json.load(f, object_pairs_hook=OrderedDict)
         except ValueError, e:
-            raise BaseException, "error parsing histograms in %s: %s" % (filename, e.message)
+            raise BaseException("error parsing histograms in %s: %s" % (filename, e.message))
     return histograms


@@ -498,11 +498,11 @@ the histograms defined in filenames.
         # all_histograms stable, which makes ordering in generated files
         # stable, which makes builds more deterministic.
         if not isinstance(histograms, OrderedDict):
-            raise BaseException, "histogram parser didn't provide an OrderedDict"
+            raise BaseException("histogram parser didn't provide an OrderedDict")

         for (name, definition) in histograms.iteritems():
             if all_histograms.has_key(name):
-                raise DefinitionException, "duplicate histogram name %s" % name
+                raise DefinitionException("duplicate histogram name %s" % name)
             all_histograms[name] = definition

     # We require that all USE_COUNTER2_* histograms be defined in a contiguous
@@ -514,7 +514,7 @@ the histograms defined in filenames.
         upper_bound = use_counter_indices[-1][0]
         n_counters = upper_bound - lower_bound + 1
         if n_counters != len(use_counter_indices):
-            raise DefinitionException, "use counter histograms must be defined in a contiguous block"
+            raise DefinitionException("use counter histograms must be defined in a contiguous block")

     # Check that histograms that were removed from Histograms.json etc. are also removed from the whitelists.
     if whitelists is not None:
@@ -522,7 +522,7 @@ the histograms defined in filenames.
         orphaned = set(all_whitelist_entries) - set(all_histograms.keys())
         if len(orphaned) > 0:
             msg = 'The following entries are orphaned and should be removed from histogram-whitelists.json: %s'
-            raise BaseException, msg % (', '.join(sorted(orphaned)))
+            raise BaseException(msg % (', '.join(sorted(orphaned))))

     for (name, definition) in all_histograms.iteritems():
         yield Histogram(name, definition, strict_type_checks=True)

@@ -53,48 +53,48 @@ class TypeChecker:
         # Check fields that can be one of two different types.
         if self._kind is OneOf:
             if not isinstance(value, self._args[0]) and not isinstance(value, self._args[1]):
-                raise ValueError, "%s: failed type check for %s - expected %s or %s, got %s" %\
-                    (identifier, key,
-                     nice_type_name(self._args[0]),
-                     nice_type_name(self._args[1]),
-                     nice_type_name(type(value)))
+                raise ValueError("%s: failed type check for %s - expected %s or %s, got %s" %
+                                 (identifier, key,
+                                  nice_type_name(self._args[0]),
+                                  nice_type_name(self._args[1]),
+                                  nice_type_name(type(value))))
             return

         # Check basic type of value.
         if not isinstance(value, self._kind):
-            raise ValueError, "%s: failed type check for %s - expected %s, got %s" %\
-                (identifier, key,
-                 nice_type_name(self._kind),
-                 nice_type_name(type(value)))
+            raise ValueError("%s: failed type check for %s - expected %s, got %s" %
+                             (identifier, key,
+                              nice_type_name(self._kind),
+                              nice_type_name(type(value))))

         # Check types of values in lists.
         if self._kind is list:
             if len(value) < 1:
-                raise ValueError, "%s: failed check for %s - list should not be empty" % (identifier, key)
+                raise ValueError("%s: failed check for %s - list should not be empty" % (identifier, key))
             for x in value:
                 if not isinstance(x, self._args[0]):
-                    raise ValueError, "%s: failed type check for %s - expected list value type %s, got %s" %\
-                        (identifier, key,
-                         nice_type_name(self._args[0]),
-                         nice_type_name(type(x)))
+                    raise ValueError("%s: failed type check for %s - expected list value type %s, got %s" %
+                                     (identifier, key,
+                                      nice_type_name(self._args[0]),
+                                      nice_type_name(type(x))))

         # Check types of keys and values in dictionaries.
         elif self._kind is dict:
             if len(value.keys()) < 1:
-                raise ValueError, "%s: failed check for %s - dict should not be empty" % (identifier, key)
+                raise ValueError("%s: failed check for %s - dict should not be empty" % (identifier, key))
             for x in value.iterkeys():
                 if not isinstance(x, self._args[0]):
-                    raise ValueError, "%s: failed dict type check for %s - expected key type %s, got %s" %\
-                        (identifier, key,
-                         nice_type_name(self._args[0]),
-                         nice_type_name(type(x)))
+                    raise ValueError("%s: failed dict type check for %s - expected key type %s, got %s" %
+                                     (identifier, key,
+                                      nice_type_name(self._args[0]),
+                                      nice_type_name(type(x))))
             for k, v in value.iteritems():
                 if not isinstance(x, self._args[1]):
-                    raise ValueError, "%s: failed dict type check for %s - expected value type %s for key %s, got %s" %\
-                        (identifier, key,
-                         nice_type_name(self._args[1]),
-                         k,
-                         nice_type_name(type(x)))
+                    raise ValueError("%s: failed dict type check for %s - expected value type %s for key %s, got %s" %
+                                     (identifier, key,
+                                      nice_type_name(self._args[1]),
+                                      k,
+                                      nice_type_name(type(x))))


 def type_check_event_fields(identifier, name, definition):
@@ -141,8 +141,8 @@ def string_check(identifier, field, value, min_length=1, max_length=None, regex=
                          (identifier, value, field, max_length))
     # Regex check.
     if regex and not re.match(regex, value):
-        raise ValueError, '%s: string value "%s" for %s is not matching pattern "%s"' % \
-            (identifier, value, field, regex)
+        raise ValueError('%s: string value "%s" for %s is not matching pattern "%s"' %
+                         (identifier, value, field, regex))


 class EventData:
@@ -170,8 +170,8 @@ class EventData:
         rcc = definition.get(rcc_key, 'opt-in')
         allowed_rcc = ["opt-in", "opt-out"]
         if rcc not in allowed_rcc:
-            raise ValueError, "%s: value for %s should be one of: %s" %\
-                (self.identifier, rcc_key, ", ".join(allowed_rcc))
+            raise ValueError("%s: value for %s should be one of: %s" %
+                             (self.identifier, rcc_key, ", ".join(allowed_rcc)))

         # Check record_in_processes.
         record_in_processes = definition.get('record_in_processes')
@@ -182,8 +182,8 @@ class EventData:
         # Check extra_keys.
         extra_keys = definition.get('extra_keys', {})
         if len(extra_keys.keys()) > MAX_EXTRA_KEYS_COUNT:
-            raise ValueError, "%s: number of extra_keys exceeds limit %d" %\
-                (self.identifier, MAX_EXTRA_KEYS_COUNT)
+            raise ValueError("%s: number of extra_keys exceeds limit %d" %
+                             (self.identifier, MAX_EXTRA_KEYS_COUNT))
         for key in extra_keys.iterkeys():
             string_check(self.identifier, field='extra_keys', value=key,
                          min_length=1, max_length=MAX_EXTRA_KEY_NAME_LENGTH,
@@ -191,13 +191,13 @@ class EventData:

         # Check expiry.
         if 'expiry_version' not in definition and 'expiry_date' not in definition:
-            raise KeyError, "%s: event is missing an expiration - either expiry_version or expiry_date is required" %\
-                (self.identifier)
+            raise KeyError("%s: event is missing an expiration - either expiry_version or expiry_date is required" %
+                           (self.identifier))
         expiry_date = definition.get('expiry_date')
         if expiry_date and isinstance(expiry_date, basestring) and expiry_date != 'never':
             if not re.match(DATE_PATTERN, expiry_date):
-                raise ValueError, "%s: event has invalid expiry_date, it should be either 'never' or match this format: %s" %\
-                    (self.identifier, DATE_PATTERN)
+                raise ValueError("%s: event has invalid expiry_date, it should be either 'never' or match this format: %s" %
+                                 (self.identifier, DATE_PATTERN))
             # Parse into date.
             definition['expiry_date'] = datetime.datetime.strptime(expiry_date, '%Y-%m-%d')


@@ -90,7 +90,7 @@ class StringTable:
         f.write("const char %s[] = {\n" % name)
         for (string, offset) in entries:
             if "*/" in string:
-                raise ValueError, "String in string table contains unexpected sequence '*/': %s" % string
+                raise ValueError("String in string table contains unexpected sequence '*/': %s" % string)

             e = explodeToCharArray(string)
             if e: