Mirror of https://github.com/mozilla/gecko-dev.git

Bug 1635260 - Update vendored glean_parser to 1.28.0 r=janerik

Using ./mach vendor python glean_parser==1.28.0 (this is the latest version).

Differential Revision: https://phabricator.services.mozilla.com/D84746

Parent: 6a4961efee
Commit: 6d667f0971
@@ -1,5 +1,38 @@
 .. currentmodule:: jinja2
 
+Version 2.11.2
+--------------
+
+Released 2020-04-13
+
+-   Fix a bug that caused callable objects with ``__getattr__``, like
+    :class:`~unittest.mock.Mock` to be treated as a
+    :func:`contextfunction`. :issue:`1145`
+-   Update ``wordcount`` filter to trigger :class:`Undefined` methods
+    by wrapping the input in :func:`soft_unicode`. :pr:`1160`
+-   Fix a hang when displaying tracebacks on Python 32-bit.
+    :issue:`1162`
+-   Showing an undefined error for an object that raises
+    ``AttributeError`` on access doesn't cause a recursion error.
+    :issue:`1177`
+-   Revert changes to :class:`~loaders.PackageLoader` from 2.10 which
+    removed the dependency on setuptools and pkg_resources, and added
+    limited support for namespace packages. The changes caused issues
+    when using Pytest. Due to the difficulty in supporting Python 2 and
+    :pep:`451` simultaneously, the changes are reverted until 3.0.
+    :pr:`1182`
+-   Fix line numbers in error messages when newlines are stripped.
+    :pr:`1178`
+-   The special ``namespace()`` assignment object in templates works in
+    async environments. :issue:`1180`
+-   Fix whitespace being removed before tags in the middle of lines when
+    ``lstrip_blocks`` is enabled. :issue:`1138`
+-   :class:`~nativetypes.NativeEnvironment` doesn't evaluate
+    intermediate strings during rendering. This prevents early
+    evaluation which could change the value of an expression.
+    :issue:`1186`
+
+
 Version 2.11.1
 --------------
 
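The first entry above (:issue:`1145`) is easy to reproduce with the standard library; a minimal sketch, assuming Jinja2 2.11.2 and ``unittest.mock``:

    from unittest.mock import Mock

    from jinja2 import Environment

    env = Environment()

    # Mock answers every attribute lookup, including "contextfunction",
    # with a truthy Mock. 2.11.2 compares these markers with `is True`,
    # so a Mock passed into a template is now called like a plain
    # function instead of being handed the template context.
    tmpl = env.from_string("{{ fn() }}")
    print(tmpl.render(fn=Mock(return_value="called")))  # -> called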
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: Jinja2
-Version: 2.11.1
+Version: 2.11.2
 Summary: A very fast and expressive template engine.
 Home-page: https://palletsprojects.com/p/jinja/
 Author: Armin Ronacher
@@ -0,0 +1,18 @@
+from __future__ import print_function
+
+from jinja2 import Environment
+
+env = Environment(
+    line_statement_prefix="#", variable_start_string="${", variable_end_string="}"
+)
+print(
+    env.from_string(
+        """\
+<ul>
+# for item in range(10)
+  <li class="${loop.cycle('odd', 'even')}">${item}</li>
+# endfor
+</ul>\
+"""
+    ).render()
+)
@@ -0,0 +1,8 @@
+from __future__ import print_function
+
+from jinja2 import Environment
+from jinja2.loaders import FileSystemLoader
+
+env = Environment(loader=FileSystemLoader("templates"))
+tmpl = env.get_template("broken.html")
+print(tmpl.render(seq=[3, 2, 4, 5, 3, 2, 0, 2, 1]))
@@ -0,0 +1,15 @@
+from __future__ import print_function
+
+from jinja2 import Environment
+from jinja2.loaders import DictLoader
+
+env = Environment(
+    loader=DictLoader(
+        {
+            "a": "[A[{% block body %}{% endblock %}]]",
+            "b": "{% extends 'a' %}{% block body %}[B]{% endblock %}",
+            "c": "{% extends 'b' %}{% block body %}###{{ super() }}###{% endblock %}",
+        }
+    )
+)
+print(env.get_template("c").render())
@@ -0,0 +1,6 @@
+{% from 'subbroken.html' import may_break %}
+<ul>
+  {% for item in seq %}
+    <li>{{ may_break(item) }}</li>
+  {% endfor %}
+</ul>
@@ -0,0 +1,3 @@
+{% macro may_break(item) -%}
+  [{{ item / 0 }}]
+{%- endmacro %}
@@ -0,0 +1,31 @@
+from __future__ import print_function
+
+from jinja2 import Environment
+from jinja2.loaders import DictLoader
+
+env = Environment(
+    loader=DictLoader(
+        {
+            "child.html": u"""\
+{% extends master_layout or 'master.html' %}
+{% include helpers = 'helpers.html' %}
+{% macro get_the_answer() %}42{% endmacro %}
+{% title = 'Hello World' %}
+{% block body %}
+    {{ get_the_answer() }}
+    {{ helpers.conspirate() }}
+{% endblock %}
+""",
+            "master.html": u"""\
+<!doctype html>
+<title>{{ title }}</title>
+{% block body %}{% endblock %}
+""",
+            "helpers.html": u"""\
+{% macro conspirate() %}23{% endmacro %}
+""",
+        }
+    )
+)
+tmpl = env.get_template("child.html")
+print(tmpl.render())
third_party/python/Jinja2/examples/basic/test_filter_and_linestatements.py (29 lines, vendored, new file)
@@ -0,0 +1,29 @@
+from __future__ import print_function
+
+from jinja2 import Environment
+
+env = Environment(
+    line_statement_prefix="%", variable_start_string="${", variable_end_string="}"
+)
+tmpl = env.from_string(
+    """\
+% macro foo()
+    ${caller(42)}
+% endmacro
+<ul>
+% for item in seq
+    <li>${item}</li>
+% endfor
+</ul>
+% call(var) foo()
+    [${var}]
+% endcall
+% filter escape
+    <hello world>
+    % for item in [1, 2, 3]
+        - ${item}
+    % endfor
+% endfilter
+"""
+)
+print(tmpl.render(seq=range(10)))
@@ -0,0 +1,15 @@
+from __future__ import print_function
+
+from jinja2 import Environment
+
+tmpl = Environment().from_string(
+    """\
+<ul>
+{%- for item in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] if item % 2 == 0 %}
+  <li>{{ loop.index }} / {{ loop.length }}: {{ item }}</li>
+{%- endfor %}
+</ul>
+if condition: {{ 1 if foo else 0 }}
+"""
+)
+print(tmpl.render(foo=True))
@@ -0,0 +1,20 @@
+from __future__ import print_function
+
+from jinja2 import Environment
+
+env = Environment(extensions=["jinja2.ext.i18n"])
+env.globals["gettext"] = {"Hello %(user)s!": "Hallo %(user)s!"}.__getitem__
+env.globals["ngettext"] = lambda s, p, n: {
+    "%(count)s user": "%(count)d Benutzer",
+    "%(count)s users": "%(count)d Benutzer",
+}[n == 1 and s or p]
+print(
+    env.from_string(
+        """\
+{% trans %}Hello {{ user }}!{% endtrans %}
+{% trans count=users|count -%}
+{{ count }} user{% pluralize %}{{ count }} users
+{% endtrans %}
+"""
+    ).render(user="someone", users=[1, 2, 3])
+)
@@ -41,4 +41,4 @@ from .utils import evalcontextfunction
 from .utils import is_undefined
 from .utils import select_autoescape
 
-__version__ = "2.11.1"
+__version__ = "2.11.2"
@@ -26,17 +26,16 @@ async def async_select_or_reject(args, kwargs, modfunc, lookup_attr):
 
 def dualfilter(normal_filter, async_filter):
     wrap_evalctx = False
-    if getattr(normal_filter, "environmentfilter", False):
+    if getattr(normal_filter, "environmentfilter", False) is True:
 
         def is_async(args):
            return args[0].is_async
 
         wrap_evalctx = False
     else:
-        if not getattr(normal_filter, "evalcontextfilter", False) and not getattr(
-            normal_filter, "contextfilter", False
-        ):
-            wrap_evalctx = True
+        has_evalctxfilter = getattr(normal_filter, "evalcontextfilter", False) is True
+        has_ctxfilter = getattr(normal_filter, "contextfilter", False) is True
+        wrap_evalctx = not has_evalctxfilter and not has_ctxfilter
 
         def is_async(args):
             return args[0].environment.is_async
@@ -1307,13 +1307,13 @@ class CodeGenerator(NodeVisitor):
             def finalize(value):
                 return default(env_finalize(value))
 
-        if getattr(env_finalize, "contextfunction", False):
+        if getattr(env_finalize, "contextfunction", False) is True:
             src += "context, "
             finalize = None  # noqa: F811
-        elif getattr(env_finalize, "evalcontextfunction", False):
+        elif getattr(env_finalize, "evalcontextfunction", False) is True:
             src += "context.eval_ctx, "
             finalize = None
-        elif getattr(env_finalize, "environmentfunction", False):
+        elif getattr(env_finalize, "environmentfunction", False) is True:
             src += "environment, "
 
             def finalize(value):
@@ -1689,11 +1689,11 @@ class CodeGenerator(NodeVisitor):
         func = self.environment.filters.get(node.name)
         if func is None:
             self.fail("no filter named %r" % node.name, node.lineno)
-        if getattr(func, "contextfilter", False):
+        if getattr(func, "contextfilter", False) is True:
             self.write("context, ")
-        elif getattr(func, "evalcontextfilter", False):
+        elif getattr(func, "evalcontextfilter", False) is True:
             self.write("context.eval_ctx, ")
-        elif getattr(func, "environmentfilter", False):
+        elif getattr(func, "environmentfilter", False) is True:
             self.write("environment, ")
 
         # if the filter node is None we are inside a filter block
@@ -245,10 +245,7 @@ else:
     class _CTraceback(ctypes.Structure):
         _fields_ = [
             # Extra PyObject slots when compiled with Py_TRACE_REFS.
-            (
-                "PyObject_HEAD",
-                ctypes.c_byte * (32 if hasattr(sys, "getobjects") else 16),
-            ),
+            ("PyObject_HEAD", ctypes.c_byte * object().__sizeof__()),
             # Only care about tb_next as an object, not a traceback.
             ("tb_next", ctypes.py_object),
         ]
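The replacement field above sizes ``PyObject_HEAD`` from the running interpreter instead of guessing 16 or 32 bytes from build flags, which is what fixed the 32-bit traceback hang (:issue:`1162`). A quick sketch of the idea:

    import ctypes

    # A bare object() is exactly PyObject_HEAD (refcount + type pointer),
    # so its __sizeof__() is the header size for this interpreter build:
    # typically 16 on 64-bit CPython, 8 on 32-bit builds.
    header_size = object().__sizeof__()
    print(header_size)
    print((ctypes.c_byte * header_size)._length_)  # same number, as an array length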
@@ -492,20 +492,20 @@ class Environment(object):
         if func is None:
             fail_for_missing_callable("no filter named %r", name)
         args = [value] + list(args or ())
-        if getattr(func, "contextfilter", False):
+        if getattr(func, "contextfilter", False) is True:
             if context is None:
                 raise TemplateRuntimeError(
                     "Attempted to invoke context filter without context"
                 )
             args.insert(0, context)
-        elif getattr(func, "evalcontextfilter", False):
+        elif getattr(func, "evalcontextfilter", False) is True:
             if eval_ctx is None:
                 if context is not None:
                     eval_ctx = context.eval_ctx
                 else:
                     eval_ctx = EvalContext(self)
             args.insert(0, eval_ctx)
-        elif getattr(func, "environmentfilter", False):
+        elif getattr(func, "environmentfilter", False) is True:
             args.insert(0, self)
         return func(*args, **(kwargs or {}))
 
@@ -761,7 +761,7 @@ def do_wordwrap(
 
 def do_wordcount(s):
     """Count the words in that string."""
-    return len(_word_re.findall(s))
+    return len(_word_re.findall(soft_unicode(s)))
 
 
 def do_int(value, default=0, base=10):
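With the input wrapped in ``soft_unicode``, the filter now exercises the ``Undefined`` machinery instead of handing a non-string to the regex. A minimal sketch, assuming Jinja2 2.11.2:

    from jinja2 import Environment

    env = Environment()  # default, permissive Undefined
    tmpl = env.from_string("{{ text | wordcount }}")

    print(tmpl.render(text="hello jinja world"))  # -> 3
    # `text` is undefined here: soft_unicode() calls Undefined.__str__,
    # which is "" for the default Undefined, so this renders 0 instead
    # of raising a TypeError from the regex as in 2.11.1.
    print(tmpl.render())  # -> 0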
@@ -681,6 +681,8 @@ class Lexer(object):
         source_length = len(source)
         balancing_stack = []
         lstrip_unless_re = self.lstrip_unless_re
+        newlines_stripped = 0
+        line_starting = True
 
         while 1:
             # tokenizer loop
@@ -717,7 +719,9 @@ class Lexer(object):
 
                 if strip_sign == "-":
                     # Strip all whitespace between the text and the tag.
-                    groups = (text.rstrip(),) + groups[1:]
+                    stripped = text.rstrip()
+                    newlines_stripped = text[len(stripped) :].count("\n")
+                    groups = (stripped,) + groups[1:]
                 elif (
                     # Not marked for preserving whitespace.
                     strip_sign != "+"
@@ -728,11 +732,11 @@ class Lexer(object):
                 ):
                     # The start of text between the last newline and the tag.
                     l_pos = text.rfind("\n") + 1
-
-                    # If there's only whitespace between the newline and the
-                    # tag, strip it.
-                    if not lstrip_unless_re.search(text, l_pos):
-                        groups = (text[:l_pos],) + groups[1:]
+                    if l_pos > 0 or line_starting:
+                        # If there's only whitespace between the newline and the
+                        # tag, strip it.
+                        if not lstrip_unless_re.search(text, l_pos):
+                            groups = (text[:l_pos],) + groups[1:]
 
                 for idx, token in enumerate(tokens):
                     # failure group
@@ -758,7 +762,8 @@ class Lexer(object):
                         data = groups[idx]
                         if data or token not in ignore_if_empty:
                             yield lineno, token, data
-                        lineno += data.count("\n")
+                        lineno += data.count("\n") + newlines_stripped
+                        newlines_stripped = 0
 
                 # strings as token just are yielded as it.
                 else:
@@ -790,6 +795,8 @@ class Lexer(object):
                     yield lineno, tokens, data
                     lineno += data.count("\n")
 
+            line_starting = m.group()[-1:] == "\n"
+
             # fetch new position into new variable so that we can check
             # if there is a internal parsing error which would result
             # in an infinite loop
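The ``line_starting`` flag added above is what fixes :issue:`1138`: ``lstrip_blocks`` should only strip whitespace at the start of a line. A short sketch, assuming Jinja2 2.11.2:

    from jinja2 import Environment

    env = Environment(lstrip_blocks=True)

    # The tag sits in the middle of the line, so the space before it must
    # survive; before 2.11.2 it was stripped as if the tag started a line.
    tmpl = env.from_string("value: {% if True %}yes{% endif %}")
    print(tmpl.render())  # -> value: yes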
@@ -3,11 +3,9 @@
 sources.
 """
 import os
-import pkgutil
 import sys
 import weakref
 from hashlib import sha1
-from importlib import import_module
 from os import path
 from types import ModuleType
 
@@ -217,141 +215,75 @@ class FileSystemLoader(BaseLoader):
 
 
 class PackageLoader(BaseLoader):
-    """Load templates from a directory in a Python package.
-
-    :param package_name: Import name of the package that contains the
-        template directory.
-    :param package_path: Directory within the imported package that
-        contains the templates.
-    :param encoding: Encoding of template files.
-
-    The following example looks up templates in the ``pages`` directory
-    within the ``project.ui`` package.
-
-    .. code-block:: python
-
-        loader = PackageLoader("project.ui", "pages")
-
-    Only packages installed as directories (standard pip behavior) or
-    zip/egg files (less common) are supported. The Python API for
-    introspecting data in packages is too limited to support other
-    installation methods the way this loader requires.
-
-    There is limited support for :pep:`420` namespace packages. The
-    template directory is assumed to only be in one namespace
-    contributor. Zip files contributing to a namespace are not
-    supported.
-
-    .. versionchanged:: 2.11.0
-        No longer uses ``setuptools`` as a dependency.
-
-    .. versionchanged:: 2.11.0
-        Limited PEP 420 namespace package support.
+    """Load templates from python eggs or packages. It is constructed with
+    the name of the python package and the path to the templates in that
+    package::
+
+        loader = PackageLoader('mypackage', 'views')
+
+    If the package path is not given, ``'templates'`` is assumed.
+
+    Per default the template encoding is ``'utf-8'`` which can be changed
+    by setting the `encoding` parameter to something else. Due to the nature
+    of eggs it's only possible to reload templates if the package was loaded
+    from the file system and not a zip file.
     """
 
     def __init__(self, package_name, package_path="templates", encoding="utf-8"):
-        if package_path == os.path.curdir:
-            package_path = ""
-        elif package_path[:2] == os.path.curdir + os.path.sep:
-            package_path = package_path[2:]
-
-        package_path = os.path.normpath(package_path).rstrip(os.path.sep)
-        self.package_path = package_path
-        self.package_name = package_name
+        from pkg_resources import DefaultProvider
+        from pkg_resources import get_provider
+        from pkg_resources import ResourceManager
+
+        provider = get_provider(package_name)
         self.encoding = encoding
-
-        # Make sure the package exists. This also makes namespace
-        # packages work, otherwise get_loader returns None.
-        import_module(package_name)
-        self._loader = loader = pkgutil.get_loader(package_name)
-
-        # Zip loader's archive attribute points at the zip.
-        self._archive = getattr(loader, "archive", None)
-        self._template_root = None
-
-        if hasattr(loader, "get_filename"):
-            # A standard directory package, or a zip package.
-            self._template_root = os.path.join(
-                os.path.dirname(loader.get_filename(package_name)), package_path
-            )
-        elif hasattr(loader, "_path"):
-            # A namespace package, limited support. Find the first
-            # contributor with the template directory.
-            for root in loader._path:
-                root = os.path.join(root, package_path)
-
-                if os.path.isdir(root):
-                    self._template_root = root
-                    break
-
-        if self._template_root is None:
-            raise ValueError(
-                "The %r package was not installed in a way that"
-                " PackageLoader understands." % package_name
-            )
+        self.manager = ResourceManager()
+        self.filesystem_bound = isinstance(provider, DefaultProvider)
+        self.provider = provider
+        self.package_path = package_path
 
     def get_source(self, environment, template):
-        p = os.path.join(self._template_root, *split_template_path(template))
-
-        if self._archive is None:
-            # Package is a directory.
-            if not os.path.isfile(p):
-                raise TemplateNotFound(template)
-
-            with open(p, "rb") as f:
-                source = f.read()
-
-            mtime = os.path.getmtime(p)
-
-            def up_to_date():
-                return os.path.isfile(p) and os.path.getmtime(p) == mtime
-
-        else:
-            # Package is a zip file.
-            try:
-                source = self._loader.get_data(p)
-            except OSError:
-                raise TemplateNotFound(template)
-
-            # Could use the zip's mtime for all template mtimes, but
-            # would need to safely reload the module if it's out of
-            # date, so just report it as always current.
-            up_to_date = None
-
-        return source.decode(self.encoding), p, up_to_date
+        pieces = split_template_path(template)
+        p = "/".join((self.package_path,) + tuple(pieces))
+
+        if not self.provider.has_resource(p):
+            raise TemplateNotFound(template)
+
+        filename = uptodate = None
+
+        if self.filesystem_bound:
+            filename = self.provider.get_resource_filename(self.manager, p)
+            mtime = path.getmtime(filename)
+
+            def uptodate():
+                try:
+                    return path.getmtime(filename) == mtime
+                except OSError:
+                    return False
+
+        source = self.provider.get_resource_string(self.manager, p)
+        return source.decode(self.encoding), filename, uptodate
 
     def list_templates(self):
+        path = self.package_path
+
+        if path[:2] == "./":
+            path = path[2:]
+        elif path == ".":
+            path = ""
+
+        offset = len(path)
         results = []
 
-        if self._archive is None:
-            # Package is a directory.
-            offset = len(self._template_root)
-
-            for dirpath, _, filenames in os.walk(self._template_root):
-                dirpath = dirpath[offset:].lstrip(os.path.sep)
-                results.extend(
-                    os.path.join(dirpath, name).replace(os.path.sep, "/")
-                    for name in filenames
-                )
-        else:
-            if not hasattr(self._loader, "_files"):
-                raise TypeError(
-                    "This zip import does not have the required"
-                    " metadata to list templates."
-                )
-
-            # Package is a zip file.
-            prefix = (
-                self._template_root[len(self._archive) :].lstrip(os.path.sep)
-                + os.path.sep
-            )
-            offset = len(prefix)
-
-            for name in self._loader._files.keys():
-                # Find names under the templates directory that aren't directories.
-                if name.startswith(prefix) and name[-1] != os.path.sep:
-                    results.append(name[offset:].replace(os.path.sep, "/"))
-
+        def _walk(path):
+            for filename in self.provider.resource_listdir(path):
+                fullname = path + "/" + filename
+
+                if self.provider.resource_isdir(fullname):
+                    _walk(fullname)
+                else:
+                    results.append(fullname[offset:].lstrip("/"))
+
+        _walk(path)
         results.sort()
         return results
 
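The reverted loader resolves templates through ``pkg_resources`` again; usage is unchanged. A short sketch, where ``mypackage`` and the template names are illustrative:

    from jinja2 import Environment, PackageLoader

    # Works for packages installed as directories and, via pkg_resources,
    # for zip/egg installs; reloading only works for filesystem installs.
    env = Environment(loader=PackageLoader("mypackage", "templates"))
    print(env.get_template("index.html").render(title="Hello"))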
@@ -1,4 +1,3 @@
-import types
 from ast import literal_eval
 from itertools import chain
 from itertools import islice
@@ -11,7 +10,7 @@ from .environment import Environment
 from .environment import Template
 
 
-def native_concat(nodes, preserve_quotes=True):
+def native_concat(nodes):
     """Return a native Python type from the list of compiled nodes. If
     the result is a single node, its value is returned. Otherwise, the
     nodes are concatenated as strings. If the result can be parsed with
@@ -19,9 +18,6 @@ def native_concat(nodes, preserve_quotes=True):
     the string is returned.
 
     :param nodes: Iterable of nodes to concatenate.
-    :param preserve_quotes: Whether to re-wrap literal strings with
-        quotes, to preserve quotes around expressions for later parsing.
-        Should be ``False`` in :meth:`NativeEnvironment.render`.
     """
     head = list(islice(nodes, 2))
 
@@ -31,29 +27,17 @@ def native_concat(nodes, preserve_quotes=True):
     if len(head) == 1:
         raw = head[0]
     else:
-        if isinstance(nodes, types.GeneratorType):
-            nodes = chain(head, nodes)
-        raw = u"".join([text_type(v) for v in nodes])
+        raw = u"".join([text_type(v) for v in chain(head, nodes)])
 
     try:
-        literal = literal_eval(raw)
+        return literal_eval(raw)
     except (ValueError, SyntaxError, MemoryError):
         return raw
 
-    # If literal_eval returned a string, re-wrap with the original
-    # quote character to avoid dropping quotes between expression nodes.
-    # Without this, "'{{ a }}', '{{ b }}'" results in "a, b", but should
-    # be ('a', 'b').
-    if preserve_quotes and isinstance(literal, str):
-        return "{quote}{}{quote}".format(literal, quote=raw[0])
-
-    return literal
-
 
 class NativeCodeGenerator(CodeGenerator):
     """A code generator which renders Python types by not adding
-    ``to_string()`` around output nodes, and using :func:`native_concat`
-    to convert complex strings back to Python types if possible.
+    ``to_string()`` around output nodes.
     """
 
     @staticmethod
@@ -61,7 +45,7 @@ class NativeCodeGenerator(CodeGenerator):
         return value
 
     def _output_const_repr(self, group):
-        return repr(native_concat(group))
+        return repr(u"".join([text_type(v) for v in group]))
 
     def _output_child_to_const(self, node, frame, finalize):
         const = node.as_const(frame.eval_ctx)
@@ -100,10 +84,9 @@ class NativeTemplate(Template):
         Otherwise, the string is returned.
         """
         vars = dict(*args, **kwargs)
 
         try:
-            return native_concat(
-                self.root_render_func(self.new_context(vars)), preserve_quotes=False
-            )
+            return native_concat(self.root_render_func(self.new_context(vars)))
         except Exception:
             return self.environment.handle_exception()
 
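The net effect of the ``native_concat`` changes is that a single expression keeps its Python type and is no longer round-tripped through intermediate strings (:issue:`1186`). A minimal sketch, assuming Jinja2 2.11.2:

    from jinja2.nativetypes import NativeEnvironment

    env = NativeEnvironment()
    result = env.from_string("{{ x + y }}").render(x=4, y=2)
    print(result, type(result))  # -> 6 <class 'int'>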
@@ -671,7 +671,7 @@ class Filter(Expr):
         # python 3. because of that, do not rename filter_ to filter!
         filter_ = self.environment.filters.get(self.name)
 
-        if filter_ is None or getattr(filter_, "contextfilter", False):
+        if filter_ is None or getattr(filter_, "contextfilter", False) is True:
             raise Impossible()
 
         # We cannot constant handle async filters, so we need to make sure
@@ -684,9 +684,9 @@ class Filter(Expr):
         args, kwargs = args_as_const(self, eval_ctx)
         args.insert(0, self.node.as_const(eval_ctx))
 
-        if getattr(filter_, "evalcontextfilter", False):
+        if getattr(filter_, "evalcontextfilter", False) is True:
             args.insert(0, eval_ctx)
-        elif getattr(filter_, "environmentfilter", False):
+        elif getattr(filter_, "environmentfilter", False) is True:
             args.insert(0, self.environment)
 
         try:
@@ -280,11 +280,11 @@ class Context(with_metaclass(ContextMeta)):
                 break
 
         if callable(__obj):
-            if getattr(__obj, "contextfunction", 0):
+            if getattr(__obj, "contextfunction", False) is True:
                 args = (__self,) + args
-            elif getattr(__obj, "evalcontextfunction", 0):
+            elif getattr(__obj, "evalcontextfunction", False) is True:
                 args = (__self.eval_ctx,) + args
-            elif getattr(__obj, "environmentfunction", 0):
+            elif getattr(__obj, "environmentfunction", False) is True:
                 args = (__self.environment,) + args
         try:
             return __obj(*args, **kwargs)
@@ -165,11 +165,15 @@ def object_type_repr(obj):
         return "None"
     elif obj is Ellipsis:
         return "Ellipsis"
 
+    cls = type(obj)
+
     # __builtin__ in 2.x, builtins in 3.x
-    if obj.__class__.__module__ in ("__builtin__", "builtins"):
-        name = obj.__class__.__name__
+    if cls.__module__ in ("__builtin__", "builtins"):
+        name = cls.__name__
     else:
-        name = obj.__class__.__module__ + "." + obj.__class__.__name__
+        name = cls.__module__ + "." + cls.__name__
 
     return "%s object" % name
 
@@ -693,7 +697,8 @@ class Namespace(object):
         self.__attrs = dict(*args, **kwargs)
 
     def __getattribute__(self, name):
-        if name == "_Namespace__attrs":
+        # __class__ is needed for the awaitable check in async mode
+        if name in {"_Namespace__attrs", "__class__"}:
             return object.__getattribute__(self, name)
         try:
             return self.__attrs[name]
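The ``__class__`` special case above is what makes ``namespace()`` usable in async rendering (:issue:`1180`), since the awaitable check inspects ``__class__``. A short sketch, assuming Jinja2 2.11.2 on Python 3.6+:

    import asyncio

    from jinja2 import Environment

    env = Environment(enable_async=True)
    tmpl = env.from_string(
        "{% set ns = namespace(total=0) %}"
        "{% for x in items %}{% set ns.total = ns.total + x %}{% endfor %}"
        "{{ ns.total }}"
    )
    print(asyncio.get_event_loop().run_until_complete(
        tmpl.render_async(items=[1, 2, 3])
    ))  # -> 6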
@@ -1,19 +0,0 @@
-# Minimal makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS    =
-SPHINXBUILD   = sphinx-build
-SOURCEDIR     = .
-BUILDDIR      = _build
-
-# Put it first so that "make" without argument is like "make help".
-help:
-	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
-
-.PHONY: help Makefile
-
-# Catch-all target: route all unknown targets to Sphinx using the new
-# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
-%: Makefile
-	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
@@ -1,4 +0,0 @@
-Changes
-=======
-
-.. include:: ../CHANGES.rst
@@ -1,42 +0,0 @@
-from pallets_sphinx_themes import get_version
-from pallets_sphinx_themes import ProjectLink
-
-# Project --------------------------------------------------------------
-
-project = "MarkupSafe"
-copyright = "2010 Pallets Team"
-author = "Pallets Team"
-release, version = get_version("MarkupSafe")
-
-# General --------------------------------------------------------------
-
-master_doc = "index"
-extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx", "pallets_sphinx_themes"]
-intersphinx_mapping = {"python": ("https://docs.python.org/3/", None)}
-
-# HTML -----------------------------------------------------------------
-
-html_theme = "flask"
-html_theme_options = {"index_sidebar_logo": False}
-html_context = {
-    "project_links": [
-        ProjectLink("Donate to Pallets", "https://palletsprojects.com/donate"),
-        ProjectLink("Website", "https://palletsprojects.com/p/markupsafe/"),
-        ProjectLink("PyPI releases", "https://pypi.org/project/MarkupSafe/"),
-        ProjectLink("Source Code", "https://github.com/pallets/markupsafe/"),
-        ProjectLink("Issue Tracker", "https://github.com/pallets/markupsafe/issues/"),
-    ]
-}
-html_sidebars = {
-    "index": ["project.html", "localtoc.html", "searchbox.html"],
-    "**": ["localtoc.html", "relations.html", "searchbox.html"],
-}
-singlehtml_sidebars = {"index": ["project.html", "localtoc.html"]}
-html_title = "MarkupSafe Documentation ({})".format(version)
-html_show_sourcelink = False
-
-# LaTeX ----------------------------------------------------------------
-
-latex_documents = [
-    (master_doc, "MarkupSafe-{}.tex".format(version), html_title, author, "manual")
-]
@@ -1,21 +0,0 @@
-.. module:: markupsafe
-
-Working With Safe Text
-======================
-
-.. autofunction:: escape
-
-.. autoclass:: Markup
-    :members: escape, unescape, striptags
-
-
-Optional Values
----------------
-
-.. autofunction:: escape_silent
-
-
-Convert an Object to a String
------------------------------
-
-.. autofunction:: soft_unicode
@@ -1,77 +0,0 @@
-.. currentmodule:: markupsafe
-
-String Formatting
-=================
-
-The :class:`Markup` class can be used as a format string. Objects
-formatted into a markup string will be escaped first.
-
-
-Format Method
--------------
-
-The ``format`` method extends the standard :meth:`str.format` behavior
-to use an ``__html_format__`` method.
-
-#.  If an object has an ``__html_format__`` method, it is called as a
-    replacement for the ``__format__`` method. It is passed a format
-    specifier if it's given. The method must return a string or
-    :class:`Markup` instance.
-
-#.  If an object has an ``__html__`` method, it is called. If a format
-    specifier was passed and the class defined ``__html__`` but not
-    ``__html_format__``, a ``ValueError`` is raised.
-
-#.  Otherwise Python's default format behavior is used and the result
-    is escaped.
-
-For example, to implement a ``User`` that wraps its ``name`` in a
-``span`` tag, and adds a link when using the ``'link'`` format
-specifier:
-
-.. code-block:: python
-
-    class User(object):
-        def __init__(self, id, name):
-            self.id = id
-            self.name = name
-
-        def __html_format__(self, format_spec):
-            if format_spec == 'link':
-                return Markup(
-                    '<a href="/user/{}">{}</a>'
-                ).format(self.id, self.__html__())
-            elif format_spec:
-                raise ValueError('Invalid format spec')
-            return self.__html__()
-
-        def __html__(self):
-            return Markup(
-                '<span class="user">{0}</span>'
-            ).format(self.name)
-
-
-.. code-block:: pycon
-
-    >>> user = User(3, '<script>')
-    >>> escape(user)
-    Markup('<span class="user">&lt;script&gt;</span>')
-    >>> Markup('<p>User: {user:link}').format(user=user)
-    Markup('<p>User: <a href="/user/3"><span class="user">&lt;script&gt;</span></a>')
-
-See Python's docs on :ref:`format string syntax <python:formatstrings>`.
-
-
-printf-style Formatting
------------------------
-
-Besides escaping, there's no special behavior involved with percent
-formatting.
-
-.. code-block:: pycon
-
-    >>> user = User(3, '<script>')
-    >>> Markup('<a href="/user/%d">%s</a>') % (user.id, user.name)
-    Markup('<a href="/user/3">&lt;script&gt;</a>')
-
-See Python's docs on :ref:`printf-style formatting <python:old-string-formatting>`.
@@ -1,51 +0,0 @@
-.. currentmodule:: markupsafe
-
-HTML Representations
-====================
-
-In many frameworks, if a class implements an ``__html__`` method it
-will be used to get the object's representation in HTML. MarkupSafe's
-:func:`escape` function and :class:`Markup` class understand and
-implement this method. If an object has an ``__html__`` method it will
-be called rather than converting the object to a string, and the result
-will be assumed safe and not escaped.
-
-For example, an ``Image`` class might automatically generate an
-``<img>`` tag:
-
-.. code-block:: python
-
-    class Image:
-        def __init__(self, url):
-            self.url = url
-
-        def __html__(self):
-            return '<img src="%s">' % self.url
-
-.. code-block:: pycon
-
-    >>> img = Image('/static/logo.png')
-    >>> Markup(img)
-    Markup('<img src="/static/logo.png">')
-
-Since this bypasses escaping, you need to be careful about using
-user-provided data in the output. For example, a user's display name
-should still be escaped:
-
-.. code-block:: python
-
-    class User:
-        def __init__(self, id, name):
-            self.id = id
-            self.name = name
-
-        def __html__(self):
-            return '<a href="/user/{}">{}</a>'.format(
-                self.id, escape(self.name)
-            )
-
-.. code-block:: pycon
-
-    >>> user = User(3, '<script>')
-    >>> escape(user)
-    Markup('<a href="/users/3">&lt;script&gt;</a>')
@@ -1,53 +0,0 @@
-.. currentmodule:: markupsafe
-
-MarkupSafe
-==========
-
-MarkupSafe escapes characters so text is safe to use in HTML and XML.
-Characters that have special meanings are replaced so that they display
-as the actual characters. This mitigates injection attacks, meaning
-untrusted user input can safely be displayed on a page.
-
-The :func:`escape` function escapes text and returns a :class:`Markup`
-object. The object won't be escaped anymore, but any text that is used
-with it will be, ensuring that the result remains safe to use in HTML.
-
->>> from markupsafe import escape
->>> hello = escape('<em>Hello</em>')
->>> hello
-Markup('&lt;em&gt;Hello&lt;/em&gt;')
->>> escape(hello)
-Markup('&lt;em&gt;Hello&lt;/em&gt;')
->>> hello + ' <strong>World</strong>'
-Markup('&lt;em&gt;Hello&lt;/em&gt; &lt;strong&gt;World&lt;/strong&gt;')
-
-.. note::
-
-    The docs assume you're using Python 3. The terms "text" and "string"
-    refer to the :class:`str` class. In Python 2, this would be the
-    ``unicode`` class instead.
-
-
-Installing
-----------
-
-Install and update using `pip`_:
-
-.. code-block:: text
-
-    pip install -U MarkupSafe
-
-.. _pip: https://pip.pypa.io/en/stable/quickstart/
-
-
-Table of Contents
------------------
-
-.. toctree::
-    :maxdepth: 2
-
-    escaping
-    html
-    formatting
-    license
-    changes
@@ -1,4 +0,0 @@
-License
-=======
-
-.. include:: ../LICENSE.rst
@@ -1,35 +0,0 @@
-@ECHO OFF
-
-pushd %~dp0
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
-	set SPHINXBUILD=sphinx-build
-)
-set SOURCEDIR=.
-set BUILDDIR=_build
-
-if "%1" == "" goto help
-
-%SPHINXBUILD% >NUL 2>NUL
-if errorlevel 9009 (
-	echo.
-	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
-	echo.installed, then set the SPHINXBUILD environment variable to point
-	echo.to the full path of the 'sphinx-build' executable. Alternatively you
-	echo.may add the Sphinx directory to PATH.
-	echo.
-	echo.If you don't have Sphinx installed, grab it from
-	echo.http://sphinx-doc.org/
-	exit /b 1
-)
-
-%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
-goto end
-
-:help
-%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
-
-:end
-popd
@@ -1,2 +0,0 @@
-Sphinx~=1.8.0
-Pallets-Sphinx-Themes~=1.1.0
@@ -0,0 +1,8 @@
+*.pyc
+*.egg-info
+tmp/
+build/
+dist/
+.tox/
+MANIFEST
+*.komodoproject
@@ -0,0 +1,10 @@
+language: python
+python:
+  - "2.7"
+  - "pypy"
+  - "3.4"
+  - "3.5"
+  - "3.6"
+  - "3.7"
+  - "3.8"
+script: python setup.py test
@@ -1,6 +1,15 @@
 appdirs Changelog
 =================
 
+appdirs 1.4.4
+-------------
+- [PR #92] Don't import appdirs from setup.py
+
+Project officially classified as Stable which is important
+for inclusion in other distros such as ActivePython.
+
+First of several incremental releases to catch up on maintenance.
+
 appdirs 1.4.3
 -------------
 - [PR #76] Python 3.6 invalid escape sequence deprecation fixes
@@ -0,0 +1,13 @@
+FROM activestate/activepython:2.7
+
+# For Python3 compact
+RUN apt-get -y update && apt-get -y install python3-setuptools && \
+    apt-get -y clean
+
+WORKDIR /app
+ADD . /app
+RUN python setup.py install && python setup.py test
+RUN python3 setup.py install && python3 setup.py test
+
+RUN python -m appdirs
+RUN python3 -m appdirs
@@ -0,0 +1,16 @@
+# HACKING
+
+## release
+
+ensure correct version in CHANGES.md and appdirs.py, and:
+
+```
+python setup.py register sdist bdist_wheel upload
+```
+
+## docker image
+
+```
+docker build -t appdirs .
+```
+
@@ -1,10 +1,12 @@
-Metadata-Version: 1.1
+Metadata-Version: 1.2
 Name: appdirs
-Version: 1.4.3
+Version: 1.4.4
 Summary: A small Python module for determining appropriate platform-specific dirs, e.g. a "user data dir".
 Home-page: http://github.com/ActiveState/appdirs
-Author: Trent Mick; Sridhar Ratnakumar; Jeff Rouse
-Author-email: trentm@gmail.com; github@srid.name; jr@its.to
+Author: Trent Mick
+Author-email: trentm@gmail.com
+Maintainer: Jeff Rouse
+Maintainer-email: jr@its.to
 License: MIT
 Description: 
         .. image:: https://secure.travis-ci.org/ActiveState/appdirs.png
@@ -150,6 +152,15 @@ Description: 
         appdirs Changelog
         =================
 
+        appdirs 1.4.4
+        -------------
+        - [PR #92] Don't import appdirs from setup.py
+
+        Project officially classified as Stable which is important
+        for inclusion in other distros such as ActivePython.
+
+        First of several incremental releases to catch up on maintenance.
+
         appdirs 1.4.3
         -------------
         - [PR #76] Python 3.6 invalid escape sequence deprecation fixes
@@ -234,19 +245,18 @@ Description: 
 
 Keywords: application directory log cache user
 Platform: UNKNOWN
-Classifier: Development Status :: 4 - Beta
+Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 2.6
 Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.2
-Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
@@ -0,0 +1 @@
+- add some Windows 7 examples
@@ -13,8 +13,8 @@ See <http://github.com/ActiveState/appdirs> for details and usage.
 # - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
 # - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
 
-__version_info__ = (1, 4, 3)
-__version__ = '.'.join(map(str, __version_info__))
+__version__ = "1.4.4"
+__version_info__ = tuple(int(segment) for segment in __version__.split("."))
 
 
 import sys
@@ -7,7 +7,7 @@ try:
     from setuptools import setup
 except ImportError:
     from distutils.core import setup
-import appdirs
+import ast
 
 tests_require = []
 if sys.version_info < (2, 7):
@@ -21,26 +21,32 @@ def read(fname):
     return out
 
 
+# Do not import `appdirs` yet, lest we import some random version on sys.path.
+for line in read("appdirs.py").splitlines():
+    if line.startswith("__version__"):
+        version = ast.literal_eval(line.split("=", 1)[1].strip())
+        break
+
+
 setup(
     name='appdirs',
-    version=appdirs.__version__,
+    version=version,
     description='A small Python module for determining appropriate ' + \
         'platform-specific dirs, e.g. a "user data dir".',
     long_description=read('README.rst') + '\n' + read('CHANGES.rst'),
     classifiers=[c.strip() for c in """
-        Development Status :: 4 - Beta
+        Development Status :: 5 - Production/Stable
         Intended Audience :: Developers
         License :: OSI Approved :: MIT License
         Operating System :: OS Independent
         Programming Language :: Python :: 2
-        Programming Language :: Python :: 2.6
         Programming Language :: Python :: 2.7
         Programming Language :: Python :: 3
-        Programming Language :: Python :: 3.2
-        Programming Language :: Python :: 3.3
        Programming Language :: Python :: 3.4
         Programming Language :: Python :: 3.5
         Programming Language :: Python :: 3.6
+        Programming Language :: Python :: 3.7
+        Programming Language :: Python :: 3.8
         Programming Language :: Python :: Implementation :: PyPy
         Programming Language :: Python :: Implementation :: CPython
         Topic :: Software Development :: Libraries :: Python Modules
@@ -50,8 +56,8 @@ setup(
     keywords='application directory log cache user',
     author='Trent Mick',
     author_email='trentm@gmail.com',
-    maintainer='Trent Mick; Sridhar Ratnakumar; Jeff Rouse',
-    maintainer_email='trentm@gmail.com; github@srid.name; jr@its.to',
+    maintainer='Jeff Rouse',
+    maintainer_email='jr@its.to',
     url='http://github.com/ActiveState/appdirs',
     license='MIT',
     py_modules=["appdirs"],
@@ -0,0 +1,5 @@
+[tox]
+envlist = py26, py27, py32, py33, py34, py35, py36
+
+[testenv]
+commands = python setup.py test
@@ -1,17 +1,36 @@
 version: 2.1
 
 commands:
-  test-python-version:
-    parameters:
-      requirements-file:
-        type: string
-        default: "requirements_dev.txt"
+  test-start:
     steps:
       - checkout
       - run:
           name: environment
           command: |
             echo 'export PATH=.:$HOME/.local/bin:$PATH' >> $BASH_ENV
+
+  test-min-requirements:
+    steps:
+      - run:
+          name: install minimum requirements
+          command: |
+            # Use requirements-builder to determine the minimum versions of
+            # all requirements and test those
+            # We install requirements-builder itself into its own venv, since
+            # otherwise its dependencies might install newer versions of
+            # glean_parser's dependencies.
+            python3 -m venv .rb
+            .rb/bin/pip install requirements-builder
+            .rb/bin/requirements-builder --level=min setup.py > min_requirements.txt
+
+            pip install --progress-bar off --user -U -r min_requirements.txt
+
+  test-python-version:
+    parameters:
+      requirements-file:
+        type: string
+        default: "requirements_dev.txt"
+    steps:
       - run:
           name: install
           command: |
@@ -37,23 +56,26 @@ commands:
           command: make test
 
 jobs:
-  build-35:
-    docker:
-      - image: circleci/python:3.5.9
-    steps:
-      - test-python-version:
-          requirements-file: requirements_dev_py35.txt
-
   build-36:
     docker:
       - image: circleci/python:3.6.9
     steps:
+      - test-start
+      - test-python-version
+
+  build-36-min:
+    docker:
+      - image: circleci/python:3.6.9
+    steps:
+      - test-start
+      - test-min-requirements
       - test-python-version
 
   build-37:
     docker:
       - image: circleci/python:3.7.5
     steps:
+      - test-start
       - test-python-version
       - run:
           name: make-docs
@@ -68,6 +90,15 @@ jobs:
     docker:
       - image: circleci/python:3.8.0
     steps:
+      - test-start
+      - test-python-version
+
+  build-38-min:
+    docker:
+      - image: circleci/python:3.8.0
+    steps:
+      - test-start
+      - test-min-requirements
       - test-python-version
 
   docs-deploy:
@@ -115,11 +146,11 @@ workflows:
   version: 2
   build:
     jobs:
-      - build-35:
+      - build-36:
           filters:
             tags:
               only: /.*/
-      - build-36:
+      - build-36-min:
           filters:
             tags:
               only: /.*/
@@ -131,12 +162,16 @@ workflows:
           filters:
             tags:
               only: /.*/
+      - build-38-min:
+          filters:
+            tags:
+              only: /.*/
       - docs-deploy:
           requires:
             - build-37
           filters:
             branches:
-              only: master
+              only: main
       - pypi-deploy:
           requires:
             - build-37
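The new `test-min-requirements` command pins every dependency to the lowest version `setup.py` allows before running the suite. A minimal sketch in Python of that lower-bound computation, assuming plain `name>=x.y` specifiers (requirements-builder, used above, also handles extras, environment markers, and version ranges):

    import re

    def min_pin(specifier: str) -> str:
        # Turn "jsonschema>=3.0.2" into "jsonschema==3.0.2" so CI exercises
        # the oldest version the package claims to support.
        match = re.match(r"^\s*([A-Za-z0-9_.-]+)\s*>=\s*([0-9][0-9A-Za-z.]*)", specifier)
        if match is None:
            return specifier  # leave exact pins and unbounded specs alone
        name, lowest = match.groups()
        return f"{name}=={lowest}"

    assert min_pin("jsonschema>=3.0.2") == "jsonschema==3.0.2"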
@@ -87,10 +87,6 @@ Ready to contribute? Here's how to set up `glean_parser` for local development.

     $ pip install -r requirements_dev.txt

-If using Python 3.5:
-
-    $ pip install -r requirements_dev_35.txt
-
 Optionally, if you want to ensure that the generated Kotlin code lints correctly, install a Java SDK, and then run::

     $ make install-kotlin-linters
@@ -117,7 +113,7 @@ Before you submit a pull request, check that it meets these guidelines:
 2. If the pull request adds functionality, the docs should be updated. Put
    your new functionality into a function with a docstring, and add the
    feature to the list in README.rst.
-3. The pull request should work for Python 3.5, 3.6, 3.7 and 3.8 (The CI system will take care of testing all of these Python versions).
+3. The pull request should work for Python 3.6, 3.7 and 3.8 (The CI system will take care of testing all of these Python versions).
 4. The pull request should update the changelog in `HISTORY.rst`.

 Tips
@@ -133,11 +129,11 @@ Deploying

 A reminder for the maintainers on how to deploy.

-Get a clean master branch with all of the changes from `upstream`::
+Get a clean main branch with all of the changes from `upstream`::

-    $ git checkout master
+    $ git checkout main
     $ git fetch upstream
-    $ git rebase upstream/master
+    $ git rebase upstream/main

 - Update the header with the new version and date in HISTORY.rst.
@@ -147,10 +143,9 @@ Get a clean master branch with all of the changes from `upstream`::

 - Push the changes upstream::

-    $ git push upstream master
+    $ git push upstream main

-- Wait for [continuous integration to
-  pass](https://circleci.com/gh/mozilla/glean/tree/master) on master.
+- Wait for [continuous integration to pass](https://circleci.com/gh/mozilla/glean/tree/main) on main.

 - Make the release on GitHub using [this link](https://github.com/mozilla/glean_parser/releases/new)
@@ -5,6 +5,94 @@ History
 Unreleased
 ----------

+1.28.0 (2020-07-23)
+-------------------
+
+* **Breaking change:** The internal ping `deletion-request` was misnamed in pings.py causing the linter to not allow use of the correctly named ping for adding legacy ids to. Consuming apps will need to update their metrics.yaml if they are using `deletion_request` in any `send_in_pings` to `deletion-request` after updating.
+
+1.27.0 (2020-07-21)
+-------------------
+
+* Rename the `data_category` field to `data_sensitivity` to be clearer.
+
+1.26.0 (2020-07-21)
+-------------------
+
+* Add support for JWE metric types.
+* Add a `data_sensitivity` field to all metrics for specifying the type of data collected in the field.
+
+1.25.0 (2020-07-17)
+-------------------
+
+* Add support for generating C# code.
+* BUGFIX: The memory unit is now correctly passed to the MemoryDistribution
+  metric type in Swift.
+
+1.24.0 (2020-06-30)
+-------------------
+
+* BUGFIX: look for metrics in send_if_empty pings. Metrics for these kinds of pings were being ignored.
+
+1.23.0 (2020-06-27)
+-------------------
+
+* Support for Python 3.5 has been dropped.
+* BUGFIX: The ordering of event extra keys will now match with their enum, fixing a serious bug where keys of extras may not match the correct values in the data payload. See https://bugzilla.mozilla.org/show_bug.cgi?id=1648768.
+
+1.22.0 (2020-05-28)
+-------------------
+
+* **Breaking change:** (Swift only) Combine all metrics and pings into a single generated file `Metrics.swift`.
+
+1.21.0 (2020-05-25)
+-------------------
+
+* `glinter` messages have been improved with more details and to be more
+  actionable.
+* A maximum of 10 `extra_keys` is now enforced for `event` metric types.
+* BUGFIX: the `Lifetime` enum values now match the values of the implementation in mozilla/glean.
+
+1.20.4 (2020-05-07)
+-------------------
+
+* BUGFIX: yamllint errors are now reported using the correct file name.
+
+1.20.3 (2020-05-06)
+-------------------
+
+* Support for using `timing_distribution`'s `time_unit` parameter to control the range of acceptable values is documented. The default unit for this use case is `nanosecond` to avoid creating a breaking change. See [bug 1630997](https://bugzilla.mozilla.org/show_bug.cgi?id=1630997) for more information.
+
+1.20.2 (2020-04-24)
+-------------------
+
+* Dependencies that depend on the version of Python being used are now specified using the `Declaring platform specific dependencies syntax in setuptools <https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies>`__. This means that more recent versions of dependencies are likely to be installed on Python 3.6 and later, and unnecessary backport libraries won't be installed on more recent Python versions.
+
+1.20.1 (2020-04-21)
+-------------------
+
+* The minimum version of the runtime dependencies has been lowered to increase compatibility with other tools. These minimum versions are now tested in CI, in addition to testing the latest versions of the dependencies that was already happening in CI.
+
+1.20.0 (2020-04-15)
+-------------------
+
+* **Breaking change:** glinter errors found during the `translate` command will now return an error code. glinter warnings will be displayed, but not return an error code.
+* `glean_parser` now produces a linter warning when `user` lifetime metrics are
+  set to expire. See [bug 1604854](https://bugzilla.mozilla.org/show_bug.cgi?id=1604854)
+  for additional context.
+
+1.19.0 (2020-03-18)
+-------------------
+
+* **Breaking change:** The regular expression used to validate labels is
+  stricter and more correct.
+* Add more information about pings to markdown documentation:
+  * State whether the ping includes client id;
+  * Add list of data review links;
+  * Add list of related bugs links.
+* `glean_parser` now makes it easier to write external translation functions for
+  different language targets.
+* BUGFIX: glean_parser now works on 32-bit Windows.
+
 1.18.3 (2020-02-24)
 -------------------
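The 1.28.0 entry above requires a one-time migration in consuming apps. A sketch of rewriting `send_in_pings` in a metrics.yaml with PyYAML (already a glean_parser dependency); the key layout below is the usual metrics.yaml shape, and note that a round-trip through yaml.dump drops comments, so a targeted text edit may be preferable in practice:

    from pathlib import Path

    import yaml

    path = Path("metrics.yaml")
    data = yaml.safe_load(path.read_text())

    for category in data.values():
        if not isinstance(category, dict):
            continue  # skip top-level keys like $schema
        for definition in category.values():
            if not isinstance(definition, dict):
                continue
            pings = definition.get("send_in_pings", [])
            # The internal ping's correct name uses a hyphen, not an underscore.
            definition["send_in_pings"] = [
                "deletion-request" if p == "deletion_request" else p for p in pings
            ]

    path.write_text(yaml.dump(data))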
@@ -21,7 +21,7 @@ clean-build: ## remove build artifacts
 	rm -fr dist/
 	rm -fr .eggs/
 	find . -name '*.egg-info' -exec rm -fr {} +
-	find . -name '*.egg' -exec rm -f {} +
+	find . -name '*.egg' -exec rm -fr {} +

 clean-pyc: ## remove Python file artifacts
 	find . -name '*.pyc' -exec rm -f {} +
@@ -36,10 +36,11 @@ clean-test: ## remove test and coverage artifacts

 lint: ## check style with flake8
 	python3 -m flake8 glean_parser tests
-	bash -c 'if [[ `python3 --version` =~ "Python 3\.[678]\..*" ]]; then \
+	if python3 --version | grep 'Python 3\.[678]\..*'; then \
 		python3 -m black --check glean_parser tests setup.py; \
-	fi'
+	fi
 	python3 -m yamllint glean_parser tests
+	python3 -m mypy glean_parser

 test: ## run tests quickly with the default Python
 	py.test
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: glean_parser
-Version: 1.18.3
+Version: 1.28.0
 Summary: Parser tools for Mozilla's Glean telemetry
 Home-page: https://github.com/mozilla/glean_parser
 Author: Michael Droettboom
@@ -26,7 +26,7 @@ Description: ============
         Requirements
         ------------

-        - Python 3.5 (or later)
+        - Python 3.6 (or later)

         The following library requirements are installed automatically when glean_parser
         is installed by `pip`.
|
||||||
- jsonschema
|
- jsonschema
|
||||||
- PyYAML
|
- PyYAML
|
||||||
|
|
||||||
Additionally on Python 3.6 and 3.5:
|
Additionally on Python 3.6:
|
||||||
|
|
||||||
- iso8601
|
- iso8601
|
||||||
|
|
||||||
And on Python 3.5:
|
|
||||||
|
|
||||||
- pep487
|
|
||||||
|
|
||||||
Usage
|
Usage
|
||||||
-----
|
-----
|
||||||
|
|
||||||
|
@@ -73,6 +69,94 @@ Description: ============
         Unreleased
         ----------

+        1.28.0 (2020-07-23)
+        -------------------
+
+        * **Breaking change:** The internal ping `deletion-request` was misnamed in pings.py causing the linter to not allow use of the correctly named ping for adding legacy ids to. Consuming apps will need to update their metrics.yaml if they are using `deletion_request` in any `send_in_pings` to `deletion-request` after updating.
+
+        1.27.0 (2020-07-21)
+        -------------------
+
+        * Rename the `data_category` field to `data_sensitivity` to be clearer.
+
+        1.26.0 (2020-07-21)
+        -------------------
+
+        * Add support for JWE metric types.
+        * Add a `data_sensitivity` field to all metrics for specifying the type of data collected in the field.
+
+        1.25.0 (2020-07-17)
+        -------------------
+
+        * Add support for generating C# code.
+        * BUGFIX: The memory unit is now correctly passed to the MemoryDistribution
+          metric type in Swift.
+
+        1.24.0 (2020-06-30)
+        -------------------
+
+        * BUGFIX: look for metrics in send_if_empty pings. Metrics for these kinds of pings were being ignored.
+
+        1.23.0 (2020-06-27)
+        -------------------
+
+        * Support for Python 3.5 has been dropped.
+        * BUGFIX: The ordering of event extra keys will now match with their enum, fixing a serious bug where keys of extras may not match the correct values in the data payload. See https://bugzilla.mozilla.org/show_bug.cgi?id=1648768.
+
+        1.22.0 (2020-05-28)
+        -------------------
+
+        * **Breaking change:** (Swift only) Combine all metrics and pings into a single generated file `Metrics.swift`.
+
+        1.21.0 (2020-05-25)
+        -------------------
+
+        * `glinter` messages have been improved with more details and to be more
+          actionable.
+        * A maximum of 10 `extra_keys` is now enforced for `event` metric types.
+        * BUGFIX: the `Lifetime` enum values now match the values of the implementation in mozilla/glean.
+
+        1.20.4 (2020-05-07)
+        -------------------
+
+        * BUGFIX: yamllint errors are now reported using the correct file name.
+
+        1.20.3 (2020-05-06)
+        -------------------
+
+        * Support for using `timing_distribution`'s `time_unit` parameter to control the range of acceptable values is documented. The default unit for this use case is `nanosecond` to avoid creating a breaking change. See [bug 1630997](https://bugzilla.mozilla.org/show_bug.cgi?id=1630997) for more information.
+
+        1.20.2 (2020-04-24)
+        -------------------
+
+        * Dependencies that depend on the version of Python being used are now specified using the `Declaring platform specific dependencies syntax in setuptools <https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies>`__. This means that more recent versions of dependencies are likely to be installed on Python 3.6 and later, and unnecessary backport libraries won't be installed on more recent Python versions.
+
+        1.20.1 (2020-04-21)
+        -------------------
+
+        * The minimum version of the runtime dependencies has been lowered to increase compatibility with other tools. These minimum versions are now tested in CI, in addition to testing the latest versions of the dependencies that was already happening in CI.
+
+        1.20.0 (2020-04-15)
+        -------------------
+
+        * **Breaking change:** glinter errors found during the `translate` command will now return an error code. glinter warnings will be displayed, but not return an error code.
+        * `glean_parser` now produces a linter warning when `user` lifetime metrics are
+          set to expire. See [bug 1604854](https://bugzilla.mozilla.org/show_bug.cgi?id=1604854)
+          for additional context.
+
+        1.19.0 (2020-03-18)
+        -------------------
+
+        * **Breaking change:** The regular expression used to validate labels is
+          stricter and more correct.
+        * Add more information about pings to markdown documentation:
+          * State whether the ping includes client id;
+          * Add list of data review links;
+          * Add list of related bugs links.
+        * `glean_parser` now makes it easier to write external translation functions for
+          different language targets.
+        * BUGFIX: glean_parser now works on 32-bit Windows.
+
         1.18.3 (2020-02-24)
         -------------------
@@ -353,7 +437,6 @@ Classifier: Development Status :: 2 - Pre-Alpha
 Classifier: Intended Audience :: Developers
 Classifier: Natural Language :: English
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
@@ -18,7 +18,7 @@ The full documentation is available `here <https://mozilla.github.io/glean_parse
 Requirements
 ------------

-- Python 3.5 (or later)
+- Python 3.6 (or later)

 The following library requirements are installed automatically when glean_parser
 is installed by `pip`.
@@ -30,14 +30,10 @@ is installed by `pip`.
 - jsonschema
 - PyYAML

-Additionally on Python 3.6 and 3.5:
+Additionally on Python 3.6:

 - iso8601

-And on Python 3.5:
-
-- pep487
-
 Usage
 -----
@@ -0,0 +1,148 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Outputter to generate C# code for metrics.
+"""
+
+import enum
+import json
+from pathlib import Path
+from typing import Any, Dict, List, Union  # noqa
+
+from . import metrics
+from . import pings
+from . import util
+
+
+def csharp_datatypes_filter(value: util.JSONType) -> str:
+    """
+    A Jinja2 filter that renders C# literals.
+
+    Based on Python's JSONEncoder, but overrides:
+      - lists to use `new string[] {}` (only strings)
+      - dicts to use mapOf (not currently supported)
+      - sets to use setOf (not currently supported)
+      - enums to use the like-named C# enum
+    """
+
+    class CSharpEncoder(json.JSONEncoder):
+        def iterencode(self, value):
+            if isinstance(value, list):
+                assert all(isinstance(x, str) for x in value)
+                yield "new string[] {"
+                first = True
+                for subvalue in value:
+                    if not first:
+                        yield ", "
+                    yield from self.iterencode(subvalue)
+                    first = False
+                yield "}"
+            elif isinstance(value, dict):
+                yield "mapOf("
+                first = True
+                for key, subvalue in value.items():
+                    if not first:
+                        yield ", "
+                    yield from self.iterencode(key)
+                    yield " to "
+                    yield from self.iterencode(subvalue)
+                    first = False
+                yield ")"
+            elif isinstance(value, enum.Enum):
+                yield (value.__class__.__name__ + "." + util.Camelize(value.name))
+            elif isinstance(value, set):
+                yield "setOf("
+                first = True
+                for subvalue in sorted(list(value)):
+                    if not first:
+                        yield ", "
+                    yield from self.iterencode(subvalue)
+                    first = False
+                yield ")"
+            else:
+                yield from super().iterencode(value)
+
+    return "".join(CSharpEncoder().iterencode(value))
+
+
+def type_name(obj: Union[metrics.Metric, pings.Ping]) -> str:
+    """
+    Returns the C# type to use for a given metric or ping object.
+    """
+    generate_enums = getattr(obj, "_generate_enums", [])
+    if len(generate_enums):
+        template_args = []
+        for member, suffix in generate_enums:
+            if len(getattr(obj, member)):
+                template_args.append(util.camelize(obj.name) + suffix)
+            else:
+                if suffix == "Keys":
+                    template_args.append("NoExtraKeys")
+                else:
+                    template_args.append("No" + suffix)
+
+        return "{}<{}>".format(class_name(obj.type), ", ".join(template_args))
+
+    return class_name(obj.type)
+
+
+def class_name(obj_type: str) -> str:
+    """
+    Returns the C# class name for a given metric or ping type.
+    """
+    if obj_type == "ping":
+        return "PingType"
+    if obj_type.startswith("labeled_"):
+        obj_type = obj_type[8:]
+    return util.Camelize(obj_type) + "MetricType"
+
+
+def output_csharp(
+    objs: metrics.ObjectTree, output_dir: Path, options: Dict[str, Any] = {}
+) -> None:
+    """
+    Given a tree of objects, output C# code to `output_dir`.
+
+    :param objects: A tree of objects (metrics and pings) as returned from
+        `parser.parse_objects`.
+    :param output_dir: Path to an output directory to write to.
+    :param options: options dictionary, with the following optional keys:
+
+        - `namespace`: The package namespace to declare at the top of the
+          generated files. Defaults to `GleanMetrics`.
+        - `glean_namespace`: The package namespace of the glean library itself.
+          This is where glean objects will be imported from in the generated
+          code.
+    """
+    template = util.get_jinja2_template(
+        "csharp.jinja2",
+        filters=(
+            ("csharp", csharp_datatypes_filter),
+            ("type_name", type_name),
+            ("class_name", class_name),
+        ),
+    )
+
+    namespace = options.get("namespace", "GleanMetrics")
+    glean_namespace = options.get("glean_namespace", "Mozilla.Glean")
+
+    for category_key, category_val in objs.items():
+        filename = util.Camelize(category_key) + ".cs"
+        filepath = output_dir / filename
+
+        with filepath.open("w", encoding="utf-8") as fd:
+            fd.write(
+                template.render(
+                    category_name=category_key,
+                    objs=category_val,
+                    extra_args=util.extra_args,
+                    namespace=namespace,
+                    glean_namespace=glean_namespace,
+                )
+            )
+            # Jinja2 squashes the final newline, so we explicitly add it
+            fd.write("\n")
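The new C# outputter plugs into the same pipeline as the Kotlin backend below. A usage sketch mirroring the `parse_objects` / `.value` pattern that `glinter` uses later in this patch; the file paths and namespace options here are illustrative:

    from pathlib import Path

    from glean_parser import csharp, parser, util

    all_objs = parser.parse_objects([Path("metrics.yaml"), Path("pings.yaml")])
    if util.report_validation_errors(all_objs):
        raise SystemExit(1)

    # Writes one <Category>.cs file per metric category into generated/.
    csharp.output_csharp(
        all_objs.value,
        Path("generated"),
        {"namespace": "MyApp.GleanMetrics", "glean_namespace": "Mozilla.Glean"},
    )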
@@ -11,11 +11,15 @@ Outputter to generate Kotlin code for metrics.
 from collections import OrderedDict
 import enum
 import json
+from pathlib import Path
+from typing import Any, Dict, List, Union  # noqa

+from . import metrics
+from . import pings
 from . import util


-def kotlin_datatypes_filter(value):
+def kotlin_datatypes_filter(value: util.JSONType) -> str:
     """
     A Jinja2 filter that renders Kotlin literals.

@@ -65,7 +69,7 @@ def kotlin_datatypes_filter(value):
     return "".join(KotlinEncoder().iterencode(value))


-def type_name(obj):
+def type_name(obj: Union[metrics.Metric, pings.Ping]) -> str:
     """
     Returns the Kotlin type to use for a given metric or ping object.
     """
@@ -86,7 +90,7 @@ def type_name(obj):
     return class_name(obj.type)


-def class_name(obj_type):
+def class_name(obj_type: str) -> str:
     """
     Returns the Kotlin class name for a given metric or ping type.
     """
@@ -97,13 +101,15 @@ def class_name(obj_type):
     return util.Camelize(obj_type) + "MetricType"


-def output_gecko_lookup(objs, output_dir, options={}):
+def output_gecko_lookup(
+    objs: metrics.ObjectTree, output_dir: Path, options: Dict[str, Any] = {}
+) -> None:
     """
     Given a tree of objects, generate a Kotlin map between Gecko histograms and
     Glean SDK metric types.

     :param objects: A tree of objects (metrics and pings) as returned from
         `parser.parse_objects`.
     :param output_dir: Path to an output directory to write to.
     :param options: options dictionary, with the following optional keys:
@@ -138,7 +144,9 @@ def output_gecko_lookup(objs, output_dir, options={}):
     #   },
     #   "other-type": {}
     # }
-    gecko_metrics = OrderedDict()
+    gecko_metrics: OrderedDict[
+        str, OrderedDict[str, List[Dict[str, str]]]
+    ] = OrderedDict()

     # Define scalar-like types.
     SCALAR_LIKE_TYPES = ["boolean", "string", "quantity"]
@@ -148,7 +156,9 @@ def output_gecko_lookup(objs, output_dir, options={}):
         # Glean SDK and GeckoView. See bug 1566356 for more context.
         for metric in category_val.values():
             # This is not a Gecko metric, skip it.
-            if not getattr(metric, "gecko_datapoint", False):
+            if isinstance(metric, pings.Ping) or not getattr(
+                metric, "gecko_datapoint", False
+            ):
                 continue

             # Put scalars in their own categories, histogram-like in "histograms" and
@@ -186,12 +196,14 @@ def output_gecko_lookup(objs, output_dir, options={}):
             fd.write("\n")


-def output_kotlin(objs, output_dir, options={}):
+def output_kotlin(
+    objs: metrics.ObjectTree, output_dir: Path, options: Dict[str, Any] = {}
+) -> None:
     """
     Given a tree of objects, output Kotlin code to `output_dir`.

     :param objects: A tree of objects (metrics and pings) as returned from
         `parser.parse_objects`.
     :param output_dir: Path to an output directory to write to.
     :param options: options dictionary, with the following optional keys:
@@ -210,25 +222,6 @@ def output_kotlin(objs, output_dir, options={}):
         ),
     )

-    # The object parameters to pass to constructors
-    extra_args = [
-        "allowed_extra_keys",
-        "bucket_count",
-        "category",
-        "disabled",
-        "histogram_type",
-        "include_client_id",
-        "send_if_empty",
-        "lifetime",
-        "memory_unit",
-        "name",
-        "range_max",
-        "range_min",
-        "reason_codes",
-        "send_in_pings",
-        "time_unit",
-    ]
-
     namespace = options.get("namespace", "GleanMetrics")
     glean_namespace = options.get("glean_namespace", "mozilla.components.service.glean")
@@ -249,7 +242,7 @@ def output_kotlin(objs, output_dir, options={}):
                     category_name=category_key,
                     objs=category_val,
                     obj_types=obj_types,
-                    extra_args=extra_args,
+                    extra_args=util.extra_args,
                     namespace=namespace,
                     has_labeled_metrics=has_labeled_metrics,
                     glean_namespace=glean_namespace,
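The `extra_args` list deleted from `output_kotlin` above now lives in `util`, so the Kotlin and C# templates share a single definition instead of each outputter carrying its own copy. A sketch of the shared list in glean_parser/util.py, reusing exactly the names removed here:

    # The object parameters to pass to constructors, shared by all outputters.
    extra_args = [
        "allowed_extra_keys",
        "bucket_count",
        "category",
        "disabled",
        "histogram_type",
        "include_client_id",
        "send_if_empty",
        "lifetime",
        "memory_unit",
        "name",
        "range_max",
        "range_min",
        "reason_codes",
        "send_in_pings",
        "time_unit",
    ]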
@@ -3,25 +3,53 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.


+import enum
+from pathlib import Path
 import re
 import sys
+from typing import Any, Callable, Dict, Generator, List, Iterable, Tuple, Union  # noqa


+from . import metrics
 from . import parser
+from . import pings
 from . import util

-from yamllint.config import YamlLintConfig
-from yamllint import linter
+from yamllint.config import YamlLintConfig  # type: ignore
+from yamllint import linter  # type: ignore


-def _split_words(name):
+LintGenerator = Generator[str, None, None]
+
+
+class CheckType(enum.Enum):
+    warning = 0
+    error = 1
+
+
+def _split_words(name: str) -> List[str]:
     """
     Helper function to split words on either `.` or `_`.
     """
     return re.split("[._]", name)


-def _hamming_distance(str1, str2):
+def _english_list(items: List[str]) -> str:
+    """
+    Helper function to format a list [A, B, C] as "'A', 'B', or 'C'".
+    """
+    if len(items) == 0:
+        return ""
+    elif len(items) == 1:
+        return f"'{items[0]}'"
+    else:
+        return "{}, or '{}'".format(
+            ", ".join([f"'{x}'" for x in items[:-1]]), items[-1]
+        )
+
+
+def _hamming_distance(str1: str, str2: str) -> int:
     """
     Count the # of differences between strings str1 and str2,
     padding the shorter one with whitespace
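A doctest-style check of the two helpers above, assuming the padding behaviour described in `_hamming_distance`'s docstring:

    assert _english_list([]) == ""
    assert _english_list(["metrics"]) == "'metrics'"
    assert _english_list(["metrics", "events"]) == "'metrics', or 'events'"

    # "metric" is padded to "metric " and differs from "metrics" only in the
    # final position; a distance of exactly 1 is what the misspelled-ping
    # check below treats as a likely typo.
    assert _hamming_distance("metrics", "metric") == 1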
@@ -39,7 +67,9 @@ def _hamming_distance(str1, str2):
     return diffs


-def check_common_prefix(category_name, metrics):
+def check_common_prefix(
+    category_name: str, metrics: Iterable[metrics.Metric]
+) -> LintGenerator:
     """
     Check if all metrics begin with a common prefix.
     """
@@ -58,12 +88,16 @@ def check_common_prefix(category_name, metrics):
     if i > 0:
         common_prefix = "_".join(first[:i])
         yield (
-            "Within category '{}', all metrics begin with prefix "
-            "'{}'. Remove prefixes and (possibly) rename category."
-        ).format(category_name, common_prefix)
+            f"Within category '{category_name}', all metrics begin with "
+            f"prefix '{common_prefix}'."
+            "Remove the prefixes on the metric names and (possibly) "
+            "rename the category."
+        )


-def check_unit_in_name(metric, parser_config={}):
+def check_unit_in_name(
+    metric: metrics.Metric, parser_config: Dict[str, Any] = {}
+) -> LintGenerator:
     """
     The metric name ends in a unit.
     """
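The COMMON_PREFIX check above (its body sits outside this hunk) reports a category when every metric name begins with the same words. A sketch of the underlying idea using `_split_words`, with illustrative metric names:

    names = ["session_start", "session_end", "session_crash_count"]

    # All first words agree, so the category would be flagged with
    # common_prefix == "session".
    first_words = {_split_words(name)[0] for name in names}
    assert first_words == {"session"}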
@@ -87,105 +121,160 @@ def check_unit_in_name(metric, parser_config={}):
     name_words = _split_words(metric.name)
     unit_in_name = name_words[-1]

-    if hasattr(metric, "time_unit"):
+    time_unit = getattr(metric, "time_unit", None)
+    memory_unit = getattr(metric, "memory_unit", None)
+    unit = getattr(metric, "unit", None)
+
+    if time_unit is not None:
         if (
-            unit_in_name == TIME_UNIT_ABBREV.get(metric.time_unit.name)
-            or unit_in_name == metric.time_unit.name
+            unit_in_name == TIME_UNIT_ABBREV.get(time_unit.name)
+            or unit_in_name == time_unit.name
         ):
             yield (
-                "Suffix '{}' is redundant with time_unit. " "Only include time_unit."
-            ).format(unit_in_name)
+                f"Suffix '{unit_in_name}' is redundant with time_unit "
+                f"'{time_unit.name}'. Only include time_unit."
+            )
         elif (
             unit_in_name in TIME_UNIT_ABBREV.keys()
             or unit_in_name in TIME_UNIT_ABBREV.values()
         ):
             yield (
-                "Suffix '{}' doesn't match time_unit. "
+                f"Suffix '{unit_in_name}' doesn't match time_unit "
+                f"'{time_unit.name}'. "
                 "Confirm the unit is correct and only include time_unit."
-            ).format(unit_in_name)
+            )

-    elif hasattr(metric, "memory_unit"):
+    elif memory_unit is not None:
         if (
-            unit_in_name == MEMORY_UNIT_ABBREV.get(metric.memory_unit.name)
-            or unit_in_name == metric.memory_unit.name
+            unit_in_name == MEMORY_UNIT_ABBREV.get(memory_unit.name)
+            or unit_in_name == memory_unit.name
         ):
             yield (
-                "Suffix '{}' is redundant with memory_unit. "
+                f"Suffix '{unit_in_name}' is redundant with memory_unit "
+                f"'{memory_unit.name}'. "
                 "Only include memory_unit."
-            ).format(unit_in_name)
+            )
         elif (
             unit_in_name in MEMORY_UNIT_ABBREV.keys()
             or unit_in_name in MEMORY_UNIT_ABBREV.values()
         ):
             yield (
-                "Suffix '{}' doesn't match memory_unit. "
+                f"Suffix '{unit_in_name}' doesn't match memory_unit "
+                f"{memory_unit.name}'. "
                 "Confirm the unit is correct and only include memory_unit."
-            ).format(unit_in_name)
+            )

-    elif hasattr(metric, "unit"):
-        if unit_in_name == metric.unit:
+    elif unit is not None:
+        if unit_in_name == unit:
             yield (
-                "Suffix '{}' is redundant with unit param. " "Only include unit."
-            ).format(unit_in_name)
+                f"Suffix '{unit_in_name}' is redundant with unit param "
+                f"'{unit}'. "
+                "Only include unit."
+            )


-def check_category_generic(category_name, metrics):
+def check_category_generic(
+    category_name: str, metrics: Iterable[metrics.Metric]
+) -> LintGenerator:
     """
     The category name is too generic.
     """
     GENERIC_CATEGORIES = ["metrics", "events"]

     if category_name in GENERIC_CATEGORIES:
-        yield "Category '{}' is too generic.".format(category_name)
+        yield (
+            f"Category '{category_name}' is too generic. "
+            f"Don't use {_english_list(GENERIC_CATEGORIES)} for category names"
+        )


-def check_bug_number(metric, parser_config={}):
+def check_bug_number(
+    metric: metrics.Metric, parser_config: Dict[str, Any] = {}
+) -> LintGenerator:
     number_bugs = [str(bug) for bug in metric.bugs if isinstance(bug, int)]

     if len(number_bugs):
         yield (
-            "For bugs {}: "
-            "Bug numbers are deprecated and should be changed to full URLs."
-        ).format(", ".join(number_bugs))
+            f"For bugs {', '.join(number_bugs)}: "
+            "Bug numbers are deprecated and should be changed to full URLs. "
+            "For example, use 'http://bugzilla.mozilla.org/12345' instead of '12345'."
+        )


-def check_valid_in_baseline(metric, parser_config={}):
+def check_valid_in_baseline(
+    metric: metrics.Metric, parser_config: Dict[str, Any] = {}
+) -> LintGenerator:
     allow_reserved = parser_config.get("allow_reserved", False)

     if not allow_reserved and "baseline" in metric.send_in_pings:
         yield (
             "The baseline ping is Glean-internal. "
-            "User metrics should go into the 'metrics' ping or custom pings."
+            "Remove 'baseline' from the send_in_pings array."
         )


-def check_misspelled_pings(metric, parser_config={}):
-    builtin_pings = ["metrics", "events"]
+def check_misspelled_pings(
+    metric: metrics.Metric, parser_config: Dict[str, Any] = {}
+) -> LintGenerator:
     for ping in metric.send_in_pings:
-        for builtin in builtin_pings:
+        for builtin in pings.RESERVED_PING_NAMES:
             distance = _hamming_distance(ping, builtin)
             if distance == 1:
-                yield ("Ping '{}' seems misspelled. Did you mean '{}'?").format(
-                    ping, builtin
-                )
+                yield f"Ping '{ping}' seems misspelled. Did you mean '{builtin}'?"


-CATEGORY_CHECKS = {
-    "COMMON_PREFIX": check_common_prefix,
-    "CATEGORY_GENERIC": check_category_generic,
+def check_user_lifetime_expiration(
+    metric: metrics.Metric, parser_config: Dict[str, Any] = {}
+) -> LintGenerator:
+
+    if metric.lifetime == metrics.Lifetime.user and metric.expires != "never":
+        yield (
+            "Metrics with 'user' lifetime cannot have an expiration date. "
+            "They live as long as the user profile does. "
+            "Set expires to 'never'."
+        )
+
+
+# The checks that operate on an entire category of metrics:
+#    {NAME: (function, is_error)}
+CATEGORY_CHECKS: Dict[
+    str, Tuple[Callable[[str, Iterable[metrics.Metric]], LintGenerator], CheckType]
+] = {
+    "COMMON_PREFIX": (check_common_prefix, CheckType.error),
+    "CATEGORY_GENERIC": (check_category_generic, CheckType.error),
 }


-INDIVIDUAL_CHECKS = {
-    "UNIT_IN_NAME": check_unit_in_name,
-    "BUG_NUMBER": check_bug_number,
-    "BASELINE_PING": check_valid_in_baseline,
-    "MISSPELLED_PING": check_misspelled_pings,
+# The checks that operate on individual metrics:
+#    {NAME: (function, is_error)}
+INDIVIDUAL_CHECKS: Dict[
+    str, Tuple[Callable[[metrics.Metric, dict], LintGenerator], CheckType]
+] = {
+    "UNIT_IN_NAME": (check_unit_in_name, CheckType.error),
+    "BUG_NUMBER": (check_bug_number, CheckType.error),
+    "BASELINE_PING": (check_valid_in_baseline, CheckType.error),
+    "MISSPELLED_PING": (check_misspelled_pings, CheckType.error),
+    "USER_LIFETIME_EXPIRATION": (check_user_lifetime_expiration, CheckType.warning),
 }


-def lint_metrics(objs, parser_config={}, file=sys.stderr):
+class GlinterNit:
+    def __init__(self, check_name: str, name: str, msg: str, check_type: CheckType):
+        self.check_name = check_name
+        self.name = name
+        self.msg = msg
+        self.check_type = check_type
+
+    def format(self):
+        return (
+            f"{self.check_type.name.upper()}: {self.check_name}: "
+            f"{self.name}: {self.msg}"
+        )
+
+
+def lint_metrics(
+    objs: metrics.ObjectTree, parser_config: Dict[str, Any] = {}, file=sys.stderr
+) -> List[GlinterNit]:
     """
     Performs glinter checks on a set of metrics objects.
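Each nit now records its severity, and `GlinterNit.format()` prefixes it, so warnings and errors are distinguishable in the output. For example (the message text below is illustrative):

    nit = GlinterNit(
        "BUG_NUMBER",
        "browser.engagement",
        "Bug numbers are deprecated and should be changed to full URLs.",
        CheckType.error,
    )
    assert nit.format().startswith("ERROR: BUG_NUMBER: browser.engagement:")

    warning = GlinterNit("USER_LIFETIME_EXPIRATION", "a.b", "msg", CheckType.warning)
    assert warning.format().startswith("WARNING:")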
@@ -193,26 +282,40 @@ def lint_metrics(objs, parser_config={}, file=sys.stderr):
     :param file: The stream to write errors to.
     :returns: List of nits.
     """
-    nits = []
-    for (category_name, metrics) in sorted(list(objs.items())):
+    nits: List[GlinterNit] = []
+    for (category_name, category) in sorted(list(objs.items())):
         if category_name == "pings":
             continue

-        for (check_name, check_func) in CATEGORY_CHECKS.items():
-            if any(check_name in metric.no_lint for metric in metrics.values()):
+        # Make sure the category has only Metrics, not Pings
+        category_metrics = dict(
+            (name, metric)
+            for (name, metric) in category.items()
+            if isinstance(metric, metrics.Metric)
+        )
+
+        for (cat_check_name, (cat_check_func, check_type)) in CATEGORY_CHECKS.items():
+            if any(
+                cat_check_name in metric.no_lint for metric in category_metrics.values()
+            ):
                 continue
             nits.extend(
-                (check_name, category_name, msg)
-                for msg in check_func(category_name, metrics.values())
+                GlinterNit(cat_check_name, category_name, msg, check_type)
+                for msg in cat_check_func(category_name, category_metrics.values())
             )

-        for (metric_name, metric) in sorted(list(metrics.items())):
-            for (check_name, check_func) in INDIVIDUAL_CHECKS.items():
+        for (metric_name, metric) in sorted(list(category_metrics.items())):
+            for (check_name, (check_func, check_type)) in INDIVIDUAL_CHECKS.items():
                 new_nits = list(check_func(metric, parser_config))
                 if len(new_nits):
                     if check_name not in metric.no_lint:
                         nits.extend(
-                            (check_name, ".".join([metric.category, metric.name]), msg)
+                            GlinterNit(
+                                check_name,
+                                ".".join([metric.category, metric.name]),
+                                msg,
+                                check_type,
+                            )
                             for msg in new_nits
                         )
                 else:
@@ -221,20 +324,21 @@ def lint_metrics(objs, parser_config={}, file=sys.stderr):
                         and check_name in metric.no_lint
                     ):
                         nits.append(
-                            (
+                            GlinterNit(
                                 "SUPERFLUOUS_NO_LINT",
                                 ".".join([metric.category, metric.name]),
                                 (
-                                    "Superfluous no_lint entry '{}'. "
+                                    f"Superfluous no_lint entry '{check_name}'. "
                                     "Please remove it."
-                                ).format(check_name),
+                                ),
+                                CheckType.warning,
                             )
                         )

     if len(nits):
         print("Sorry, Glean found some glinter nits:", file=file)
-        for check_name, name, msg in nits:
-            print("{}: {}: {}".format(check_name, name, msg), file=file)
+        for nit in nits:
+            print(nit.format(), file=file)
         print("", file=file)
         print("Please fix the above nits to continue.", file=file)
         print(
@@ -248,7 +352,7 @@ def lint_metrics(objs, parser_config={}, file=sys.stderr):
     return nits


-def lint_yaml_files(input_filepaths, file=sys.stderr):
+def lint_yaml_files(input_filepaths: Iterable[Path], file=sys.stderr) -> List:
     """
     Performs glinter YAML lint on a set of files.
@@ -257,32 +361,36 @@ def lint_yaml_files(input_filepaths, file=sys.stderr):
     :returns: List of nits.
     """

-    nits = []
+    # Generic type since the actual type comes from yamllint, which we don't
+    # control.
+    nits: List = []
     for path in input_filepaths:
         # yamllint needs both the file content and the path.
         file_content = None
-        with path.open("r") as fd:
+        with path.open("r", encoding="utf-8") as fd:
             file_content = fd.read()

         problems = linter.run(file_content, YamlLintConfig("extends: default"), path)
-        nits.extend(p for p in problems)
+        nits.extend((path, p) for p in problems)

     if len(nits):
         print("Sorry, Glean found some glinter nits:", file=file)
-        for p in nits:
-            print("{} ({}:{}) - {}".format(path, p.line, p.column, p.message))
+        for (path, p) in nits:
+            print(f"{path} ({p.line}:{p.column}) - {p.message}")
         print("", file=file)
         print("Please fix the above nits to continue.", file=file)

-    return nits
+    return [x[1] for x in nits]


-def glinter(input_filepaths, parser_config={}, file=sys.stderr):
+def glinter(
+    input_filepaths: Iterable[Path], parser_config: Dict[str, Any] = {}, file=sys.stderr
+) -> int:
     """
     Commandline helper for glinter.

     :param input_filepaths: List of Path objects to load metrics from.
-    :param parser_config: Parser configuration objects, passed to
+    :param parser_config: Parser configuration object, passed to
         `parser.parse_objects`.
     :param file: The stream to write the errors to.
     :return: Non-zero if there were any glinter errors.
@@ -295,8 +403,9 @@ def glinter(input_filepaths, parser_config={}, file=sys.stderr):
     if util.report_validation_errors(objs):
         return 1

-    if lint_metrics(objs.value, parser_config=parser_config, file=file):
+    nits = lint_metrics(objs.value, parser_config=parser_config, file=file)
+    if any(nit.check_type == CheckType.error for nit in nits):
         return 1
-
-    print("✨ Your metrics are Glean! ✨", file=file)
+    if len(nits) == 0:
+        print("✨ Your metrics are Glean! ✨", file=file)
     return 0
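With severities attached, `glinter` now fails only on errors: warnings are printed but exit with 0, and a fully clean run prints the sparkle line. A driver sketch (file paths illustrative):

    import sys
    from pathlib import Path

    from glean_parser.lint import glinter

    raise SystemExit(
        glinter(
            [Path("metrics.yaml"), Path("pings.yaml")],
            parser_config={"allow_reserved": False},
            file=sys.stderr,
        )
    )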
@@ -8,13 +8,17 @@
 Outputter to generate Markdown documentation for metrics.
 """

+from pathlib import Path
+from typing import Any, Dict, List, Optional, Tuple, Union

 from . import metrics
 from . import pings
 from . import util
 from collections import defaultdict


-def extra_info(obj):
+def extra_info(obj: Union[metrics.Metric, pings.Ping]) -> List[Tuple[str, str]]:
     """
     Returns a list of string to string tuples with extra information for the type
     (e.g. extra keys for events) or an empty list if nothing is available.
@@ -29,10 +33,13 @@ def extra_info(obj):
         for label in obj.ordered_labels:
             extra_info.append((label, None))

+    if isinstance(obj, metrics.Jwe):
+        extra_info.append(("decrypted_name", obj.decrypted_name))
+
     return extra_info


-def ping_desc(ping_name, custom_pings_cache={}):
+def ping_desc(ping_name: str, custom_pings_cache: Dict[str, pings.Ping] = {}) -> str:
     """
     Return a text description of the ping. If a custom_pings_cache
     is available, look in there for non-reserved ping names description.
@@ -52,23 +59,21 @@ def ping_desc(ping_name, custom_pings_cache={}):
     return desc


-def metrics_docs(obj_name):
+def metrics_docs(obj_name: str) -> str:
     """
     Return a link to the documentation entry for the Glean SDK metric of the
     requested type.
     """
-    base_url = "https://mozilla.github.io/glean/book/user/metrics/{}.html"
-
     # We need to fixup labeled stuff, as types are singular and docs refer
     # to them as plural.
     fixedup_name = obj_name
     if obj_name.startswith("labeled_"):
         fixedup_name += "s"

-    return base_url.format(fixedup_name)
+    return f"https://mozilla.github.io/glean/book/user/metrics/{fixedup_name}.html"


-def ping_docs(ping_name):
+def ping_docs(ping_name: str) -> str:
     """
     Return a link to the documentation entry for the requested Glean SDK
     built-in ping.
@@ -76,17 +81,19 @@ def ping_docs(ping_name):
     if ping_name not in pings.RESERVED_PING_NAMES:
         return ""

-    return "https://mozilla.github.io/glean/book/user/pings/{}.html".format(ping_name)
+    return f"https://mozilla.github.io/glean/book/user/pings/{ping_name}.html"


-def if_empty(ping_name, custom_pings_cache={}):
-    return (
-        custom_pings_cache.get(ping_name)
-        and custom_pings_cache[ping_name].send_if_empty
-    )
+def if_empty(ping_name: str, custom_pings_cache: Dict[str, pings.Ping] = {}) -> bool:
+    if ping_name in custom_pings_cache:
+        return custom_pings_cache[ping_name].send_if_empty
+    else:
+        return False


-def ping_reasons(ping_name, custom_pings_cache):
+def ping_reasons(
+    ping_name: str, custom_pings_cache: Dict[str, pings.Ping]
+) -> Dict[str, str]:
     """
     Returns the reasons dictionary for the ping.
     """
@@ -98,7 +105,45 @@ def ping_reasons(ping_name, custom_pings_cache):
     return {}


-def output_markdown(objs, output_dir, options={}):
+def ping_data_reviews(
+    ping_name: str, custom_pings_cache: Dict[str, pings.Ping] = {}
+) -> Optional[List[str]]:
+    if ping_name in custom_pings_cache:
+        return custom_pings_cache[ping_name].data_reviews
+    else:
+        return None
+
+
+def ping_bugs(
+    ping_name: str, custom_pings_cache: Dict[str, pings.Ping] = {}
+) -> Optional[List[str]]:
+    if ping_name in custom_pings_cache:
+        return custom_pings_cache[ping_name].bugs
+    else:
+        return None
+
+
+def ping_include_client_id(
+    ping_name: str, custom_pings_cache: Dict[str, pings.Ping] = {}
+) -> bool:
+    if ping_name in custom_pings_cache:
+        return custom_pings_cache[ping_name].include_client_id
+    else:
+        return False
+
+
+def data_sensitivity_numbers(
+    data_sensitivity: Optional[List[metrics.DataSensitivity]],
+) -> str:
+    if data_sensitivity is None:
+        return "unknown"
+    else:
+        return ", ".join(str(x.value) for x in data_sensitivity)
+
+
+def output_markdown(
+    objs: metrics.ObjectTree, output_dir: Path, options: Dict[str, Any] = {}
+) -> None:
     """
     Given a tree of objects, output Markdown docs to `output_dir`.

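The new ping helpers fall back to safe defaults for anything not in the custom-pings cache (such as built-in pings), and `data_sensitivity_numbers` renders `unknown` when no sensitivity was declared. A quick behavioural check of the functions defined above:

    assert ping_data_reviews("baseline", {}) is None
    assert ping_bugs("baseline", {}) is None
    assert ping_include_client_id("baseline", {}) is False
    assert data_sensitivity_numbers(None) == "unknown"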
@@ -106,7 +151,7 @@ def output_markdown(objs, output_dir, options={}):
     contents and a section for each ping metrics are collected for.

     :param objects: A tree of objects (metrics and pings) as returned from
         `parser.parse_objects`.
     :param output_dir: Path to an output directory to write to.
     :param options: options dictionary, with the following optional key:
         - `project_title`: The projects title.
@@ -127,21 +172,23 @@ def output_markdown(objs, output_dir, options={}):
     # }
     #
     # This also builds a dictionary of custom pings, if available.
-    custom_pings_cache = defaultdict()
-    metrics_by_pings = defaultdict(list)
+    custom_pings_cache: Dict[str, pings.Ping] = defaultdict()
+    metrics_by_pings: Dict[str, List[metrics.Metric]] = defaultdict(list)
     for category_key, category_val in objs.items():
         for obj in category_val.values():
             # Filter out custom pings. We will need them for extracting
             # the description
             if isinstance(obj, pings.Ping):
                 custom_pings_cache[obj.name] = obj
-                if obj.send_if_empty:
+                # Pings that have `send_if_empty` set to true,
+                # might not have any metrics. They need to at least have an
+                # empty array of metrics to show up on the template.
+                if obj.send_if_empty and not metrics_by_pings[obj.name]:
                     metrics_by_pings[obj.name] = []
-            elif obj.is_internal_metric():
-                # This is an internal Glean metric, and we don't
-                # want docs for it.
-                continue
-            else:
+
+            # If this is an internal Glean metric, and we don't
+            # want docs for it.
+            if isinstance(obj, metrics.Metric) and not obj.is_internal_metric():
                 # If we get here, obj is definitely a metric we want
                 # docs for.
                 for ping_name in obj.send_in_pings:
@ -165,6 +212,13 @@ def output_markdown(objs, output_dir, options={}):
|
||||||
("ping_send_if_empty", lambda x: if_empty(x, custom_pings_cache)),
|
("ping_send_if_empty", lambda x: if_empty(x, custom_pings_cache)),
|
||||||
("ping_docs", ping_docs),
|
("ping_docs", ping_docs),
|
||||||
("ping_reasons", lambda x: ping_reasons(x, custom_pings_cache)),
|
("ping_reasons", lambda x: ping_reasons(x, custom_pings_cache)),
|
||||||
|
("ping_data_reviews", lambda x: ping_data_reviews(x, custom_pings_cache)),
|
||||||
|
("ping_bugs", lambda x: ping_bugs(x, custom_pings_cache)),
|
||||||
|
(
|
||||||
|
"ping_include_client_id",
|
||||||
|
lambda x: ping_include_client_id(x, custom_pings_cache),
|
||||||
|
),
|
||||||
|
("data_sensitivity_numbers", data_sensitivity_numbers),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
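The new `ping_*` helpers above are registered as Jinja2 filters, so templates can write `ping_name|ping_bugs`. A minimal sketch of how they degrade for pings that are not in the custom-ping cache, assuming the module is importable as glean_parser.markdown; the `FakePing` stand-in and its field values are hypothetical, the real cache holds `pings.Ping` objects with the same attributes:

from collections import namedtuple

from glean_parser.markdown import if_empty, ping_bugs

# Hypothetical stand-in for pings.Ping; only the attributes the
# filters read are modelled here.
FakePing = namedtuple("FakePing", "send_if_empty bugs")
cache = {"prototype": FakePing(send_if_empty=True, bugs=["1635260"])}

assert if_empty("prototype", cache) is True
assert ping_bugs("prototype", cache) == ["1635260"]
# Built-in pings such as "baseline" are not in the custom-ping cache,
# so the filters fall back to False/None instead of raising.
assert if_empty("baseline", cache) is False
assert ping_bugs("baseline", cache) is None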
@@ -9,50 +9,55 @@ Classes for each of the high-level metric types.
 """


 import enum
-import sys
+from typing import Any, Dict, List, Optional, Type, Union  # noqa


+from . import pings
 from . import util


-# Import a backport of PEP487 to support __init_subclass__
-if sys.version_info < (3, 6):
-    import pep487
-
-    base_object = pep487.PEP487Object
-else:
-    base_object = object
+# Important: if the values are ever changing here, make sure
+# to also fix mozilla/glean. Otherwise language bindings may
+# break there.


 class Lifetime(enum.Enum):
     ping = 0
-    user = 1
-    application = 2
+    application = 1
+    user = 2


-class Metric(base_object):
-    glean_internal_metric_cat = "glean.internal.metrics"
-    metric_types = {}
-    default_store_names = ["metrics"]
+class DataSensitivity(enum.Enum):
+    technical = 1
+    interaction = 2
+    web_activity = 3
+    highly_sensitive = 4
+
+
+class Metric:
+    typename: str = "ERROR"
+    glean_internal_metric_cat: str = "glean.internal.metrics"
+    metric_types: Dict[str, Any] = {}
+    default_store_names: List[str] = ["metrics"]

     def __init__(
         self,
-        type,
-        category,
-        name,
-        bugs,
-        description,
-        notification_emails,
-        expires,
-        data_reviews=None,
-        version=0,
-        disabled=False,
-        lifetime="ping",
-        send_in_pings=None,
-        unit="",
-        gecko_datapoint="",
-        no_lint=None,
-        _config=None,
-        _validated=False,
+        type: str,
+        category: str,
+        name: str,
+        bugs: List[str],
+        description: str,
+        notification_emails: List[str],
+        expires: str,
+        data_reviews: Optional[List[str]] = None,
+        version: int = 0,
+        disabled: bool = False,
+        lifetime: str = "ping",
+        send_in_pings: Optional[List[str]] = None,
+        unit: str = "",
+        gecko_datapoint: str = "",
+        no_lint: Optional[List[str]] = None,
+        data_sensitivity: Optional[List[str]] = None,
+        _config: Optional[Dict[str, Any]] = None,
+        _validated: bool = False,
     ):
         # Avoid cyclical import
         from . import parser
@@ -78,6 +83,10 @@ class Metric(base_object):
         if no_lint is None:
             no_lint = []
         self.no_lint = no_lint
+        if data_sensitivity is not None:
+            self.data_sensitivity = [
+                getattr(DataSensitivity, x) for x in data_sensitivity
+            ]

         # _validated indicates whether this metric has already been jsonschema
         # validated (but not any of the Python-level validation).
@@ -85,7 +94,7 @@ class Metric(base_object):
             data = {
                 "$schema": parser.METRICS_ID,
                 self.category: {self.name: self.serialize()},
-            }
+            }  # type: Dict[str, util.JSONType]
             for error in parser.validate(data):
                 raise ValueError(error)

@@ -101,7 +110,14 @@ class Metric(base_object):
         super().__init_subclass__(**kwargs)

     @classmethod
-    def make_metric(cls, category, name, metric_info, config={}, validated=False):
+    def make_metric(
+        cls,
+        category: str,
+        name: str,
+        metric_info: Dict[str, util.JSONType],
+        config: Dict[str, Any] = {},
+        validated: bool = False,
+    ):
         """
         Given a metric_info dictionary from metrics.yaml, return a metric
         instance.
@@ -116,15 +132,17 @@ class Metric(base_object):
         :return: A new Metric instance.
         """
         metric_type = metric_info["type"]
+        if not isinstance(metric_type, str):
+            raise TypeError(f"Unknown metric type {metric_type}")
         return cls.metric_types[metric_type](
             category=category,
             name=name,
             _validated=validated,
             _config=config,
-            **metric_info
+            **metric_info,
         )

-    def serialize(self):
+    def serialize(self) -> Dict[str, util.JSONType]:
         """
         Serialize the metric back to JSON object model.
         """
@@ -135,11 +153,13 @@ class Metric(base_object):
                 d[key] = d[key].name
             if isinstance(val, set):
                 d[key] = sorted(list(val))
+            if isinstance(val, list) and len(val) and isinstance(val[0], enum.Enum):
+                d[key] = [x.name for x in val]
         del d["name"]
         del d["category"]
         return d

-    def identifier(self):
+    def identifier(self) -> str:
         """
         Create an identifier unique for this metric.
         Generally, category.name; however, Glean internal
@@ -149,17 +169,17 @@ class Metric(base_object):
             return self.name
         return ".".join((self.category, self.name))

-    def is_disabled(self):
+    def is_disabled(self) -> bool:
         return self.disabled or self.is_expired()

-    def is_expired(self):
+    def is_expired(self) -> bool:
         return util.is_expired(self.expires)

     @staticmethod
-    def validate_expires(expires):
+    def validate_expires(expires) -> None:
         return util.validate_expires(expires)

-    def is_internal_metric(self):
+    def is_internal_metric(self) -> bool:
         return self.category in (Metric.glean_internal_metric_cat, "")


@@ -206,6 +226,10 @@ class Timespan(TimeBase):
 class TimingDistribution(TimeBase):
     typename = "timing_distribution"

+    def __init__(self, *args, **kwargs):
+        self.time_unit = getattr(TimeUnit, kwargs.pop("time_unit", "nanosecond"))
+        Metric.__init__(self, *args, **kwargs)
+

 class MemoryUnit(enum.Enum):
     byte = 0
@@ -249,7 +273,7 @@ class Event(Metric):

     default_store_names = ["events"]

-    _generate_enums = [("extra_keys", "Keys")]
+    _generate_enums = [("allowed_extra_keys", "Keys")]

     def __init__(self, *args, **kwargs):
         self.extra_keys = kwargs.pop("extra_keys", {})
@@ -262,7 +286,7 @@ class Event(Metric):
         return sorted(list(self.extra_keys.keys()))

     @staticmethod
-    def validate_extra_keys(extra_keys, config):
+    def validate_extra_keys(extra_keys: Dict[str, str], config: Dict[str, Any]) -> None:
         if not config.get("allow_reserved") and any(
             k.startswith("glean.") for k in extra_keys.keys()
         ):
@@ -276,6 +300,14 @@ class Uuid(Metric):
     typename = "uuid"


+class Jwe(Metric):
+    typename = "jwe"
+
+    def __init__(self, *args, **kwargs):
+        self.decrypted_name = kwargs.pop("decrypted_name")
+        super().__init__(*args, **kwargs)
+
+
 class Labeled(Metric):
     labeled = True

@@ -289,7 +321,7 @@ class Labeled(Metric):
             self.labels = None
         super().__init__(*args, **kwargs)

-    def serialize(self):
+    def serialize(self) -> Dict[str, util.JSONType]:
         """
         Serialize the metric back to JSON object model.
         """
@@ -309,3 +341,6 @@ class LabeledString(Labeled, String):

 class LabeledCounter(Labeled, Counter):
     typename = "labeled_counter"
+
+
+ObjectTree = Dict[str, Dict[str, Union[Metric, pings.Ping]]]
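The `data_sensitivity` strings declared in a metrics.yaml file are looked up on the new `DataSensitivity` enum at construction time; `serialize()` writes enum lists back out by member name, and the markdown outputter prints the numeric category. A small round-trip sketch of that behavior:

from glean_parser.metrics import DataSensitivity

declared = ["technical", "interaction"]  # as written in metrics.yaml
members = [getattr(DataSensitivity, x) for x in declared]

# serialize() turns enum lists back into their member names ...
assert [x.name for x in members] == ["technical", "interaction"]
# ... while data_sensitivity_numbers in markdown.py renders the values.
assert ", ".join(str(x.value) for x in members) == "1, 2"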
@@ -12,11 +12,12 @@ from collections import OrderedDict
 import functools
 from pathlib import Path
 import textwrap
+from typing import Any, Dict, Generator, Iterable, Optional, Tuple, Union

-import jsonschema
-from jsonschema.exceptions import ValidationError
+import jsonschema  # type: ignore
+from jsonschema.exceptions import ValidationError  # type: ignore

-from .metrics import Metric
+from .metrics import Metric, ObjectTree
 from .pings import Ping, RESERVED_PING_NAMES
 from . import util

@@ -45,13 +46,15 @@ def _update_validator(validator):
         if len(missing_properties):
             missing_properties = sorted(list(missing_properties))
             yield ValidationError(
-                "Missing required properties: {}".format(", ".join(missing_properties))
+                f"Missing required properties: {', '.join(missing_properties)}"
             )

     validator.VALIDATORS["required"] = required


-def _load_file(filepath):
+def _load_file(
+    filepath: Path,
+) -> Generator[str, None, Tuple[Dict[str, util.JSONType], Optional[str]]]:
     """
     Load a metrics.yaml or pings.yaml format file.
     """
@@ -62,15 +65,20 @@ def _load_file(filepath):
         return {}, None

     if content is None:
-        yield util.format_error(
-            filepath, "", "'{}' file can not be empty.".format(filepath)
-        )
+        yield util.format_error(filepath, "", f"'{filepath}' file can not be empty.")
+        return {}, None
+
+    if not isinstance(content, dict):
         return {}, None

     if content == {}:
         return {}, None

-    filetype = FILE_TYPES.get(content.get("$schema"))
+    schema_key = content.get("$schema")
+    if not isinstance(schema_key, str):
+        raise TypeError(f"Invalid schema key {schema_key}")
+
+    filetype = FILE_TYPES.get(schema_key)

     for error in validate(content, filepath):
         content = {}
@@ -80,7 +88,7 @@ def _load_file(filepath):


 @functools.lru_cache(maxsize=1)
-def _load_schemas():
+def _load_schemas() -> Dict[str, Tuple[Any, Any]]:
     """
     Load all of the known schemas from disk, and put them in a map based on the
     schema's $id.
@@ -97,7 +105,9 @@ def _load_schemas():
     return schemas


-def _get_schema(schema_id, filepath="<input>"):
+def _get_schema(
+    schema_id: str, filepath: Union[str, Path] = "<input>"
+) -> Tuple[Any, Any]:
     """
     Get the schema for the given schema $id.
     """
@@ -105,22 +115,25 @@ def _get_schema(schema_id, filepath="<input>"):
     if schema_id not in schemas:
         raise ValueError(
             util.format_error(
-                filepath,
-                "",
-                "$schema key must be one of {}".format(", ".join(schemas.keys())),
+                filepath, "", f"$schema key must be one of {', '.join(schemas.keys())}",
             )
         )
     return schemas[schema_id]


-def _get_schema_for_content(content, filepath):
+def _get_schema_for_content(
+    content: Dict[str, util.JSONType], filepath: Union[str, Path]
+) -> Tuple[Any, Any]:
     """
     Get the appropriate schema for the given JSON content.
     """
-    return _get_schema(content.get("$schema"), filepath)
+    schema_url = content.get("$schema")
+    if not isinstance(schema_url, str):
+        raise TypeError("Invalid $schema type {schema_url}")
+    return _get_schema(schema_url, filepath)


-def get_parameter_doc(key):
+def get_parameter_doc(key: str) -> str:
     """
     Returns documentation about a specific metric parameter.
     """
@@ -128,7 +141,7 @@ def get_parameter_doc(key):
     return schema["definitions"]["metric"]["properties"][key]["description"]


-def get_ping_parameter_doc(key):
+def get_ping_parameter_doc(key: str) -> str:
     """
     Returns documentation about a specific ping parameter.
     """
@@ -136,7 +149,9 @@ def get_ping_parameter_doc(key):
     return schema["additionalProperties"]["properties"][key]["description"]


-def validate(content, filepath="<input>"):
+def validate(
+    content: Dict[str, util.JSONType], filepath: Union[str, Path] = "<input>"
+) -> Generator[str, None, None]:
     """
     Validate the given content against the appropriate schema.
     """
@@ -151,7 +166,13 @@ def validate(content, filepath="<input>"):
     )


-def _instantiate_metrics(all_objects, sources, content, filepath, config):
+def _instantiate_metrics(
+    all_objects: ObjectTree,
+    sources: Dict[Any, Path],
+    content: Dict[str, util.JSONType],
+    filepath: Path,
+    config: Dict[str, Any],
+) -> Generator[str, None, None]:
     """
     Load a list of metrics.yaml files, convert the JSON information into Metric
     objects, and merge them into a single tree.
@@ -166,12 +187,16 @@ def _instantiate_metrics(all_objects, sources, content, filepath, config):
         if not config.get("allow_reserved") and category_key.split(".")[0] == "glean":
             yield util.format_error(
                 filepath,
-                "For category '{}'".format(category_key),
+                f"For category '{category_key}'",
                 "Categories beginning with 'glean' are reserved for "
                 "Glean internal use.",
             )
             continue
         all_objects.setdefault(category_key, OrderedDict())

+        if not isinstance(category_val, dict):
+            raise TypeError(f"Invalid content for {category_key}")
+
         for metric_key, metric_val in category_val.items():
             try:
                 metric_obj = Metric.make_metric(
@@ -179,9 +204,7 @@ def _instantiate_metrics(all_objects, sources, content, filepath, config):
                 )
             except Exception as e:
                 yield util.format_error(
-                    filepath,
-                    "On instance {}.{}".format(category_key, metric_key),
-                    str(e),
+                    filepath, f"On instance {category_key}.{metric_key}", str(e),
                 )
                 metric_obj = None
             else:
@@ -191,7 +214,7 @@ def _instantiate_metrics(all_objects, sources, content, filepath, config):
                 ):
                     yield util.format_error(
                         filepath,
-                        "On instance {}.{}".format(category_key, metric_key),
+                        f"On instance {category_key}.{metric_key}",
                         'Only internal metrics may specify "all-pings" '
                         'in "send_in_pings"',
                     )
@@ -206,8 +229,9 @@ def _instantiate_metrics(all_objects, sources, content, filepath, config):
                 yield util.format_error(
                     filepath,
                     "",
-                    ("Duplicate metric name '{}.{}'" "already defined in '{}'").format(
-                        category_key, metric_key, already_seen
+                    (
+                        f"Duplicate metric name '{category_key}.{metric_key}' "
+                        f"already defined in '{already_seen}'"
                     ),
                 )
             else:
@@ -215,7 +239,13 @@ def _instantiate_metrics(all_objects, sources, content, filepath, config):
             sources[(category_key, metric_key)] = filepath


-def _instantiate_pings(all_objects, sources, content, filepath, config):
+def _instantiate_pings(
+    all_objects: ObjectTree,
+    sources: Dict[Any, Path],
+    content: Dict[str, util.JSONType],
+    filepath: Path,
+    config: Dict[str, Any],
+) -> Generator[str, None, None]:
     """
     Load a list of pings.yaml files, convert the JSON information into Ping
     objects.
@@ -227,18 +257,18 @@ def _instantiate_pings(all_objects, sources, content, filepath, config):
         if ping_key in RESERVED_PING_NAMES:
             yield util.format_error(
                 filepath,
-                "For ping '{}'".format(ping_key),
-                "Ping uses a reserved name ({})".format(RESERVED_PING_NAMES),
+                f"For ping '{ping_key}'",
+                f"Ping uses a reserved name ({RESERVED_PING_NAMES})",
             )
             continue
+        if not isinstance(ping_val, dict):
+            raise TypeError(f"Invalid content for ping {ping_key}")
         ping_val["name"] = ping_key
         try:
             ping_obj = Ping(**ping_val)
         except Exception as e:
-            yield util.format_error(
-                filepath, "On instance '{}'".format(ping_key), str(e)
-            )
-            ping_obj = None
+            yield util.format_error(filepath, f"On instance '{ping_key}'", str(e))
+            continue

         already_seen = sources.get(ping_key)
         if already_seen is not None:
@@ -246,21 +276,23 @@ def _instantiate_pings(all_objects, sources, content, filepath, config):
             yield util.format_error(
                 filepath,
                 "",
-                ("Duplicate ping name '{}'" "already defined in '{}'").format(
-                    ping_key, already_seen
-                ),
+                f"Duplicate ping name '{ping_key}' "
+                f"already defined in '{already_seen}'",
             )
         else:
             all_objects.setdefault("pings", {})[ping_key] = ping_obj
             sources[ping_key] = filepath


-def _preprocess_objects(objs, config):
+def _preprocess_objects(objs: ObjectTree, config: Dict[str, Any]) -> ObjectTree:
     """
     Preprocess the object tree to better set defaults.
     """
     for category in objs.values():
         for obj in category.values():
+            if not isinstance(obj, Metric):
+                continue
+
             if not config.get("do_not_disable_expired", False) and hasattr(
                 obj, "is_disabled"
             ):
@@ -276,7 +308,9 @@ def _preprocess_objects(objs, config):


 @util.keep_value
-def parse_objects(filepaths, config={}):
+def parse_objects(
+    filepaths: Iterable[Path], config: Dict[str, Any] = {}
+) -> Generator[str, None, ObjectTree]:
     """
     Parse one or more metrics.yaml and/or pings.yaml files, returning a tree of
     `metrics.Metric` and `pings.Ping` instances.
@@ -298,14 +332,15 @@ def parse_objects(filepaths, config={}):
         files
     :param config: A dictionary of options that change parsing behavior.
         Supported keys are:

         - `allow_reserved`: Allow values reserved for internal Glean use.
         - `do_not_disable_expired`: Don't mark expired metrics as disabled.
           This is useful when you want to retain the original "disabled"
           value from the `metrics.yaml`, rather than having it overridden when
           the metric expires.
     """
-    all_objects = OrderedDict()
-    sources = {}
+    all_objects: ObjectTree = OrderedDict()
+    sources: Dict[Any, Path] = {}
     filepaths = util.ensure_list(filepaths)
     for filepath in filepaths:
         content, filetype = yield from _load_file(filepath)
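All of the `_instantiate_*` helpers and `parse_objects` are generators that yield formatted error strings as they run; the final object tree is captured by the `@util.keep_value` decorator. A hedged sketch of driving the parser, where the metrics.yaml path is a placeholder:

from pathlib import Path

from glean_parser import parser

result = parser.parse_objects([Path("metrics.yaml")])
errors = list(result)  # exhausting the generator collects the errors
if errors:
    for error in errors:
        print(error)
else:
    # keep_value exposes the generator's return value (the ObjectTree).
    all_objects = result.value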
@@ -8,33 +8,27 @@
 Classes for managing the description of pings.
 """

-import sys
+from typing import Dict, List, Optional


-# Import a backport of PEP487 to support __init_subclass__
-if sys.version_info < (3, 6):
-    import pep487
-
-    base_object = pep487.PEP487Object
-else:
-    base_object = object
+from . import util


-RESERVED_PING_NAMES = ["baseline", "metrics", "events", "deletion_request"]
+RESERVED_PING_NAMES = ["baseline", "metrics", "events", "deletion-request"]


-class Ping(base_object):
+class Ping:
     def __init__(
         self,
-        name,
-        description,
-        bugs,
-        notification_emails,
-        data_reviews=None,
-        include_client_id=False,
-        send_if_empty=False,
-        reasons=None,
-        _validated=False,
+        name: str,
+        description: str,
+        bugs: List[str],
+        notification_emails: List[str],
+        data_reviews: Optional[List[str]] = None,
+        include_client_id: bool = False,
+        send_if_empty: bool = False,
+        reasons: Dict[str, str] = None,
+        _validated: bool = False,
     ):
         # Avoid cyclical import
         from . import parser
@@ -55,21 +49,24 @@ class Ping(base_object):
         # _validated indicates whether this metric has already been jsonschema
         # validated (but not any of the Python-level validation).
         if not _validated:
-            data = {"$schema": parser.PINGS_ID, self.name: self.serialize()}
+            data: Dict[str, util.JSONType] = {
+                "$schema": parser.PINGS_ID,
+                self.name: self.serialize(),
+            }
             for error in parser.validate(data):
                 raise ValueError(error)

     _generate_enums = [("reason_codes", "ReasonCodes")]

     @property
-    def type(self):
+    def type(self) -> str:
         return "ping"

     @property
-    def reason_codes(self):
+    def reason_codes(self) -> List[str]:
         return sorted(list(self.reasons.keys()))

-    def serialize(self):
+    def serialize(self) -> Dict[str, util.JSONType]:
         """
         Serialize the metric back to JSON object model.
         """
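Unless `_validated=True` is passed, the constructor round-trips the ping through `serialize()` and the pings JSON schema, raising ValueError on the first violation. A sketch with illustrative field values, skipping that round trip the way the parser does for already-validated input:

from glean_parser.pings import Ping

ping = Ping(
    name="prototype",
    description="A ping used only for this example.",
    bugs=["https://bugzilla.mozilla.org/show_bug.cgi?id=1635260"],
    notification_emails=["nobody@example.com"],
    data_reviews=["https://example.com/data-review"],
    _validated=True,  # skip the jsonschema round trip for this sketch
)
assert ping.type == "ping"
assert ping.reason_codes == []  # no reasons declared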
@@ -87,24 +87,26 @@ definitions:
       metrics coming from GeckoView.

     - `timespan`: Represents a time interval. Additional properties:
-      `time_unit`_.
+      `time_unit`.

     - `timing_distribution`: Record the distribution of multiple
-      timings. Additional properties: `time_unit`_.
+      timings. Additional properties: `time_unit`.

     - `datetime`: A date/time value. Represented as an ISO datetime in
-      UTC. Additional properties: `time_unit`_.
+      UTC. Additional properties: `time_unit`.

     - `uuid`: Record a UUID v4.

+    - `jwe`: Record a [JWE](https://tools.ietf.org/html/rfc7516) value.
+
     - `memory_distribution`: A histogram for recording memory usage
-      values. Additional properties: `memory_unit`_.
+      values. Additional properties: `memory_unit`.

     - `custom_distribution`: A histogram with a custom range and number
       of buckets. This metric type is for legacy support only and is
       only allowed for metrics coming from GeckoView. Additional
-      properties: `range_min`_, `range_max`_, `bucket_count`_,
-      `histogram_type`_.
+      properties: `range_min`, `range_max`, `bucket_count`,
+      `histogram_type`.

     - Additionally, labeled versions of many metric types are supported.
       These support the `labels`_ parameter, allowing multiple instances
@@ -127,6 +129,7 @@ definitions:
       - memory_distribution
       - datetime
       - uuid
+      - jwe
       - labeled_boolean
      - labeled_string
      - labeled_counter
@@ -256,12 +259,21 @@ definitions:
   time_unit:
     title: Time unit
     description: |
-      Specifies the unit that the metric will be stored and displayed in. If
-      not provided, it defaults to milliseconds. Time values are sent to the
-      backend as integers, so `time_unit`_ determines the maximum resolution
-      at which timespans are recorded. Times are always truncated, not
-      rounded, to the nearest time unit. For example, a measurement of 25 ns
-      will be returned as 0 ms if `time_unit` is `"millisecond"`.
+      For timespans and datetimes, specifies the unit that the metric will
+      be stored and displayed in. If not provided, it defaults to
+      "millisecond". Time values are sent to the backend as integers, so
+      `time_unit`_ determines the maximum resolution at which timespans are
+      recorded. Times are always truncated, not rounded, to the nearest time
+      unit. For example, a measurement of 25 ns will be returned as 0 ms if
+      `time_unit` is `"millisecond"`.
+
+      For timing distributions, times are always recorded and sent in
+      nanoseconds, but `time_unit` controls the minimum and maximum values.
+      If not provided, it defaults to "nanosecond".
+
+      - nanosecond: 1ns <= x <= 10 minutes
+      - microsecond: 1μs <= x <= ~6.94 days
+      - millisecond: 1ms <= x <= ~19 years

       Valid when `type`_ is `timespan`, `timing_distribution` or `datetime`.
     enum:
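A worked example of the truncation rule quoted above: values are truncated toward zero, never rounded, when converted to the configured unit.

NS_PER_MS = 1_000_000

duration_ns = 25  # a 25 ns measurement
# With time_unit "millisecond" the value is stored as whole milliseconds,
# so 25 ns truncates to 0 ms rather than rounding.
assert duration_ns // NS_PER_MS == 0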
@@ -299,9 +311,9 @@ definitions:
     description: |
       A list of labels for a labeled metric. If provided, the labels are
       enforced at run time, and recording to an unknown label is recorded
-      to the special label ``__other__``. If not provided, the labels
+      to the special label `__other__`. If not provided, the labels
       may be anything, but using too many unique labels will put some
-      labels in the special label ``__other__``.
+      labels in the special label `__other__`.

       Valid with any of the labeled metric types.
     anyOf:
@@ -317,6 +329,7 @@ definitions:
     description: |
       The acceptable keys on the "extra" object sent with events. This is an
       object mapping the key to an object containing metadata about the key.
+      A maximum of 10 extra keys is allowed.
       This metadata object has the following keys:

       - `description`: **Required.** A description of the key.
@@ -332,6 +345,7 @@ definitions:
         type: string
       required:
         - description
+    maxProperties: 10
     default: {}

   gecko_datapoint:
@@ -401,6 +415,62 @@ definitions:
     items:
       type: string

+  decrypted_name:
+    title: Decrypted name
+    description: |
+      Name of the column where to persist the decrypted value
+      stored in the JWE after processing.
+
+      Required when `type`_ is `jwe`.
+    type: string
+    pattern: "^[a-z_][a-z0-9_]{0,29}(\\.[a-z_][a-z0-9_]{0,29})*$"
+
+  data_sensitivity:
+    title: The level of data sensitivity
+    description: |
+      There are four data collection categories related to data sensitivity
+      [defined here](https://wiki.mozilla.org/Firefox/Data_Collection):
+
+      - **Category 1: Technical Data:** (`technical`) Information about the
+        machine or Firefox itself. Examples include OS, available memory,
+        crashes and errors, outcome of automated processes like updates,
+        safebrowsing, activation, version \#s, and buildid. This also
+        includes compatibility information about features and APIs used by
+        websites, addons, and other 3rd-party software that interact with
+        Firefox during usage.
+
+      - **Category 2: Interaction Data:** (`interaction`) Information about
+        the user’s direct engagement with Firefox. Examples include how many
+        tabs, addons, or windows a user has open; uses of specific Firefox
+        features; session length, scrolls and clicks; and the status of
+        discrete user preferences.
+
+      - **Category 3: Web activity data:** (`web_activity`) Information
+        about user web browsing that could be considered sensitive. Examples
+        include users’ specific web browsing history; general information
+        about their web browsing history (such as TLDs or categories of
+        webpages visited over time); and potentially certain types of
+        interaction data about specific webpages visited.
+
+      - **Category 4: Highly sensitive data:** (`highly_sensitive`)
+        Information that directly identifies a person, or if combined with
+        other data could identify a person. Examples include e-mail,
+        usernames, identifiers such as google ad id, apple id, fxaccount,
+        city or country (unless small ones are explicitly filtered out), or
+        certain cookies. It may be embedded within specific website content,
+        such as memory contents, dumps, captures of screen data, or DOM
+        data.
+    type: array
+    items:
+      enum:
+        - technical
+        - interaction
+        - web_activity
+        - highly_sensitive
+      type: string
+      minLength: 1
+    uniqueItems: true
+
   required:
     - type
     - bugs
@@ -518,3 +588,13 @@ additionalProperties:
         - unit
       description: |
         `quantity` is missing required parameter `unit`.
+  -
+    if:
+      properties:
+        type:
+          const: jwe
+    then:
+      required:
+        - decrypted_name
+    description: |
+      `jwe` is missing required parameter `decrypted_name`.
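A hedged sketch of a metrics.yaml fragment exercising the new `jwe` type with its required `decrypted_name`, plus a `data_sensitivity` list. The category, metric names, and URLs are illustrative only; `parser.validate` yields one formatted message per schema violation:

import yaml

from glean_parser import parser

CONTENT = yaml.safe_load(
    """
$schema: moz://mozilla.org/schemas/glean/metrics/1-0-0

example.category:
  anon_token:
    type: jwe
    decrypted_name: anon_token_decrypted
    description: An example JWE value.
    data_sensitivity:
      - highly_sensitive
    bugs:
      - https://bugzilla.mozilla.org/show_bug.cgi?id=1635260
    data_reviews:
      - https://example.com/data-review
    notification_emails:
      - nobody@example.com
    expires: never
"""
)

for error in parser.validate(CONTENT):
    print(error)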
@@ -10,17 +10,19 @@ Outputter to generate Swift code for metrics.

 import enum
 import json
+from pathlib import Path
+from typing import Any, Dict, Union

+from . import metrics
 from . import pings
 from . import util
-from collections import defaultdict

 # An (imcomplete) list of reserved keywords in Swift.
 # These will be replaced in generated code by their escaped form.
 SWIFT_RESERVED_NAMES = ["internal", "typealias"]


-def swift_datatypes_filter(value):
+def swift_datatypes_filter(value: util.JSONType) -> str:
     """
     A Jinja2 filter that renders Swift literals.

@@ -62,7 +64,7 @@ def swift_datatypes_filter(value):
     return "".join(SwiftEncoder().iterencode(value))


-def type_name(obj):
+def type_name(obj: Union[metrics.Metric, pings.Ping]) -> str:
     """
     Returns the Swift type to use for a given metric or ping object.
     """
@@ -83,7 +85,7 @@ def type_name(obj):
     return class_name(obj.type)


-def class_name(obj_type):
+def class_name(obj_type: str) -> str:
     """
     Returns the Swift class name for a given metric or ping type.
     """
@@ -94,7 +96,7 @@ def class_name(obj_type):
     return util.Camelize(obj_type) + "MetricType"


-def variable_name(var):
+def variable_name(var: str) -> str:
     """
     Returns a valid Swift variable name, escaping keywords if necessary.
     """
@@ -104,12 +106,24 @@ def variable_name(var):
     return var


-def output_swift(objs, output_dir, options={}):
+class Category:
+    """
+    Data struct holding information about a metric to be used in the template.
+    """
+
+    name: str
+    objs: Dict[str, Union[metrics.Metric, pings.Ping]]
+    contains_pings: bool
+
+
+def output_swift(
+    objs: metrics.ObjectTree, output_dir: Path, options: Dict[str, Any] = {}
+) -> None:
     """
     Given a tree of objects, output Swift code to `output_dir`.

     :param objects: A tree of objects (metrics and pings) as returned from
                     `parser.parse_objects`.
     :param output_dir: Path to an output directory to write to.
     :param options: options dictionary, with the following optional keys:
         - namespace: The namespace to generate metrics in
@@ -126,49 +140,34 @@ def output_swift(objs, output_dir, options={}):
         ),
     )

-    # The object parameters to pass to constructors.
-    # **CAUTION**: This list needs to be in the order the type constructor expects them.
-    # The `test_order_of_fields` test checks that the generated code is valid.
-    # **DO NOT CHANGE THE ORDER OR ADD NEW FIELDS IN THE MIDDLE**
-    extra_args = [
-        "category",
-        "name",
-        "send_in_pings",
-        "lifetime",
-        "disabled",
-        "time_unit",
-        "allowed_extra_keys",
-        "reason_codes",
-    ]
-
     namespace = options.get("namespace", "GleanMetrics")
     glean_namespace = options.get("glean_namespace", "Glean")

+    filename = "Metrics.swift"
+    filepath = output_dir / filename
+    categories = []
+
     for category_key, category_val in objs.items():
-        filename = util.Camelize(category_key) + ".swift"
-        filepath = output_dir / filename
-
-        custom_pings = defaultdict()
-        for obj in category_val.values():
-            if isinstance(obj, pings.Ping):
-                custom_pings[obj.name] = obj
-
-        has_labeled_metrics = any(
-            getattr(metric, "labeled", False) for metric in category_val.values()
+        contains_pings = any(
+            isinstance(obj, pings.Ping) for obj in category_val.values()
         )

-        with filepath.open("w", encoding="utf-8") as fd:
-            fd.write(
-                template.render(
-                    category_name=category_key,
-                    objs=category_val,
-                    extra_args=extra_args,
-                    namespace=namespace,
-                    glean_namespace=glean_namespace,
-                    has_labeled_metrics=has_labeled_metrics,
-                    is_ping_type=len(custom_pings) > 0,
-                    allow_reserved=options.get("allow_reserved", False)
-                )
+        cat = Category()
+        cat.name = category_key
+        cat.objs = category_val
+        cat.contains_pings = contains_pings
+
+        categories.append(cat)
+
+    with filepath.open("w", encoding="utf-8") as fd:
+        fd.write(
+            template.render(
+                categories=categories,
+                extra_args=util.extra_metric_args,
+                namespace=namespace,
+                glean_namespace=glean_namespace,
+                allow_reserved=options.get("allow_reserved", False),
             )
-            # Jinja2 squashes the final newline, so we explicitly add it
-            fd.write("\n")
+        )
+        # Jinja2 squashes the final newline, so we explicitly add it
+        fd.write("\n")
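Swift generation now writes a single Metrics.swift instead of one file per category; each category is wrapped in a `Category` record whose `contains_pings` flag decides whether the template emits a class with a shared singleton or a plain enum. The grouping step, isolated as a sketch that mirrors the code above:

from glean_parser import pings
from glean_parser.swift import Category

def build_categories(objs):
    # objs is an ObjectTree as produced by parser.parse_objects().value
    categories = []
    for category_key, category_val in objs.items():
        cat = Category()
        cat.name = category_key
        cat.objs = category_val
        # Categories containing pings render differently in the template.
        cat.contains_pings = any(
            isinstance(obj, pings.Ping) for obj in category_val.values()
        )
        categories.append(cat)
    return categories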
@@ -0,0 +1,98 @@
+// -*- mode: csharp -*-
+
+/*
+ * AUTOGENERATED BY glean_parser. DO NOT EDIT.
+ */
+{# The rendered markdown is autogenerated, but this
+   Jinja2 template is not. Please file bugs! #}
+
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+{% macro obj_declaration(obj, suffix='', access='', lazy=False) %}
+{{ access }} {% if lazy %} Lazy<{{ obj|type_name }}>{%- else %} {{ obj|type_name }}{% endif %} {{ obj.name|camelize }}{{ suffix }}
+{%- if lazy %} = new Lazy<{{ obj|type_name }}>(() => {%- else %} ={% endif %}
+
+    new {{ obj|type_name }}(
+        {% for arg_name in extra_args if obj[arg_name] is defined %}
+        {{ arg_name|camelize }}: {{ obj[arg_name]|csharp }}{{ "," if not loop.last }}
+        {% endfor %}
+    ){% if lazy %});{% else %};{% endif %}{% endmacro %}
+
+using System;
+using {{ glean_namespace }}.Private;
+
+{# The C# metrics design require the class name to have a 'Definition'
+   suffix, in order to nicely call in the metrics from the consumer code.
+   The user code will be interested in the Value of the lazy instance, so
+   that's where the real class name should be used. #}
+{% set metrics_class_name = category_name|Camelize + 'Definition' %}
+namespace {{ namespace }}
+{
+    internal sealed class {{ metrics_class_name }}
+    {
+        private static readonly Lazy<{{ metrics_class_name }}>
+            lazyInstance = new Lazy<{{ metrics_class_name }}>(() => new {{ metrics_class_name }}());
+        public static {{ metrics_class_name }} {{ category_name|Camelize }} => lazyInstance.Value;
+
+        // Private constructor to disallow instantiation from external callers.
+        private {{ metrics_class_name }}() { }
+
+#pragma warning disable IDE1006 // Naming Styles
+        {% for obj in objs.values() %}
+        {% if obj|attr("_generate_enums") %}
+        {% for name, suffix in obj["_generate_enums"] %}
+        {% if obj|attr(name)|length %}
+        internal enum {{ obj.name|camelize }}{{ suffix }} {
+            {% for key in obj|attr(name) %}
+            {{ key|camelize }}{{ "," if not loop.last }}
+            {% endfor %}
+        }
+        {% endif %}
+        {% endfor %}
+        {% endif %}
+        {% endfor %}
+
+        {% for obj in objs.values() %}
+        {% if obj.labeled %}
+        {{ obj_declaration(obj, 'Label', 'private ') }}
+        private readonly Lazy<LabeledMetricType<{{ obj|type_name }}>> {{ obj.name|camelize }}Lazy = new Lazy<LabeledMetricType<{{ obj|type_name }}>>(() => new LabeledMetricType(
+            category = {{ obj.category|csharp }},
+            name = {{ obj.name|csharp }},
+            subMetric = {{ obj.name|camelize }}Label,
+            disabled = {{ obj.is_disabled()|csharp }},
+            lifetime = {{ obj.lifetime|csharp }},
+            sendInPings = {{ obj.send_in_pings|csharp }},
+            labels = {{ obj.labels|csharp }}
+            )
+        );
+
+        /// <summary>
+        /// {{ obj.description|wordwrap() | replace('\n', '\n        /// ') }}
+        /// </summary>
+        public {{ obj|type_name }} {{ obj.name|camelize }} => {{ obj.name|camelize }}Lazy.Value;
+
+        {% else %}
+        {# Deal with non-ping objects first. We need them to be lazy and we
+           want their description to stick on an accessor object. #}
+        {% if obj.type != 'ping' %}
+        {{ obj_declaration(obj, access='private readonly', suffix='Lazy', lazy=True) }}
+
+        /// <summary>
+        /// {{ obj.description|wordwrap() | replace('\n', '\n        /// ') }}
+        /// </summary>
+        internal {{ obj|type_name }} {{ obj.name|camelize }} => {{ obj.name|camelize }}Lazy.Value;
+
+        {% else %}
+        {# Finally handle pings. #}
+        /// <summary>
+        /// {{ obj.description|wordwrap() | replace('\n', '\n        /// ') }}
+        /// </summary>
+        {{ obj_declaration(obj, access='internal readonly', lazy=False) }}
+
+        {% endif %}
+        {% endif %}
+        {%- endfor %}
+#pragma warning restore IDE1006 // Naming Styles
+    }
+}
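The `{% for arg_name in extra_args if obj[arg_name] is defined %}` loop in this template only renders constructor arguments the object actually carries, and Jinja2 computes `loop.last` over the filtered sequence, so the trailing comma stays correct. A reduced sketch of the same pattern; names and values are illustrative:

from jinja2 import Environment

template = Environment().from_string(
    "new Metric({% for a in args if obj[a] is defined %}"
    '{{ a }}: {{ obj[a] }}{{ ", " if not loop.last }}{% endfor %});'
)
# "timeUnit" is absent from the dict, so it is skipped entirely.
obj = {"category": '"example"', "name": '"anon_token"'}
print(template.render(args=["category", "name", "timeUnit"], obj=obj))
# new Metric(category: "example", name: "anon_token");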
@@ -28,19 +28,40 @@ See the Glean SDK documentation for the [`{{ ping_name }}` ping]({{ ping_name|pi
 {% if ping_name|ping_send_if_empty %}
 This ping is sent if empty.
+
+{% endif %}
+{% if ping_name|ping_include_client_id %}
+This ping includes the [client id](https://mozilla.github.io/glean/book/user/pings/index.html#the-client_info-section).
+
+{% endif %}
+{% if ping_name|ping_data_reviews %}
+**Data reviews for this ping:**
+
+{% for review in ping_name|ping_data_reviews %}
+- <{{review}}>
+{% endfor %}
+
+{% endif %}
+{% if ping_name|ping_bugs %}
+**Bugs related to this ping:**
+
+{% for bug in ping_name|ping_bugs %}
+- {% if bug|int != 0 %}{{bug}}{% else %}<{{bug}}>{% endif %}
+
+{% endfor %}
+
 {% endif %}
 {% if ping_name|ping_reasons %}
-Reasons this ping may be sent:
+**Reasons this ping may be sent:**
+
 {% for (reason, desc) in ping_name|ping_reasons|dictsort %}
-- `{{ reason }}`: {{ desc|indent(6, indentfirst=False) }}
+- `{{ reason }}`: {{ desc|indent(6, first=False) }}
 {% endfor %}

 {% endif %}
 {% if metrics_by_pings[ping_name] %}
 The following metrics are added to the ping:

-| Name | Type | Description | Data reviews | Extras | Expiration |
+| Name | Type | Description | Data reviews | Extras | Expiration | [Data Sensitivity](https://wiki.mozilla.org/Firefix/Data_Collection) |
 | --- | --- | --- | --- | --- | --- |
 {% for metric in metrics_by_pings[ping_name] %}
 | {{ metric.identifier() }} |
@@ -57,6 +78,7 @@ The following metrics are added to the ping:
 </ul>
 {%- endif -%} |
 {{- metric.expires }} |
+{{- metric.data_sensitivity|data_sensitivity_numbers }} |
 {% endfor %}
 {% else %}
 This ping contains no metrics.
@@ -64,6 +86,8 @@ This ping contains no metrics.

 {% endfor %}

+Data categories are [defined here](https://wiki.mozilla.org/Firefox/Data_Collection).
+
 <!-- AUTOGENERATED BY glean_parser. DO NOT EDIT. -->
 {# The rendered markdown is autogenerated, but this
    Jinja2 template is not. Please file bugs! #}
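The `bug|int != 0` test in the new bug list distinguishes legacy numeric bug references from URLs: Jinja2's `int` filter falls back to 0 for strings it cannot convert, so URLs take the `<...>` autolink branch. The same expression in isolation:

from jinja2 import Environment

line = Environment().from_string(
    "- {% if bug|int != 0 %}{{bug}}{% else %}<{{bug}}>{% endif %}"
)
print(line.render(bug="1635260"))
# - 1635260
print(line.render(bug="https://bugzilla.mozilla.org/show_bug.cgi?id=1635260"))
# - <https://bugzilla.mozilla.org/show_bug.cgi?id=1635260>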
@@ -26,14 +26,15 @@ import {{ glean_namespace }}
 // swiftlint:disable force_try

 extension {{ namespace }} {
-    {% if is_ping_type %}
-    class {{ category_name|Camelize }} {
-        public static let shared = {{ category_name|Camelize }}()
+    {% for category in categories %}
+    {% if category.contains_pings %}
+    class {{ category.name|Camelize }} {
+        public static let shared = {{ category.name|Camelize }}()
         private init() {
             // Intentionally left private, no external user can instantiate a new global object.
         }

-        {% for obj in objs.values() %}
+        {% for obj in category.objs.values() %}
         {% if obj|attr("_generate_enums") %}
         {% for name, suffix in obj["_generate_enums"] %}
         {% if obj|attr(name)|length %}
@@ -50,7 +51,6 @@ extension {{ namespace }} {
         {% endif %}
         {% endfor %}
         {% endif %}
-
         /// {{ obj.description|wordwrap() | replace('\n', '\n        /// ') }}
         let {{ obj.name|camelize|variable_name }} = {{obj|type_name}}(
             name: {{ obj.name|swift }},
@@ -61,9 +61,10 @@ extension {{ namespace }} {

         {% endfor %}
     }
+
     {% else %}
-    enum {{ category_name|Camelize }} {
-    {% for obj in objs.values() %}
+    enum {{ category.name|Camelize }} {
+    {% for obj in category.objs.values() %}
     {% if obj|attr("_generate_enums") %}
     {% for name, suffix in obj["_generate_enums"] %}
     {% if obj|attr(name)|length %}
@@ -81,7 +82,7 @@ extension {{ namespace }} {
     {% endfor %}
     {% endif %}
     {% endfor %}
-    {% for obj in objs.values() %}
+    {% for obj in category.objs.values() %}
     {% if obj.labeled %}
     {{ obj_declaration(obj, 'Label', 'private ') }}
     /// {{ obj.description|wordwrap() | replace('\n', '\n    /// ') }}
@@ -101,5 +102,7 @@ extension {{ namespace }} {
     {% endif %}
     {% endfor %}
     }
+
     {% endif %}
+    {% endfor %}
 }
@@ -11,64 +11,90 @@ High-level interface for translating `metrics.yaml` into other formats.
 from pathlib import Path
 import os
 import shutil
-import sys
 import tempfile
+from typing import Any, Callable, Dict, Iterable, List
 
 from . import lint
 from . import parser
+from . import csharp
 from . import kotlin
 from . import markdown
+from . import metrics
 from . import swift
 from . import util
 
 
-# Each outputter in the table has the following keys:
-# - "output_func": the main function of the outputter, the one which
-#   does the actual translation.
-# - "clear_output_dir": a flag to clear the target directory before moving there
-#   the generated files.
+class Outputter:
+    """
+    Class to define an output format.
+
+    Each outputter in the table has the following member values:
+
+    - output_func: the main function of the outputter, the one which
+      does the actual translation.
+
+    - clear_patterns: A list of glob patterns to clear in the directory before
+      writing new results to it.
+    """
+
+    def __init__(
+        self,
+        output_func: Callable[[metrics.ObjectTree, Path, Dict[str, Any]], None],
+        clear_patterns: List[str] = [],
+    ):
+        self.output_func = output_func
+        self.clear_patterns = clear_patterns
 
 
 OUTPUTTERS = {
-    "kotlin": {
-        "output_func": kotlin.output_kotlin,
-        "clear_output_dir": True,
-        "extensions": ["*.kt"],
-    },
-    "markdown": {"output_func": markdown.output_markdown, "clear_output_dir": False},
-    "swift": {
-        "output_func": swift.output_swift,
-        "clear_output_dir": True,
-        "extensions": ["*.swift"],
-    },
+    "csharp": Outputter(csharp.output_csharp, ["*.cs"]),
+    "kotlin": Outputter(kotlin.output_kotlin, ["*.kt"]),
+    "markdown": Outputter(markdown.output_markdown),
+    "swift": Outputter(swift.output_swift, ["*.swift"]),
 }
 
 
-def translate(input_filepaths, output_format, output_dir, options={}, parser_config={}):
+def translate_metrics(
+    input_filepaths: Iterable[Path],
+    output_dir: Path,
+    translation_func: Callable[[metrics.ObjectTree, Path, Dict[str, Any]], None],
+    clear_patterns: List[str] = [],
+    options: Dict[str, Any] = {},
+    parser_config: Dict[str, Any] = {},
+):
     """
-    Translate the files in `input_filepaths` to the given `output_format` and
-    put the results in `output_dir`.
+    Translate the files in `input_filepaths` by running the metrics through a
+    translation function and writing the results in `output_dir`.
 
     :param input_filepaths: list of paths to input metrics.yaml files
-    :param output_format: the name of the output format
     :param output_dir: the path to the output directory
+    :param translation_func: the function that actually performs the translation.
+        It is passed the following arguments:
+
+        - metrics_objects: The tree of metrics as pings as returned by
+          `parser.parse_objects`.
+        - output_dir: The path to the output directory.
+        - options: A dictionary of output format-specific options.
+
+        Examples of translation functions are in `kotlin.py` and `swift.py`.
+    :param clear_patterns: a list of glob patterns of files to clear before
+        generating the output files. By default, no files will be cleared (i.e.
+        the directory should be left alone).
     :param options: dictionary of options. The available options are backend
-        format specific.
+        format specific. These are passed unchanged to `translation_func`.
     :param parser_config: A dictionary of options that change parsing behavior.
         See `parser.parse_metrics` for more info.
     """
-    if output_format not in OUTPUTTERS:
-        raise ValueError("Unknown output format '{}'".format(output_format))
+    input_filepaths = util.ensure_list(input_filepaths)
+
+    if lint.glinter(input_filepaths, parser_config):
+        return 1
 
     all_objects = parser.parse_objects(input_filepaths, parser_config)
 
     if util.report_validation_errors(all_objects):
         return 1
 
-    if lint.lint_metrics(all_objects.value, parser_config):
-        print(
-            "NOTE: These warnings will become errors in a future release of Glean.",
-            file=sys.stderr,
-        )
-
     # allow_reserved is also relevant to the translators, so copy it there
     if parser_config.get("allow_reserved"):
         options["allow_reserved"] = True
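For orientation, a minimal sketch of a custom translation function satisfying the contract documented above; the manifest format and the function name are invented for illustration, and only the argument shape (object tree, output directory, options) comes from the docstring:

from pathlib import Path
from typing import Any, Dict


def output_manifest(all_objects, output_dir: Path, options: Dict[str, Any]) -> None:
    # all_objects is the ObjectTree returned by parser.parse_objects:
    # a dict of category name -> dict of metric name -> metric object.
    with (output_dir / "manifest.txt").open("w", encoding="utf-8") as fd:
        for category_name, objs in all_objects.items():
            for obj_name in objs:
                fd.write(f"{category_name}.{obj_name}\n")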
@@ -77,19 +103,16 @@ def translate(input_filepaths, output_format, output_dir, options={}, parser_config={}):
     # real directory, for transactional integrity.
     with tempfile.TemporaryDirectory() as tempdir:
         tempdir_path = Path(tempdir)
-        OUTPUTTERS[output_format]["output_func"](
-            all_objects.value, tempdir_path, options
-        )
+        translation_func(all_objects.value, tempdir_path, options)
 
-        if OUTPUTTERS[output_format]["clear_output_dir"]:
-            if output_dir.is_file():
-                output_dir.unlink()
-            elif output_dir.is_dir():
-                for extensions in OUTPUTTERS[output_format]["extensions"]:
-                    for filepath in output_dir.glob(extensions):
-                        filepath.unlink()
-                if len(list(output_dir.iterdir())):
-                    print("Extra contents found in '{}'.".format(output_dir))
+        if output_dir.is_file():
+            output_dir.unlink()
+        elif output_dir.is_dir() and len(clear_patterns):
+            for clear_pattern in clear_patterns:
+                for filepath in output_dir.glob(clear_pattern):
+                    filepath.unlink()
+            if len(list(output_dir.iterdir())):
+                print(f"Extra contents found in '{output_dir}'.")
 
         # We can't use shutil.copytree alone if the directory already exists.
         # However, if it doesn't exist, make sure to create one otherwise
@@ -99,3 +122,37 @@ def translate(input_filepaths, output_format, output_dir, options={}, parser_config={}):
             shutil.copy(str(filename), str(output_dir))
 
     return 0
+
+
+def translate(
+    input_filepaths: Iterable[Path],
+    output_format: str,
+    output_dir: Path,
+    options: Dict[str, Any] = {},
+    parser_config: Dict[str, Any] = {},
+):
+    """
+    Translate the files in `input_filepaths` to the given `output_format` and
+    put the results in `output_dir`.
+
+    :param input_filepaths: list of paths to input metrics.yaml files
+    :param output_format: the name of the output format
+    :param output_dir: the path to the output directory
+    :param options: dictionary of options. The available options are backend
+        format specific.
+    :param parser_config: A dictionary of options that change parsing behavior.
+        See `parser.parse_metrics` for more info.
+    """
+    format_desc = OUTPUTTERS.get(output_format, None)
+
+    if format_desc is None:
+        raise ValueError(f"Unknown output format '{output_format}'")
+
+    return translate_metrics(
+        input_filepaths,
+        output_dir,
+        format_desc.output_func,
+        format_desc.clear_patterns,
+        options,
+        parser_config,
+    )
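A sketch of driving the refactored entry point, assuming glean_parser 1.28.0 is importable and a metrics.yaml exists at the given path (the namespace option value is a Kotlin-backend assumption):

from pathlib import Path

from glean_parser import translate

# "kotlin" selects an Outputter from OUTPUTTERS; its clear_patterns
# (["*.kt"]) decide which stale files are removed from output_dir first.
result = translate.translate(
    [Path("metrics.yaml")],
    "kotlin",
    Path("generated"),
    options={"namespace": "org.example.GleanMetrics"},
)
assert result == 0  # non-zero signals lint or validation failures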
@@ -11,24 +11,34 @@ import json
 from pathlib import Path
 import sys
 import textwrap
+from typing import Any, Callable, Iterable, Sequence, Tuple, Union
 import urllib.request
 
-import appdirs
-import diskcache
+import appdirs  # type: ignore
+import diskcache  # type: ignore
 import jinja2
-import jsonschema
-from jsonschema import _utils
+import jsonschema  # type: ignore
+from jsonschema import _utils  # type: ignore
 import yaml
 
 if sys.version_info < (3, 7):
-    import iso8601
+    import iso8601  # type: ignore
 
 
 TESTING_MODE = "pytest" in sys.modules
 
+
+JSONType = Union[list, dict, str, int, float, None]
+"""
+The types supported by JSON.
+
+This is only an approximation -- this should really be a recursive type.
+"""
+
 # Adapted from
 # https://stackoverflow.com/questions/34667108/ignore-dates-and-times-while-parsing-yaml
 
 
 class _NoDatesSafeLoader(yaml.SafeLoader):
     @classmethod
     def remove_implicit_resolver(cls, tag_to_remove):
@@ -96,7 +106,7 @@ else:
         return yaml.dump(data, **kwargs)
 
 
-def load_yaml_or_json(path, ordered_dict=False):
+def load_yaml_or_json(path: Path, ordered_dict: bool = False):
     """
     Load the content from either a .json or .yaml file, based on the filename
     extension.
@@ -113,19 +123,19 @@ def load_yaml_or_json(path, ordered_dict=False):
         return {}
 
     if path.suffix == ".json":
-        with path.open("r") as fd:
+        with path.open("r", encoding="utf-8") as fd:
             return json.load(fd)
     elif path.suffix in (".yml", ".yaml", ".yamlx"):
-        with path.open("r") as fd:
+        with path.open("r", encoding="utf-8") as fd:
             if ordered_dict:
                 return ordered_yaml_load(fd)
             else:
                 return yaml.load(fd, Loader=_NoDatesSafeLoader)
     else:
-        raise ValueError("Unknown file extension {}".format(path.suffix))
+        raise ValueError(f"Unknown file extension {path.suffix}")
 
 
-def ensure_list(value):
+def ensure_list(value: Any) -> Sequence[Any]:
     """
     Ensures that the value is a list. If it is anything but a list or tuple, a
     list with a single element containing only value is returned.
@@ -135,7 +145,7 @@ def ensure_list(value):
         return value
 
 
-def to_camel_case(input, capitalize_first_letter):
+def to_camel_case(input: str, capitalize_first_letter: bool) -> str:
     """
     Convert the value to camelCase.
 
@@ -150,10 +160,10 @@ def to_camel_case(input, capitalize_first_letter):
     if not capitalize_first_letter:
         tokens[0] = tokens[0].lower()
     # Finally join the tokens and capitalize.
-    return ''.join(tokens)
+    return "".join(tokens)
 
 
-def camelize(value):
+def camelize(value: str) -> str:
     """
     Convert the value to camelCase (with a lower case first letter).
 
@@ -163,7 +173,7 @@ def camelize(value):
     return to_camel_case(value, False)
 
 
-def Camelize(value):
+def Camelize(value: str) -> str:
     """
     Convert the value to CamelCase (with an upper case first letter).
 
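Expected behavior of the two helpers, as a quick sketch (the outputs assume the tokenizer in to_camel_case splits on underscores):

from glean_parser.util import Camelize, camelize

assert camelize("metric_name") == "metricName"
assert Camelize("metric_name") == "MetricName"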
@@ -174,7 +184,9 @@ def Camelize(value):
 
 
 @functools.lru_cache()
-def get_jinja2_template(template_name, filters=()):
+def get_jinja2_template(
+    template_name: str, filters: Iterable[Tuple[str, Callable]] = ()
+):
     """
     Get a Jinja2 template that ships with glean_parser.
 
|
@ -236,35 +248,32 @@ def get_null_resolver(schema):
|
||||||
return NullResolver.from_schema(schema)
|
return NullResolver.from_schema(schema)
|
||||||
|
|
||||||
|
|
||||||
def fetch_remote_url(url, cache=True):
|
def fetch_remote_url(url: str, cache: bool = True):
|
||||||
"""
|
"""
|
||||||
Fetches the contents from an HTTP url or local file path, and optionally
|
Fetches the contents from an HTTP url or local file path, and optionally
|
||||||
caches it to disk.
|
caches it to disk.
|
||||||
"""
|
"""
|
||||||
|
# Include the Python version in the cache key, since caches aren't
|
||||||
|
# sharable across Python versions.
|
||||||
|
key = (url, str(sys.version_info))
|
||||||
|
|
||||||
is_http = url.startswith("http")
|
is_http = url.startswith("http")
|
||||||
|
|
||||||
if not is_http:
|
if not is_http:
|
||||||
with open(url, "r", encoding="utf-8") as fd:
|
with open(url, "r", encoding="utf-8") as fd:
|
||||||
contents = fd.read()
|
return fd.read()
|
||||||
return contents
|
|
||||||
|
|
||||||
if cache:
|
if cache:
|
||||||
cache_dir = appdirs.user_cache_dir("glean_parser", "mozilla")
|
cache_dir = appdirs.user_cache_dir("glean_parser", "mozilla")
|
||||||
with diskcache.Cache(cache_dir) as dc:
|
with diskcache.Cache(cache_dir) as dc:
|
||||||
if url in dc:
|
if key in dc:
|
||||||
return dc[url]
|
return dc[key]
|
||||||
|
|
||||||
contents = urllib.request.urlopen(url).read()
|
contents: str = urllib.request.urlopen(url).read()
|
||||||
|
|
||||||
# On Python 3.5, urlopen does not handle the unicode decoding for us. This
|
|
||||||
# is ok because we control these files and we know they are in UTF-8,
|
|
||||||
# however, this wouldn't be correct in general.
|
|
||||||
if sys.version_info < (3, 6):
|
|
||||||
contents = contents.decode("utf8")
|
|
||||||
|
|
||||||
if cache:
|
if cache:
|
||||||
with diskcache.Cache(cache_dir) as dc:
|
with diskcache.Cache(cache_dir) as dc:
|
||||||
dc[url] = contents
|
dc[key] = contents
|
||||||
|
|
||||||
return contents
|
return contents
|
||||||
|
|
||||||
|
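The reworked cache key is just a (url, version) tuple; a rough sketch of why that disambiguates entries when several Python versions share one cache directory:

import sys

key = ("https://example.com/schema.json", str(sys.version_info))
# str(sys.version_info) differs per interpreter (it contains e.g.
# "major=3, minor=8" on Python 3.8), so an entry written by one
# Python version is never read back by another.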
@ -272,7 +281,7 @@ def fetch_remote_url(url, cache=True):
|
||||||
_unset = _utils.Unset()
|
_unset = _utils.Unset()
|
||||||
|
|
||||||
|
|
||||||
def pprint_validation_error(error):
|
def pprint_validation_error(error) -> str:
|
||||||
"""
|
"""
|
||||||
A version of jsonschema's ValidationError __str__ method that doesn't
|
A version of jsonschema's ValidationError __str__ method that doesn't
|
||||||
include the schema fragment that failed. This makes the error messages
|
include the schema fragment that failed. This makes the error messages
|
||||||
|
@ -313,7 +322,7 @@ def pprint_validation_error(error):
|
||||||
return "\n".join(parts)
|
return "\n".join(parts)
|
||||||
|
|
||||||
|
|
||||||
def format_error(filepath, header, content):
|
def format_error(filepath: Union[str, Path], header: str, content: str) -> str:
|
||||||
"""
|
"""
|
||||||
Format a jsonshema validation error.
|
Format a jsonshema validation error.
|
||||||
"""
|
"""
|
||||||
|
@@ -322,12 +331,12 @@ def format_error(filepath, header, content):
     else:
         filepath = "<string>"
     if header:
-        return "{}: {}\n{}".format(filepath, header, _utils.indent(content))
+        return f"{filepath}: {header}\n{_utils.indent(content)}"
     else:
-        return "{}:\n{}".format(filepath, _utils.indent(content))
+        return f"{filepath}:\n{_utils.indent(content)}"
 
 
-def is_expired(expires):
+def is_expired(expires: str) -> bool:
     """
     Parses the `expires` field in a metric or ping and returns whether
     the object should be considered expired.
@@ -344,15 +353,13 @@ def is_expired(expires):
         date = datetime.date.fromisoformat(expires)
     except ValueError:
         raise ValueError(
-            (
-                "Invalid expiration date '{}'. "
-                "Must be of the form yyyy-mm-dd in UTC."
-            ).format(expires)
+            f"Invalid expiration date '{expires}'. "
+            "Must be of the form yyyy-mm-dd in UTC."
         )
     return date <= datetime.datetime.utcnow().date()
 
 
-def validate_expires(expires):
+def validate_expires(expires: str) -> None:
     """
     Raises ValueError if `expires` is not valid.
     """
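The tightened error path above, exercised directly (a sketch; date.fromisoformat rejects anything that is not yyyy-mm-dd):

import datetime

try:
    datetime.date.fromisoformat("01/01/2000")
except ValueError:
    # is_expired() re-raises this as the friendlier message:
    # "Invalid expiration date '01/01/2000'. Must be of the form yyyy-mm-dd in UTC."
    pass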
@@ -374,3 +381,40 @@ def report_validation_errors(all_objects):
     print("=" * 78, file=sys.stderr)
     print(error, file=sys.stderr)
     return found_error
+
+
+# Names of metric parameters to pass to constructors.
+# This includes only things that the language bindings care about, not things
+# that are metadata-only or are resolved into other parameters at parse time.
+# **CAUTION**: This list needs to be in the order the Swift type constructors
+# expects them. (The other language bindings don't care about the order). The
+# `test_order_of_fields` test checks that the generated code is valid.
+# **DO NOT CHANGE THE ORDER OR ADD NEW FIELDS IN THE MIDDLE**
+extra_metric_args = [
+    "category",
+    "name",
+    "send_in_pings",
+    "lifetime",
+    "disabled",
+    "time_unit",
+    "memory_unit",
+    "allowed_extra_keys",
+    "reason_codes",
+    "bucket_count",
+    "range_max",
+    "range_min",
+    "histogram_type",
+]
+
+
+# Names of ping parameters to pass to constructors.
+extra_ping_args = [
+    "include_client_id",
+    "send_if_empty",
+    "name",
+    "reason_codes",
+]
+
+
+# Names of parameters to pass to both metric and ping constructors.
+extra_args = list(set(extra_metric_args) | set(extra_ping_args))
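A sketch of how a language binding might consume the ordered list above; the assumption that parsed metric objects expose matching attribute names is for illustration only:

from glean_parser.util import extra_metric_args


def constructor_args(metric):
    # Pull constructor arguments off a parsed metric in the prescribed order,
    # skipping parameters a given metric type does not carry.
    return [getattr(metric, arg) for arg in extra_metric_args if hasattr(metric, arg)]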
@@ -14,7 +14,7 @@ import json
 from pathlib import Path
 import sys
 
-import jsonschema
+import jsonschema  # type: ignore
 
 from . import util
 
@@ -68,7 +68,7 @@ def validate_ping(ins, outs=None, schema_url=None):
         outs = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8")
 
     if isinstance(ins, (str, bytes, Path)):
-        with open(ins, "r") as fd:
+        with open(ins, "r", encoding="utf-8") as fd:
             return _validate_ping(fd, outs, schema_url=schema_url)
     else:
         return _validate_ping(ins, outs, schema_url=schema_url)
@@ -2,6 +2,7 @@ black==19.10b0
 coverage==4.5.2
 flake8==3.7.8
 m2r==0.2.1
+mypy==0.761
 pip
 pytest-runner==4.4
 pytest==4.3.0

@@ -1,11 +0,0 @@
-coverage==4.5.2
-flake8==3.7.8
-m2r==0.2.1
-pip
-pytest-runner==4.4
-pytest==4.3.0
-Sphinx==1.8.4
-twine==1.13.0
-watchdog==0.9.0
-wheel
-yamllint==1.18.0
@@ -12,33 +12,26 @@ import sys
 from setuptools import setup, find_packages
 
 
-if sys.version_info < (3, 5):
-    print("glean_parser requires at least Python 3.5", file=sys.stderr)
+if sys.version_info < (3, 6):
+    print("glean_parser requires at least Python 3.6", file=sys.stderr)
     sys.exit(1)
 
 
-with open("README.rst") as readme_file:
+with open("README.rst", encoding="utf-8") as readme_file:
     readme = readme_file.read()
 
-with open("HISTORY.rst") as history_file:
+with open("HISTORY.rst", encoding="utf-8") as history_file:
     history = history_file.read()
 
 requirements = [
-    "appdirs>=1.4.3",
-    "Click>=7.0",
-    "diskcache>=4.0.0",
-    "iso8601>=0.1.12",
-    "Jinja2>=2.10.1,<3.0",
+    "appdirs>=1.4",
+    "Click>=7",
+    "diskcache>=4",
+    "iso8601>=0.1.10; python_version<='3.6'",
+    "Jinja2>=2.10.1",
     "jsonschema>=3.0.2",
-    # 'markupsafe' is required by Jinja2. From version 2.0.0 on
-    # py3.5 support is dropped.
-    "markupsafe>=1.1,<2.0.0",
-    "pep487==1.0.1",
     "PyYAML>=3.13",
     "yamllint>=1.18.0",
-    # 'zipp' is required by jsonschema->importlib_metadata,
-    # it drops py3.5 in newer versions.
-    "zipp>=0.5,<2.0",
 ]
 
 setup_requirements = ["pytest-runner", "setuptools-scm"]
@@ -55,7 +48,6 @@ setup(
         "Intended Audience :: Developers",
         "Natural Language :: English",
         "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.5",
         "Programming Language :: Python :: 3.6",
         "Programming Language :: Python :: 3.7",
         "Programming Language :: Python :: 3.8",
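The new iso8601 pin relies on an environment marker, so the backport is only installed on interpreters that need it. A sketch of how such markers evaluate, assuming the packaging library is available:

from packaging.markers import Marker

marker = Marker("python_version <= '3.6'")
assert marker.evaluate({"python_version": "3.6"})
assert not marker.evaluate({"python_version": "3.8"})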
@@ -0,0 +1,176 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

"""
Usage:
    python extract_data_categories.py metrics.yaml

Automatically extract the data collection categories for all the metrics in a
metrics.yaml file by consulting the linked data reviews.

This script reads a metrics.yaml file, visits all of the associated data
reviews, trying to determine the associated data categories, and inserts them
(in place) to the original metrics.yaml file.

A very simple heuristic is used: to look for the question about data categories
used in all data reviews, and then find any numbers between it and the next
question. When this simple heuristic fails, comments with "!!!" are inserted in
the output as a recommendation to manually investigate and enter the data
categories.

Requirements from PyPI: BeautifulSoup4, PyYAML
"""

import dbm
import functools
import re
import sys
import time
from typing import List, Set
from urllib.request import urlopen

from bs4 import BeautifulSoup
import yaml


cache = dbm.open("bugzilla-cache.db", "c")

QUESTION = "what collection type of data do the requested measurements fall under?"

CATEGORY_MAP = {
    1: "technical",
    2: "interaction",
    3: "web_activity",
    4: "highly_sensitive",
}


def fetch_url(url: str) -> str:
    """
    Fetch a web page containing a data review, caching it to avoid
    over-fetching.
    """
    content = cache.get(url)
    if content is not None:
        return content

    print(f"Fetching {url}")
    content = urlopen(url).read()
    cache[url] = content
    time.sleep(0.5)
    return content


@functools.lru_cache(1000)
def parse_data_review(html: str) -> Set[int]:
    """
    Parse a single data review.
    """
    soup = BeautifulSoup(html, features="html.parser")
    text = soup.get_text()
    lines = iter(text.splitlines())
    for line in lines:
        if QUESTION in line.strip():
            break

    categories: Set[int] = set()
    for line in lines:
        if "?" in line:
            break
        categories.update(int(x) for x in re.findall("[0-9]+", line))

    return categories


def categories_as_strings(categories: Set[int]) -> List[str]:
    """
    From a set of numeric categories, return the strings used in a metrics.yaml
    file. This may contain strings representing errors.
    """
    if len(categories):
        return [
            CATEGORY_MAP.get(x, f"!!!UNKNOWN CATEGORY {x}")
            for x in sorted(list(categories))
        ]
    else:
        return ["!!! NO DATA CATEGORIES FOUND"]


def update_lines(
    lines: List[str],
    category_name: str,
    metric_name: str,
    data_sensitivity_values: List[str],
) -> List[str]:
    """
    Update the lines of a YAML file in place to include the data_sensitivity
    for the given metric, returning the lines of the result.
    """
    output = []
    lines_iter = iter(lines)

    for line in lines_iter:
        output.append(line)
        if line.startswith(f"{category_name}:"):
            break

    for line in lines_iter:
        output.append(line)
        if line.startswith(f"  {metric_name}:"):
            break

    for line in lines_iter:
        output.append(line)
        if line.startswith(f"    data_reviews:"):
            break

    for line in lines_iter:
        if not line.strip().startswith("- "):
            output.append("    data_sensitivity:\n")
            for data_sensitivity in data_sensitivity_values:
                output.append(f"      - {data_sensitivity}\n")
            output.append(line)
            break
        else:
            output.append(line)

    for line in lines_iter:
        output.append(line)

    return output


def parse_yaml(yamlpath: str):
    with open(yamlpath) as fd:
        content = yaml.safe_load(fd)

    with open(yamlpath) as fd:
        lines = list(fd.readlines())

    for category_name, category in content.items():
        if category_name.startswith("$") or category_name == "no_lint":
            continue
        for metric_name, metric in category.items():
            categories = set()
            for data_review_url in metric["data_reviews"]:
                html = fetch_url(data_review_url)
                categories.update(parse_data_review(html))
            lines = update_lines(
                lines, category_name, metric_name, categories_as_strings(categories)
            )

    with open(yamlpath, "w") as fd:
        for line in lines:
            fd.write(line)


if __name__ == "__main__":
    parse_yaml(sys.argv[-1])
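A toy check of the heuristic described in the docstring, with the functions above in scope and BeautifulSoup4 installed: the integers between the category question and the next question become the category set.

html = """
<p>what collection type of data do the requested measurements fall under?</p>
<p>Category 1, technical data</p>
<p>Is the data collection default-on or default-off?</p>
"""
assert parse_data_review(html) == {1}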
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: importlib_metadata
-Version: 1.5.0
+Version: 1.7.0
 Summary: Read metadata from Python packages
 Home-page: http://importlib-metadata.readthedocs.io/
 Author: Barry Warsaw

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: importlib-metadata
-Version: 1.5.0
+Version: 1.7.0
 Summary: Read metadata from Python packages
 Home-page: http://importlib-metadata.readthedocs.io/
 Author: Barry Warsaw
@@ -11,6 +11,7 @@ rst.linker
 
 [testing]
 packaging
+pep517
 
-[testing:python_version < "3.7"]
-importlib_resources
+[testing:python_version < "3.9"]
+importlib_resources>=1.3
@@ -28,6 +28,8 @@ from ._compat import (
     MetaPathFinder,
     email_message_from_string,
     PyPy_repr,
+    unique_ordered,
+    str,
 )
 from importlib import import_module
 from itertools import starmap
@@ -53,6 +55,15 @@ __all__ = [
 class PackageNotFoundError(ModuleNotFoundError):
     """The package was not found."""
 
+    def __str__(self):
+        tmpl = "No package metadata was found for {self.name}"
+        return tmpl.format(**locals())
+
+    @property
+    def name(self):
+        name, = self.args
+        return name
+
 
 class EntryPoint(
         PyPy_repr,
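The new error message in action, as a quick sketch:

from importlib_metadata import PackageNotFoundError

try:
    raise PackageNotFoundError("no-such-dist")
except PackageNotFoundError as exc:
    assert exc.name == "no-such-dist"
    assert str(exc) == "No package metadata was found for no-such-dist"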
@@ -95,6 +106,16 @@ class EntryPoint(
         attrs = filter(None, (match.group('attr') or '').split('.'))
         return functools.reduce(getattr, attrs, module)
 
+    @property
+    def module(self):
+        match = self.pattern.match(self.value)
+        return match.group('module')
+
+    @property
+    def attr(self):
+        match = self.pattern.match(self.value)
+        return match.group('attr')
+
     @property
     def extras(self):
         match = self.pattern.match(self.value)
@@ -187,7 +208,7 @@ class Distribution:
         """
         for resolver in cls._discover_resolvers():
             dists = resolver(DistributionFinder.Context(name=name))
-            dist = next(dists, None)
+            dist = next(iter(dists), None)
             if dist is not None:
                 return dist
         else:
@@ -230,6 +251,17 @@ class Distribution:
         )
         return filter(None, declared)
 
+    @classmethod
+    def _local(cls, root='.'):
+        from pep517 import build, meta
+        system = build.compat_system(root)
+        builder = functools.partial(
+            meta.build,
+            source_dir=root,
+            system=system,
+        )
+        return PathDistribution(zipp.Path(meta.build_as_zip(builder)))
+
     @property
     def metadata(self):
         """Return the parsed metadata for this Distribution.
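What the provisional _local() hook enables, sketched under the assumption that pep517 is installed and the current directory contains a buildable project:

from importlib_metadata import Distribution

dist = Distribution._local()  # builds metadata on the fly via pep517.meta
print(dist.metadata["Name"], dist.version)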
@@ -407,8 +439,8 @@ class FastPath:
     """
 
     def __init__(self, root):
-        self.root = root
-        self.base = os.path.basename(root).lower()
+        self.root = str(root)
+        self.base = os.path.basename(self.root).lower()
 
     def joinpath(self, child):
         return pathlib.Path(self.root, child)
@@ -425,8 +457,8 @@ class FastPath:
         names = zip_path.root.namelist()
         self.joinpath = zip_path.joinpath
 
-        return (
-            posixpath.split(child)[0]
+        return unique_ordered(
+            child.split(posixpath.sep, 1)[0]
             for child in names
         )
@@ -1,4 +1,4 @@
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 
 import io
 import abc
@@ -9,21 +9,27 @@ import email
 if sys.version_info > (3,):  # pragma: nocover
     import builtins
     from configparser import ConfigParser
-    from contextlib import suppress
+    import contextlib
     FileNotFoundError = builtins.FileNotFoundError
     IsADirectoryError = builtins.IsADirectoryError
     NotADirectoryError = builtins.NotADirectoryError
     PermissionError = builtins.PermissionError
     map = builtins.map
+    from itertools import filterfalse
 else:  # pragma: nocover
     from backports.configparser import ConfigParser
     from itertools import imap as map  # type: ignore
-    from contextlib2 import suppress  # noqa
+    from itertools import ifilterfalse as filterfalse
+    import contextlib2 as contextlib
     FileNotFoundError = IOError, OSError
     IsADirectoryError = IOError, OSError
     NotADirectoryError = IOError, OSError
     PermissionError = IOError, OSError
 
+str = type('')
+
+suppress = contextlib.suppress
+
 if sys.version_info > (3, 5):  # pragma: nocover
     import pathlib
 else:  # pragma: nocover
@@ -129,3 +135,18 @@ class PyPy_repr:
     if affected:  # pragma: nocover
         __repr__ = __compat_repr__
     del affected
+
+
+# from itertools recipes
+def unique_everseen(iterable):  # pragma: nocover
+    "List unique elements, preserving order. Remember all elements ever seen."
+    seen = set()
+    seen_add = seen.add
+
+    for element in filterfalse(seen.__contains__, iterable):
+        seen_add(element)
+        yield element
+
+
+unique_ordered = (
+    unique_everseen if sys.version_info < (3, 7) else dict.fromkeys)
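Both arms of unique_ordered deduplicate while preserving first-seen order; a quick sketch:

from importlib_metadata._compat import unique_everseen

assert list(unique_everseen("abracadabra")) == ["a", "b", "r", "c", "d"]
# On Python >= 3.7 the same guarantee comes from dict.fromkeys:
assert list(dict.fromkeys("abracadabra")) == ["a", "b", "r", "c", "d"]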
@@ -2,6 +2,46 @@
 importlib_metadata NEWS
 =========================
 
+v1.7.0
+======
+
+* ``PackageNotFoundError`` now has a custom ``__str__``
+  mentioning "package metadata" being missing to help
+  guide users to the cause when the package is installed
+  but no metadata is present. Closes #124.
+
+v1.6.1
+======
+
+* Added ``Distribution._local()`` as a provisional
+  demonstration of how to load metadata for a local
+  package. Implicitly requires that
+  `pep517 <https://pypi.org/project/pep517>`_ is
+  installed. Ref #42.
+* Ensure inputs to FastPath are Unicode. Closes #121.
+* Tests now rely on ``importlib.resources.files`` (and
+  backport) instead of the older ``path`` function.
+* Support any iterable from ``find_distributions``.
+  Closes #122.
+
+v1.6.0
+======
+
+* Added ``module`` and ``attr`` attributes to ``EntryPoint``
+
+v1.5.2
+======
+
+* Fix redundant entries from ``FastPath.zip_children``.
+  Closes #117.
+
+v1.5.1
+======
+
+* Improve reliability and consistency of compatibility
+  imports for contextlib and pathlib when running tests.
+  Closes #116.
+
 v1.5.0
 ======
@@ -70,7 +70,9 @@ Entry points
 The ``entry_points()`` function returns a dictionary of all entry points,
 keyed by group. Entry points are represented by ``EntryPoint`` instances;
 each ``EntryPoint`` has a ``.name``, ``.group``, and ``.value`` attributes and
-a ``.load()`` method to resolve the value::
+a ``.load()`` method to resolve the value. There are also ``.module``,
+``.attr``, and ``.extras`` attributes for getting the components of the
+``.value`` attribute::
 
     >>> eps = entry_points()
     >>> list(eps)
@@ -79,6 +81,12 @@ a ``.load()`` method to resolve the value::
     >>> wheel = [ep for ep in scripts if ep.name == 'wheel'][0]
     >>> wheel
     EntryPoint(name='wheel', value='wheel.cli:main', group='console_scripts')
+    >>> wheel.module
+    'wheel.cli'
+    >>> wheel.attr
+    'main'
+    >>> wheel.extras
+    []
     >>> main = wheel.load()
     >>> main
     <function main at 0x103528488>
@@ -87,7 +95,7 @@ The ``group`` and ``name`` are arbitrary values defined by the package author
 and usually a client will wish to resolve all entry points for a particular
 group. Read `the setuptools docs
 <https://setuptools.readthedocs.io/en/latest/setuptools.html#dynamic-discovery-of-services-and-plugins>`_
-for more information on entrypoints, their definition, and usage.
+for more information on entry points, their definition, and usage.
 
 
 .. _metadata:
@@ -228,7 +236,7 @@ method::
     """
 
 The ``DistributionFinder.Context`` object provides ``.path`` and ``.name``
-properties indicating the path to search and names to match and may
+properties indicating the path to search and name to match and may
 supply other relevant context.
 
 What this means in practice is that to support finding distribution package
third_party/python/importlib_metadata/importlib_metadata/tests/__init__.py (vendored, new file, 0 lines)
third_party/python/importlib_metadata/importlib_metadata/tests/data/__init__.py (vendored, new file, 0 lines)
third_party/python/importlib_metadata/importlib_metadata/tests/data/example-21.12-py3-none-any.whl (vendored, new binary file, not shown)
third_party/python/importlib_metadata/importlib_metadata/tests/data/example-21.12-py3.6.egg (vendored, new binary file, not shown)
third_party/python/importlib_metadata/importlib_metadata/tests/fixtures.py (vendored, new file, 232 lines)
@@ -0,0 +1,232 @@
from __future__ import unicode_literals

import os
import sys
import shutil
import tempfile
import textwrap
import test.support

from .._compat import pathlib, contextlib


__metaclass__ = type


@contextlib.contextmanager
def tempdir():
    tmpdir = tempfile.mkdtemp()
    try:
        yield pathlib.Path(tmpdir)
    finally:
        shutil.rmtree(tmpdir)


@contextlib.contextmanager
def save_cwd():
    orig = os.getcwd()
    try:
        yield
    finally:
        os.chdir(orig)


@contextlib.contextmanager
def tempdir_as_cwd():
    with tempdir() as tmp:
        with save_cwd():
            os.chdir(str(tmp))
            yield tmp


@contextlib.contextmanager
def install_finder(finder):
    sys.meta_path.append(finder)
    try:
        yield
    finally:
        sys.meta_path.remove(finder)


class Fixtures:
    def setUp(self):
        self.fixtures = contextlib.ExitStack()
        self.addCleanup(self.fixtures.close)


class SiteDir(Fixtures):
    def setUp(self):
        super(SiteDir, self).setUp()
        self.site_dir = self.fixtures.enter_context(tempdir())


class OnSysPath(Fixtures):
    @staticmethod
    @contextlib.contextmanager
    def add_sys_path(dir):
        sys.path[:0] = [str(dir)]
        try:
            yield
        finally:
            sys.path.remove(str(dir))

    def setUp(self):
        super(OnSysPath, self).setUp()
        self.fixtures.enter_context(self.add_sys_path(self.site_dir))


class DistInfoPkg(OnSysPath, SiteDir):
    files = {
        "distinfo_pkg-1.0.0.dist-info": {
            "METADATA": """
                Name: distinfo-pkg
                Author: Steven Ma
                Version: 1.0.0
                Requires-Dist: wheel >= 1.0
                Requires-Dist: pytest; extra == 'test'
                """,
            "RECORD": "mod.py,sha256=abc,20\n",
            "entry_points.txt": """
                [entries]
                main = mod:main
                ns:sub = mod:main
                """
            },
        "mod.py": """
            def main():
                print("hello world")
            """,
        }

    def setUp(self):
        super(DistInfoPkg, self).setUp()
        build_files(DistInfoPkg.files, self.site_dir)


class DistInfoPkgOffPath(SiteDir):
    def setUp(self):
        super(DistInfoPkgOffPath, self).setUp()
        build_files(DistInfoPkg.files, self.site_dir)


class EggInfoPkg(OnSysPath, SiteDir):
    files = {
        "egginfo_pkg.egg-info": {
            "PKG-INFO": """
                Name: egginfo-pkg
                Author: Steven Ma
                License: Unknown
                Version: 1.0.0
                Classifier: Intended Audience :: Developers
                Classifier: Topic :: Software Development :: Libraries
                """,
            "SOURCES.txt": """
                mod.py
                egginfo_pkg.egg-info/top_level.txt
                """,
            "entry_points.txt": """
                [entries]
                main = mod:main
                """,
            "requires.txt": """
                wheel >= 1.0; python_version >= "2.7"
                [test]
                pytest
                """,
            "top_level.txt": "mod\n"
            },
        "mod.py": """
            def main():
                print("hello world")
            """,
        }

    def setUp(self):
        super(EggInfoPkg, self).setUp()
        build_files(EggInfoPkg.files, prefix=self.site_dir)


class EggInfoFile(OnSysPath, SiteDir):
    files = {
        "egginfo_file.egg-info": """
            Metadata-Version: 1.0
            Name: egginfo_file
            Version: 0.1
            Summary: An example package
            Home-page: www.example.com
            Author: Eric Haffa-Vee
            Author-email: eric@example.coms
            License: UNKNOWN
            Description: UNKNOWN
            Platform: UNKNOWN
            """,
        }

    def setUp(self):
        super(EggInfoFile, self).setUp()
        build_files(EggInfoFile.files, prefix=self.site_dir)


class LocalPackage:
    files = {
        "setup.py": """
            import setuptools
            setuptools.setup(name="local-pkg", version="2.0.1")
            """,
        }

    def setUp(self):
        self.fixtures = contextlib.ExitStack()
        self.addCleanup(self.fixtures.close)
        self.fixtures.enter_context(tempdir_as_cwd())
        build_files(self.files)


def build_files(file_defs, prefix=pathlib.Path()):
    """Build a set of files/directories, as described by the

    file_defs dictionary. Each key/value pair in the dictionary is
    interpreted as a filename/contents pair. If the contents value is a
    dictionary, a directory is created, and the dictionary interpreted
    as the files within it, recursively.

    For example:

    {"README.txt": "A README file",
     "foo": {
        "__init__.py": "",
        "bar": {
            "__init__.py": "",
        },
        "baz.py": "# Some code",
     }
    }
    """
    for name, contents in file_defs.items():
        full_name = prefix / name
        if isinstance(contents, dict):
            full_name.mkdir()
            build_files(contents, prefix=full_name)
        else:
            if isinstance(contents, bytes):
                with full_name.open('wb') as f:
                    f.write(contents)
            else:
                with full_name.open('w') as f:
                    f.write(DALS(contents))


class FileBuilder:
    def unicode_filename(self):
        return test.support.FS_NONASCII or \
            self.skip("File system does not support non-ascii.")


def DALS(str):
    "Dedent and left-strip"
    return textwrap.dedent(str).lstrip()


class NullFinder:
    def find_module(self, name):
        pass
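A quick sketch of build_files at work together with the tempdir helper, with the fixtures module above in scope:

with tempdir() as prefix:
    build_files({"pkg": {"__init__.py": ""}}, prefix=prefix)
    assert (prefix / "pkg" / "__init__.py").exists()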
third_party/python/importlib_metadata/importlib_metadata/tests/test_api.py (vendored, new file, 176 lines)
@ -0,0 +1,176 @@
|
||||||
|
import re
|
||||||
|
import textwrap
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from . import fixtures
|
||||||
|
from .. import (
|
||||||
|
Distribution, PackageNotFoundError, __version__, distribution,
|
||||||
|
entry_points, files, metadata, requires, version,
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
from collections.abc import Iterator
|
||||||
|
except ImportError:
|
||||||
|
from collections import Iterator # noqa: F401
|
||||||
|
|
||||||
|
try:
|
||||||
|
from builtins import str as text
|
||||||
|
except ImportError:
|
||||||
|
from __builtin__ import unicode as text
|
||||||
|
|
||||||
|
|
||||||
|
class APITests(
|
||||||
|
fixtures.EggInfoPkg,
|
||||||
|
fixtures.DistInfoPkg,
|
||||||
|
fixtures.EggInfoFile,
|
||||||
|
unittest.TestCase):
|
||||||
|
|
||||||
|
version_pattern = r'\d+\.\d+(\.\d)?'
|
||||||
|
|
||||||
|
def test_retrieves_version_of_self(self):
|
||||||
|
pkg_version = version('egginfo-pkg')
|
||||||
|
assert isinstance(pkg_version, text)
|
||||||
|
assert re.match(self.version_pattern, pkg_version)
|
||||||
|
|
||||||
|
def test_retrieves_version_of_distinfo_pkg(self):
|
||||||
|
pkg_version = version('distinfo-pkg')
|
||||||
|
assert isinstance(pkg_version, text)
|
||||||
|
assert re.match(self.version_pattern, pkg_version)
|
||||||
|
|
||||||
|
def test_for_name_does_not_exist(self):
|
||||||
|
with self.assertRaises(PackageNotFoundError):
|
||||||
|
distribution('does-not-exist')
|
||||||
|
|
||||||
|
def test_for_top_level(self):
|
||||||
|
self.assertEqual(
|
||||||
|
distribution('egginfo-pkg').read_text('top_level.txt').strip(),
|
||||||
|
'mod')
|
||||||
|
|
||||||
|
def test_read_text(self):
|
||||||
|
top_level = [
|
||||||
|
path for path in files('egginfo-pkg')
|
||||||
|
if path.name == 'top_level.txt'
|
||||||
|
][0]
|
||||||
|
self.assertEqual(top_level.read_text(), 'mod\n')
|
||||||
|
|
||||||
|
def test_entry_points(self):
|
||||||
|
entries = dict(entry_points()['entries'])
|
||||||
|
ep = entries['main']
|
||||||
|
self.assertEqual(ep.value, 'mod:main')
|
||||||
|
self.assertEqual(ep.extras, [])
|
||||||
|
|
||||||
|
def test_metadata_for_this_package(self):
|
||||||
|
md = metadata('egginfo-pkg')
|
||||||
|
assert md['author'] == 'Steven Ma'
|
||||||
|
        assert md['LICENSE'] == 'Unknown'
        assert md['Name'] == 'egginfo-pkg'
        classifiers = md.get_all('Classifier')
        assert 'Topic :: Software Development :: Libraries' in classifiers

    def test_importlib_metadata_version(self):
        assert re.match(self.version_pattern, __version__)

    @staticmethod
    def _test_files(files):
        root = files[0].root
        for file in files:
            assert file.root == root
            assert not file.hash or file.hash.value
            assert not file.hash or file.hash.mode == 'sha256'
            assert not file.size or file.size >= 0
            assert file.locate().exists()
            assert isinstance(file.read_binary(), bytes)
            if file.name.endswith('.py'):
                file.read_text()

    def test_file_hash_repr(self):
        try:
            assertRegex = self.assertRegex
        except AttributeError:
            # Python 2
            assertRegex = self.assertRegexpMatches

        util = [
            p for p in files('distinfo-pkg')
            if p.name == 'mod.py'
        ][0]
        assertRegex(
            repr(util.hash),
            '<FileHash mode: sha256 value: .*>')

    def test_files_dist_info(self):
        self._test_files(files('distinfo-pkg'))

    def test_files_egg_info(self):
        self._test_files(files('egginfo-pkg'))

    def test_version_egg_info_file(self):
        self.assertEqual(version('egginfo-file'), '0.1')

    def test_requires_egg_info_file(self):
        requirements = requires('egginfo-file')
        self.assertIsNone(requirements)

    def test_requires_egg_info(self):
        deps = requires('egginfo-pkg')
        assert len(deps) == 2
        assert any(
            dep == 'wheel >= 1.0; python_version >= "2.7"'
            for dep in deps
        )

    def test_requires_dist_info(self):
        deps = requires('distinfo-pkg')
        assert len(deps) == 2
        assert all(deps)
        assert 'wheel >= 1.0' in deps
        assert "pytest; extra == 'test'" in deps

    def test_more_complex_deps_requires_text(self):
        requires = textwrap.dedent("""
            dep1
            dep2

            [:python_version < "3"]
            dep3

            [extra1]
            dep4

            [extra2:python_version < "3"]
            dep5
            """)
        deps = sorted(Distribution._deps_from_requires_text(requires))
        expected = [
            'dep1',
            'dep2',
            'dep3; python_version < "3"',
            'dep4; extra == "extra1"',
            'dep5; (python_version < "3") and extra == "extra2"',
        ]
        # It's important that the environment marker expression be
        # wrapped in parentheses to avoid the following 'and' binding more
        # tightly than some other part of the environment expression.
        # (A standalone check of this grouping appears after this file.)

        assert deps == expected


class OffSysPathTests(fixtures.DistInfoPkgOffPath, unittest.TestCase):
    def test_find_distributions_specified_path(self):
        dists = Distribution.discover(path=[str(self.site_dir)])
        assert any(
            dist.metadata['Name'] == 'distinfo-pkg'
            for dist in dists
        )

    def test_distribution_at_pathlib(self):
        """Demonstrate how to load metadata direct from a directory.
        """
        dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info'
        dist = Distribution.at(dist_info_path)
        assert dist.version == '1.0.0'

    def test_distribution_at_str(self):
        dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info'
        dist = Distribution.at(str(dist_info_path))
        assert dist.version == '1.0.0'
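The parenthesization that `test_more_complex_deps_requires_text` insists on can be checked directly with `packaging.markers`; a minimal sketch, assuming the `packaging` library is available (the environment values below are illustrative, not part of the suite):

```python
# Evaluate the generated marker under two hypothetical environments to
# confirm the parenthesized grouping behaves as the test expects.
from packaging.markers import Marker

marker = Marker('(python_version < "3") and extra == "extra2"')
# Applies on Python 2 when the extra is requested...
assert marker.evaluate({"python_version": "2.7", "extra": "extra2"})
# ...but not on Python 3, even with the same extra.
assert not marker.evaluate({"python_version": "3.8", "extra": "extra2"})
```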
third_party/python/importlib_metadata/importlib_metadata/tests/test_integration.py (vendored, new file, 54 lines)
@@ -0,0 +1,54 @@
# coding: utf-8

from __future__ import unicode_literals

import unittest
import packaging.requirements
import packaging.version

from . import fixtures
from .. import (
    Distribution,
    _compat,
    version,
)


class IntegrationTests(fixtures.DistInfoPkg, unittest.TestCase):

    def test_package_spec_installed(self):
        """
        Illustrate the recommended procedure to determine if
        a specified version of a package is installed.
        """
        def is_installed(package_spec):
            req = packaging.requirements.Requirement(package_spec)
            return version(req.name) in req.specifier

        assert is_installed('distinfo-pkg==1.0')
        assert is_installed('distinfo-pkg>=1.0,<2.0')
        assert not is_installed('distinfo-pkg<1.0')


class FinderTests(fixtures.Fixtures, unittest.TestCase):

    def test_finder_without_module(self):
        class ModuleFreeFinder(fixtures.NullFinder):
            """
            A finder without an __module__ attribute
            """
            def __getattribute__(self, name):
                if name == '__module__':
                    raise AttributeError(name)
                return super().__getattribute__(name)

        self.fixtures.enter_context(
            fixtures.install_finder(ModuleFreeFinder()))
        _compat.disable_stdlib_finder()


class LocalProjectTests(fixtures.LocalPackage, unittest.TestCase):
    def test_find_local(self):
        dist = Distribution._local()
        assert dist.metadata['Name'] == 'local-pkg'
        assert dist.version == '2.0.1'
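Outside the fixture, the same recommended procedure works against any installed distribution; a minimal standalone sketch (the `'pip>=9.0'` spec is only an example, not part of the suite):

```python
# Standalone version of the pattern from test_package_spec_installed.
import packaging.requirements
from importlib_metadata import version


def is_installed(package_spec):
    # Parse "name + specifier" once, then ask whether the installed
    # version satisfies the specifier.
    req = packaging.requirements.Requirement(package_spec)
    return version(req.name) in req.specifier


print(is_installed('pip>=9.0'))  # example spec
```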
third_party/python/importlib_metadata/importlib_metadata/tests/test_main.py (vendored, new file, 285 lines)
@@ -0,0 +1,285 @@
# coding: utf-8
from __future__ import unicode_literals

import re
import json
import pickle
import textwrap
import unittest
import importlib
import importlib_metadata
import pyfakefs.fake_filesystem_unittest as ffs

from . import fixtures
from .. import (
    Distribution, EntryPoint, MetadataPathFinder,
    PackageNotFoundError, distributions,
    entry_points, metadata, version,
)

try:
    from builtins import str as text
except ImportError:
    from __builtin__ import unicode as text


class BasicTests(fixtures.DistInfoPkg, unittest.TestCase):
    version_pattern = r'\d+\.\d+(\.\d)?'

    def test_retrieves_version_of_self(self):
        dist = Distribution.from_name('distinfo-pkg')
        assert isinstance(dist.version, text)
        assert re.match(self.version_pattern, dist.version)

    def test_for_name_does_not_exist(self):
        with self.assertRaises(PackageNotFoundError):
            Distribution.from_name('does-not-exist')

    def test_package_not_found_mentions_metadata(self):
        """
        When a package is not found, that could indicate that the
        package is not installed or that it is installed without
        metadata. Ensure the exception mentions metadata to help
        guide users toward the cause. See #124.
        """
        with self.assertRaises(PackageNotFoundError) as ctx:
            Distribution.from_name('does-not-exist')

        assert "metadata" in str(ctx.exception)

    def test_new_style_classes(self):
        self.assertIsInstance(Distribution, type)
        self.assertIsInstance(MetadataPathFinder, type)


class ImportTests(fixtures.DistInfoPkg, unittest.TestCase):
    def test_import_nonexistent_module(self):
        # Ensure that the MetadataPathFinder does not crash an import of a
        # non-existent module.
        with self.assertRaises(ImportError):
            importlib.import_module('does_not_exist')

    def test_resolve(self):
        entries = dict(entry_points()['entries'])
        ep = entries['main']
        self.assertEqual(ep.load().__name__, "main")

    def test_entrypoint_with_colon_in_name(self):
        entries = dict(entry_points()['entries'])
        ep = entries['ns:sub']
        self.assertEqual(ep.value, 'mod:main')

    def test_resolve_without_attr(self):
        ep = EntryPoint(
            name='ep',
            value='importlib_metadata',
            group='grp',
        )
        assert ep.load() is importlib_metadata


class NameNormalizationTests(
        fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase):
    @staticmethod
    def pkg_with_dashes(site_dir):
        """
        Create minimal metadata for a package with dashes
        in the name (and thus underscores in the filename).
        """
        metadata_dir = site_dir / 'my_pkg.dist-info'
        metadata_dir.mkdir()
        metadata = metadata_dir / 'METADATA'
        with metadata.open('w') as strm:
            strm.write('Version: 1.0\n')
        return 'my-pkg'

    def test_dashes_in_dist_name_found_as_underscores(self):
        """
        For a package with a dash in the name, the dist-info metadata
        uses underscores in the name. Ensure the metadata loads.
        """
        pkg_name = self.pkg_with_dashes(self.site_dir)
        assert version(pkg_name) == '1.0'

    @staticmethod
    def pkg_with_mixed_case(site_dir):
        """
        Create minimal metadata for a package with mixed case
        in the name.
        """
        metadata_dir = site_dir / 'CherryPy.dist-info'
        metadata_dir.mkdir()
        metadata = metadata_dir / 'METADATA'
        with metadata.open('w') as strm:
            strm.write('Version: 1.0\n')
        return 'CherryPy'

    def test_dist_name_found_as_any_case(self):
        """
        Ensure the metadata loads when queried with any case.
        """
        pkg_name = self.pkg_with_mixed_case(self.site_dir)
        assert version(pkg_name) == '1.0'
        assert version(pkg_name.lower()) == '1.0'
        assert version(pkg_name.upper()) == '1.0'


class NonASCIITests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase):
    @staticmethod
    def pkg_with_non_ascii_description(site_dir):
        """
        Create minimal metadata for a package with non-ASCII in
        the description.
        """
        metadata_dir = site_dir / 'portend.dist-info'
        metadata_dir.mkdir()
        metadata = metadata_dir / 'METADATA'
        with metadata.open('w', encoding='utf-8') as fp:
            fp.write('Description: pôrˈtend\n')
        return 'portend'

    @staticmethod
    def pkg_with_non_ascii_description_egg_info(site_dir):
        """
        Create minimal metadata for an egg-info package with
        non-ASCII in the description.
        """
        metadata_dir = site_dir / 'portend.dist-info'
        metadata_dir.mkdir()
        metadata = metadata_dir / 'METADATA'
        with metadata.open('w', encoding='utf-8') as fp:
            fp.write(textwrap.dedent("""
                Name: portend

                pôrˈtend
                """).lstrip())
        return 'portend'

    def test_metadata_loads(self):
        pkg_name = self.pkg_with_non_ascii_description(self.site_dir)
        meta = metadata(pkg_name)
        assert meta['Description'] == 'pôrˈtend'

    def test_metadata_loads_egg_info(self):
        pkg_name = self.pkg_with_non_ascii_description_egg_info(self.site_dir)
        meta = metadata(pkg_name)
        assert meta.get_payload() == 'pôrˈtend\n'


class DiscoveryTests(fixtures.EggInfoPkg,
                     fixtures.DistInfoPkg,
                     unittest.TestCase):

    def test_package_discovery(self):
        dists = list(distributions())
        assert all(
            isinstance(dist, Distribution)
            for dist in dists
        )
        assert any(
            dist.metadata['Name'] == 'egginfo-pkg'
            for dist in dists
        )
        assert any(
            dist.metadata['Name'] == 'distinfo-pkg'
            for dist in dists
        )

    def test_invalid_usage(self):
        with self.assertRaises(ValueError):
            list(distributions(context='something', name='else'))


class DirectoryTest(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase):
    def test_egg_info(self):
        # make an `EGG-INFO` directory that's unrelated
        self.site_dir.joinpath('EGG-INFO').mkdir()
        # used to crash with `IsADirectoryError`
        with self.assertRaises(PackageNotFoundError):
            version('unknown-package')

    def test_egg(self):
        egg = self.site_dir.joinpath('foo-3.6.egg')
        egg.mkdir()
        with self.add_sys_path(egg):
            with self.assertRaises(PackageNotFoundError):
                version('foo')


class MissingSysPath(fixtures.OnSysPath, unittest.TestCase):
    site_dir = '/does-not-exist'

    def test_discovery(self):
        """
        Discovering distributions should succeed even if
        there is an invalid path on sys.path.
        """
        importlib_metadata.distributions()


class InaccessibleSysPath(fixtures.OnSysPath, ffs.TestCase):
    site_dir = '/access-denied'

    def setUp(self):
        super(InaccessibleSysPath, self).setUp()
        self.setUpPyfakefs()
        self.fs.create_dir(self.site_dir, perm_bits=000)

    def test_discovery(self):
        """
        Discovering distributions should succeed even if
        there is an inaccessible path on sys.path.
        """
        list(importlib_metadata.distributions())


class TestEntryPoints(unittest.TestCase):
    def __init__(self, *args):
        super(TestEntryPoints, self).__init__(*args)
        self.ep = importlib_metadata.EntryPoint('name', 'value', 'group')

    def test_entry_point_pickleable(self):
        revived = pickle.loads(pickle.dumps(self.ep))
        assert revived == self.ep

    def test_immutable(self):
        """EntryPoints should be immutable"""
        with self.assertRaises(AttributeError):
            self.ep.name = 'badactor'

    def test_repr(self):
        assert 'EntryPoint' in repr(self.ep)
        assert 'name=' in repr(self.ep)
        assert "'name'" in repr(self.ep)

    def test_hashable(self):
        """EntryPoints should be hashable"""
        hash(self.ep)

    def test_json_dump(self):
        """
        json should not expect to be able to dump an EntryPoint
        """
        with self.assertRaises(Exception):
            json.dumps(self.ep)

    def test_module(self):
        assert self.ep.module == 'value'

    def test_attr(self):
        assert self.ep.attr is None


class FileSystem(
        fixtures.OnSysPath, fixtures.SiteDir, fixtures.FileBuilder,
        unittest.TestCase):
    def test_unicode_dir_on_sys_path(self):
        """
        Ensure a Unicode subdirectory of a directory on sys.path
        does not crash.
        """
        fixtures.build_files(
            {self.unicode_filename(): {}},
            prefix=self.site_dir,
        )
        list(distributions())
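The normalization cases above boil down to one observable guarantee: `version()` resolves distribution names case-insensitively, and dist-info directory names store dashes as underscores. A quick sketch (using `'pip'` as a stand-in for any installed distribution):

```python
# Mirrors what NameNormalizationTests exercises: lookups succeed
# regardless of the case used in the query.
from importlib_metadata import version

assert version('pip') == version('PIP') == version('Pip')
```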
third_party/python/importlib_metadata/importlib_metadata/tests/test_zip.py (vendored, new file, 80 lines)
@@ -0,0 +1,80 @@
import sys
import unittest

from .. import (
    distribution, entry_points, files, PackageNotFoundError,
    version, distributions,
)

try:
    from importlib import resources
    getattr(resources, 'files')
    getattr(resources, 'as_file')
except (ImportError, AttributeError):
    import importlib_resources as resources

try:
    from contextlib import ExitStack
except ImportError:
    from contextlib2 import ExitStack


class TestZip(unittest.TestCase):
    root = 'importlib_metadata.tests.data'

    def _fixture_on_path(self, filename):
        pkg_file = resources.files(self.root).joinpath(filename)
        file = self.resources.enter_context(resources.as_file(pkg_file))
        assert file.name.startswith('example-'), file.name
        sys.path.insert(0, str(file))
        self.resources.callback(sys.path.pop, 0)

    def setUp(self):
        # Find the path to the example-*.whl so we can add it to the front of
        # sys.path, where we'll then try to find the metadata thereof.
        self.resources = ExitStack()
        self.addCleanup(self.resources.close)
        self._fixture_on_path('example-21.12-py3-none-any.whl')

    def test_zip_version(self):
        self.assertEqual(version('example'), '21.12')

    def test_zip_version_does_not_match(self):
        with self.assertRaises(PackageNotFoundError):
            version('definitely-not-installed')

    def test_zip_entry_points(self):
        scripts = dict(entry_points()['console_scripts'])
        entry_point = scripts['example']
        self.assertEqual(entry_point.value, 'example:main')
        entry_point = scripts['Example']
        self.assertEqual(entry_point.value, 'example:main')

    def test_missing_metadata(self):
        self.assertIsNone(distribution('example').read_text('does not exist'))

    def test_case_insensitive(self):
        self.assertEqual(version('Example'), '21.12')

    def test_files(self):
        for file in files('example'):
            path = str(file.dist.locate_file(file))
            assert '.whl/' in path, path

    def test_one_distribution(self):
        dists = list(distributions(path=sys.path[:1]))
        assert len(dists) == 1


class TestEgg(TestZip):
    def setUp(self):
        # Find the path to the example-*.egg so we can add it to the front of
        # sys.path, where we'll then try to find the metadata thereof.
        self.resources = ExitStack()
        self.addCleanup(self.resources.close)
        self._fixture_on_path('example-21.12-py3.6.egg')

    def test_files(self):
        for file in files('example'):
            path = str(file.dist.locate_file(file))
            assert '.egg/' in path, path
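The mechanism under test is simply that a wheel (a zip archive) on `sys.path` is enough for metadata queries; a sketch, assuming a local copy of the fixture wheel named in `setUp` (the path is illustrative):

```python
# Put the fixture wheel at the front of sys.path and read its
# metadata straight out of the zip, as TestZip does via fixtures.
import sys

from importlib_metadata import version

sys.path.insert(0, 'example-21.12-py3-none-any.whl')  # illustrative path
assert version('example') == '21.12'
```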
@@ -48,8 +48,9 @@ universal = 1
 [options.extras_require]
 testing =
-	importlib_resources; python_version < "3.7"
+	importlib_resources>=1.3; python_version < "3.9"
 	packaging
+	pep517
 docs =
 	sphinx
 	rst.linker
@@ -0,0 +1 @@
TODO
@@ -0,0 +1,8 @@
language: python
python: "3.7"
node_js: "9"
install:
- pip install tox
script:
- tox
- npm install && npm test || true
@@ -0,0 +1,19 @@
Copyright (c) 2012 Julian Berman

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@@ -0,0 +1,181 @@
JSON Schema Test Suite [![Build Status](https://travis-ci.org/json-schema-org/JSON-Schema-Test-Suite.svg?branch=master)](https://travis-ci.org/json-schema-org/JSON-Schema-Test-Suite)
======================

This repository contains a set of JSON objects that implementors of JSON Schema
validation libraries can use to test their validators.

It is meant to be language agnostic and should require only a JSON parser.

The conversion of the JSON objects into tests within your test framework of
choice is still the job of the validator implementor.

Structure of a Test
-------------------

If you're going to use this suite, you need to know how tests are laid out. The
tests are contained in the `tests` directory at the root of this repository.

Inside that directory is a subdirectory for each draft or version of the
schema.

If you look inside the draft directory, there are a number of `.json` files,
which logically group a set of test cases together. Often the grouping is by
property under test, but not always, especially within optional test files
(discussed below).

Inside each `.json` file is a single array containing objects. It's easiest to
illustrate the structure of these with an example:

```json
{
    "description": "the description of the test case",
    "schema": {"the schema that should" : "be validated against"},
    "tests": [
        {
            "description": "a specific test of a valid instance",
            "data": "the instance",
            "valid": true
        },
        {
            "description": "another specific test, this time invalid",
            "data": 15,
            "valid": false
        }
    ]
}
```

So a description, a schema, and some tests, where tests is an array containing
one or more objects with descriptions, data, and a boolean indicating whether
they should be valid or invalid.
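For a concrete illustration, here is a minimal Python consumer of one such file, assuming the Python `jsonschema` library and a checkout of this suite (the draft directory and file name below are placeholders):

```python
# Run every case in one suite file against its schema; which file to
# load and which validator class to use are up to the implementor.
import json

import jsonschema

with open("tests/draft7/type.json") as f:  # placeholder path
    groups = json.load(f)

for group in groups:
    validator = jsonschema.Draft7Validator(group["schema"])
    for test in group["tests"]:
        assert validator.is_valid(test["data"]) == test["valid"], (
            group["description"], test["description"])
```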

Coverage
--------

Drafts 03, 04, 06, and 07 should have full coverage, with drafts 06 and 07
being considered current and actively supported. Bug fixes will be made as
needed for draft-04 as it is still the most widely used, while draft-03
is long since deprecated.

If you see anything missing from the current supported drafts, or incorrect
on any draft still accepting bug fixes, please file an issue or submit a PR.

Who Uses the Test Suite
-----------------------

This suite is being used by:

### Clojure ###

* [jinx](https://github.com/juxt/jinx)
* [json-schema](https://github.com/tatut/json-schema)

### Coffeescript ###

* [jsck](https://github.com/pandastrike/jsck)

### C++ ###

* [Modern C++ JSON schema validator](https://github.com/pboettch/json-schema-validator)

### Dart ###

* [json_schema](https://github.com/patefacio/json_schema)

### Elixir ###

* [ex_json_schema](https://github.com/jonasschmidt/ex_json_schema)

### Erlang ###

* [jesse](https://github.com/for-GET/jesse)

### Go ###

* [gojsonschema](https://github.com/sigu-399/gojsonschema)
* [validate-json](https://github.com/cesanta/validate-json)

### Haskell ###

* [aeson-schema](https://github.com/timjb/aeson-schema)
* [hjsonschema](https://github.com/seagreen/hjsonschema)

### Java ###

* [json-schema-validator](https://github.com/daveclayton/json-schema-validator)
* [everit-org/json-schema](https://github.com/everit-org/json-schema)
* [networknt/json-schema-validator](https://github.com/networknt/json-schema-validator)
* [Justify](https://github.com/leadpony/justify)

### JavaScript ###

* [json-schema-benchmark](https://github.com/Muscula/json-schema-benchmark)
* [direct-schema](https://github.com/IreneKnapp/direct-schema)
* [is-my-json-valid](https://github.com/mafintosh/is-my-json-valid)
* [jassi](https://github.com/iclanzan/jassi)
* [JaySchema](https://github.com/natesilva/jayschema)
* [json-schema-valid](https://github.com/ericgj/json-schema-valid)
* [Jsonary](https://github.com/jsonary-js/jsonary)
* [jsonschema](https://github.com/tdegrunt/jsonschema)
* [request-validator](https://github.com/bugventure/request-validator)
* [skeemas](https://github.com/Prestaul/skeemas)
* [tv4](https://github.com/geraintluff/tv4)
* [z-schema](https://github.com/zaggino/z-schema)
* [jsen](https://github.com/bugventure/jsen)
* [ajv](https://github.com/epoberezkin/ajv)
* [djv](https://github.com/korzio/djv)

### Node.js ###

For node.js developers, the suite is also available as an
[npm](https://www.npmjs.com/package/@json-schema-org/tests) package.

Node-specific support is maintained in a [separate
repository](https://github.com/json-schema-org/json-schema-test-suite-npm)
which also welcomes your contributions!

### .NET ###

* [Newtonsoft.Json.Schema](https://github.com/JamesNK/Newtonsoft.Json.Schema)
* [Manatee.Json](https://github.com/gregsdennis/Manatee.Json)

### PHP ###

* [json-schema](https://github.com/justinrainbow/json-schema)
* [json-guard](https://github.com/thephpleague/json-guard)

### PostgreSQL ###

* [postgres-json-schema](https://github.com/gavinwahl/postgres-json-schema)
* [is_jsonb_valid](https://github.com/furstenheim/is_jsonb_valid)

### Python ###

* [jsonschema](https://github.com/Julian/jsonschema)
* [fastjsonschema](https://github.com/seznam/python-fastjsonschema)
* [hypothesis-jsonschema](https://github.com/Zac-HD/hypothesis-jsonschema)

### Ruby ###

* [json-schema](https://github.com/hoxworth/json-schema)
* [json_schemer](https://github.com/davishmcclurg/json_schemer)

### Rust ###

* [valico](https://github.com/rustless/valico)

### Swift ###

* [JSONSchema](https://github.com/kylef/JSONSchema.swift)

If you use it as well, please fork and send a pull request adding yourself to
the list :).

Contributing
------------

If you see something missing or incorrect, a pull request is most welcome!

There are some sanity checks in place for testing the test suite. You can run
them with `bin/jsonschema_suite check && npm test` or `tox && npm test`. They
will be run automatically by [Travis CI](https://travis-ci.org/) as well.
@@ -0,0 +1,298 @@
#! /usr/bin/env python3
from __future__ import print_function
from pprint import pformat
import argparse
import errno
import fnmatch
import json
import os
import random
import shutil
import sys
import textwrap
import unittest
import warnings

if getattr(unittest, "skipIf", None) is None:
    unittest.skipIf = lambda cond, msg: lambda fn: fn

try:
    import jsonschema
except ImportError:
    jsonschema = None
else:
    validators = getattr(
        jsonschema.validators, "validators", jsonschema.validators
    )


ROOT_DIR = os.path.abspath(
    os.path.join(os.path.dirname(__file__), os.pardir).rstrip("__pycache__"),
)
SUITE_ROOT_DIR = os.path.join(ROOT_DIR, "tests")

REMOTES = {
    "integer.json": {u"type": u"integer"},
    "name.json": {
        u"type": "string",
        u"definitions": {
            u"orNull": {u"anyOf": [{u"type": u"null"}, {u"$ref": u"#"}]},
        },
    },
    "name-defs.json": {
        u"type": "string",
        u"$defs": {
            u"orNull": {u"anyOf": [{u"type": u"null"}, {u"$ref": u"#"}]},
        },
    },
    "subSchemas.json": {
        u"integer": {u"type": u"integer"},
        u"refToInteger": {u"$ref": u"#/integer"},
    },
    "folder/folderInteger.json": {u"type": u"integer"}
}
REMOTES_DIR = os.path.join(ROOT_DIR, "remotes")

with open(os.path.join(ROOT_DIR, "test-schema.json")) as schema:
    TESTSUITE_SCHEMA = json.load(schema)


def files(paths):
    for path in paths:
        with open(path) as test_file:
            yield json.load(test_file)


def groups(paths):
    for test_file in files(paths):
        for group in test_file:
            yield group


def cases(paths):
    for test_group in groups(paths):
        for test in test_group["tests"]:
            test["schema"] = test_group["schema"]
            yield test


def collect(root_dir):
    for root, dirs, files in os.walk(root_dir):
        for filename in fnmatch.filter(files, "*.json"):
            yield os.path.join(root, filename)


class SanityTests(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        print("Looking for tests in %s" % SUITE_ROOT_DIR)
        cls.test_files = list(collect(SUITE_ROOT_DIR))
        print("Found %s test files" % len(cls.test_files))
        assert cls.test_files, "Didn't find the test files!"

    def test_all_files_are_valid_json(self):
        for path in self.test_files:
            with open(path) as test_file:
                try:
                    json.load(test_file)
                except ValueError as error:
                    self.fail("%s contains invalid JSON (%s)" % (path, error))

    def test_all_descriptions_have_reasonable_length(self):
        for case in cases(self.test_files):
            description = case["description"]
            self.assertLess(
                len(description),
                70,
                "%r is too long! (keep it to less than 70 chars)" % (
                    description,
                ),
            )

    def test_all_descriptions_are_unique(self):
        for group in groups(self.test_files):
            descriptions = set(test["description"] for test in group["tests"])
            self.assertEqual(
                len(descriptions),
                len(group["tests"]),
                "%r contains a duplicate description" % (group,)
            )

    @unittest.skipIf(jsonschema is None, "Validation library not present!")
    def test_all_schemas_are_valid(self):
        for schema in os.listdir(SUITE_ROOT_DIR):
            schema_validator = validators.get(schema)
            if schema_validator is not None:
                test_files = collect(os.path.join(SUITE_ROOT_DIR, schema))
                for case in cases(test_files):
                    try:
                        schema_validator.check_schema(case["schema"])
                    except jsonschema.SchemaError as error:
                        self.fail("%s contains an invalid schema (%s)" %
                                  (case, error))
            else:
                warnings.warn("No schema validator for %s" % schema)

    @unittest.skipIf(jsonschema is None, "Validation library not present!")
    def test_suites_are_valid(self):
        validator = jsonschema.Draft4Validator(TESTSUITE_SCHEMA)
        for tests in files(self.test_files):
            try:
                validator.validate(tests)
            except jsonschema.ValidationError as error:
                self.fail(str(error))

    def test_remote_schemas_are_updated(self):
        files = {}
        for parent, _, paths in os.walk(REMOTES_DIR):
            for path in paths:
                absolute_path = os.path.join(parent, path)
                with open(absolute_path) as schema_file:
                    files[absolute_path] = json.load(schema_file)

        expected = {
            os.path.join(REMOTES_DIR, path): contents
            for path, contents in REMOTES.items()
        }

        missing = set(files).symmetric_difference(expected)
        changed = {
            path
            for path, contents in expected.items()
            if path in files
            and contents != files[path]
        }

        self.assertEqual(
            files,
            expected,
            msg=textwrap.dedent(
                """
                Remotes in the remotes/ directory do not match those in the
                ``jsonschema_suite`` Python script.

                Unfortunately for the minute, each remote file is duplicated in
                two places.""" + ("""

                Only present in one location:

                {}""".format("\n".join(missing)) if missing else "") + ("""

                Conflicting between the two:

                {}""".format("\n".join(changed)) if changed else "")
            )
        )


def main(arguments):
    if arguments.command == "check":
        suite = unittest.TestLoader().loadTestsFromTestCase(SanityTests)
        result = unittest.TextTestRunner(verbosity=2).run(suite)
        sys.exit(not result.wasSuccessful())
    elif arguments.command == "flatten":
        selected_cases = [case for case in cases(collect(arguments.version))]

        if arguments.randomize:
            random.shuffle(selected_cases)

        json.dump(selected_cases, sys.stdout, indent=4, sort_keys=True)
    elif arguments.command == "remotes":
        json.dump(REMOTES, sys.stdout, indent=4, sort_keys=True)
    elif arguments.command == "dump_remotes":
        if arguments.update:
            shutil.rmtree(arguments.out_dir, ignore_errors=True)

        try:
            os.makedirs(arguments.out_dir)
        except OSError as e:
            if e.errno == errno.EEXIST:
                print("%s already exists. Aborting." % arguments.out_dir)
                sys.exit(1)
            raise

        for url, schema in REMOTES.items():
            filepath = os.path.join(arguments.out_dir, url)

            try:
                os.makedirs(os.path.dirname(filepath))
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

            with open(filepath, "w") as out_file:
                json.dump(schema, out_file, indent=4, sort_keys=True)
                out_file.write("\n")
    elif arguments.command == "serve":
        try:
            from flask import Flask, jsonify
        except ImportError:
            print(textwrap.dedent("""
                The Flask library is required to serve the remote schemas.

                You can install it by running `pip install Flask`.

                Alternatively, see the `jsonschema_suite remotes` or
                `jsonschema_suite dump_remotes` commands to create static files
                that can be served with your own web server.
            """.strip("\n")))
            sys.exit(1)

        app = Flask(__name__)

        @app.route("/<path:path>")
        def serve_path(path):
            if path in REMOTES:
                return jsonify(REMOTES[path])
            return "Document does not exist.", 404

        app.run(port=1234)


parser = argparse.ArgumentParser(
    description="JSON Schema Test Suite utilities",
)
subparsers = parser.add_subparsers(help="utility commands", dest="command")

check = subparsers.add_parser("check", help="Sanity check the test suite.")

flatten = subparsers.add_parser(
    "flatten",
    help="Output a flattened file containing a selected version's test cases."
)
flatten.add_argument(
    "--randomize",
    action="store_true",
    help="Randomize the order of the outputted cases.",
)
flatten.add_argument(
    "version", help="The directory containing the version to output",
)

remotes = subparsers.add_parser(
    "remotes",
    help="Output the expected URLs and their associated schemas for remote "
         "ref tests as a JSON object."
)

dump_remotes = subparsers.add_parser(
    "dump_remotes", help="Dump the remote ref schemas into a file tree",
)
dump_remotes.add_argument(
    "--update",
    action="store_true",
    help="Update the remotes in an existing directory.",
)
dump_remotes.add_argument(
    "--out-dir",
    default=REMOTES_DIR,
    type=os.path.abspath,
    help="The output directory to create as the root of the file tree",
)

serve = subparsers.add_parser(
    "serve",
    help="Start a webserver to serve schemas used by remote ref tests."
)

if __name__ == "__main__":
    main(parser.parse_args())
@@ -0,0 +1,45 @@
'use strict';

const Ajv = require('ajv');
const jsonSchemaTest = require('json-schema-test');
const assert = require('assert');

const refs = {
  'http://localhost:1234/integer.json': require('./remotes/integer.json'),
  'http://localhost:1234/subSchemas.json': require('./remotes/subSchemas.json'),
  'http://localhost:1234/folder/folderInteger.json': require('./remotes/folder/folderInteger.json'),
  'http://localhost:1234/name.json': require('./remotes/name.json'),
  'http://localhost:1234/name-defs.json': require('./remotes/name-defs.json')
};

const SKIP = {
  4: ['optional/zeroTerminatedFloats'],
  7: [
    'format/idn-email',
    'format/idn-hostname',
    'format/iri',
    'format/iri-reference',
    'optional/content'
  ]
};

[4, 6, 7].forEach((draft) => {
  let ajv;
  if (draft == 7) {
    ajv = new Ajv({format: 'full'});
  } else {
    const schemaId = draft == 4 ? 'id' : '$id';
    ajv = new Ajv({format: 'full', meta: false, schemaId});
    ajv.addMetaSchema(require(`ajv/lib/refs/json-schema-draft-0${draft}.json`));
    ajv._opts.defaultMeta = `http://json-schema.org/draft-0${draft}/schema#`;
  }
  for (const uri in refs) ajv.addSchema(refs[uri], uri);

  jsonSchemaTest(ajv, {
    description: `Test suite draft-0${draft}`,
    suites: {tests: `./tests/draft${draft}/{**/,}*.json`},
    skip: SKIP[draft],
    cwd: __dirname,
    hideFolder: 'tests/'
  });
});
@@ -0,0 +1,28 @@
{
  "name": "json-schema-test-suite",
  "version": "0.1.0",
  "description": "A language agnostic test suite for the JSON Schema specifications",
  "main": "index.js",
  "scripts": {
    "test": "mocha index.js -R spec"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/json-schema-org/JSON-Schema-Test-Suite.git"
  },
  "keywords": [
    "json-schema",
    "tests"
  ],
  "author": "http://json-schema.org",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/json-schema-org/JSON-Schema-Test-Suite/issues"
  },
  "homepage": "https://github.com/json-schema-org/JSON-Schema-Test-Suite#readme",
  "devDependencies": {
    "ajv": "^6.0.0-rc.1",
    "json-schema-test": "^2.0.0",
    "mocha": "^3.2.0"
  }
}
@@ -0,0 +1,3 @@
{
    "type": "integer"
}
@@ -0,0 +1,3 @@
{
    "type": "integer"
}
@@ -0,0 +1,15 @@
{
    "$defs": {
        "orNull": {
            "anyOf": [
                {
                    "type": "null"
                },
                {
                    "$ref": "#"
                }
            ]
        }
    },
    "type": "string"
}
@@ -0,0 +1,15 @@
{
    "definitions": {
        "orNull": {
            "anyOf": [
                {
                    "type": "null"
                },
                {
                    "$ref": "#"
                }
            ]
        }
    },
    "type": "string"
}
@@ -0,0 +1,8 @@
{
    "integer": {
        "type": "integer"
    },
    "refToInteger": {
        "$ref": "#/integer"
    }
}
@@ -0,0 +1,59 @@
{
    "$schema": "http://json-schema.org/draft-04/schema#",
    "definitions": {
        "outputItem": {
            "type": "object",
            "properties": {
                "valid": {"type": "boolean"},
                "keywordLocation": {"type": "string"},
                "absoluteKeywordLocation": {
                    "type": "string",
                    "format": "uri"
                },
                "instanceLocation": {"type": "string"},
                "annotations": {
                    "type": "array",
                    "items": {"$ref": "#/definitions/outputItem"}
                },
                "errors": {
                    "type": "array",
                    "items": {"$ref": "#/definitions/outputItem"}
                }
            }
        }
    },
    "type": "array",
    "items": {
        "type": "object",
        "required": ["description", "schema", "tests"],
        "properties": {
            "description": {"type": "string"},
            "schema": {},
            "tests": {
                "type": "array",
                "items": {
                    "type": "object",
                    "required": ["description", "data", "valid"],
                    "properties": {
                        "description": {"type": "string"},
                        "data": {},
                        "valid": {"type": "boolean"},
                        "output": {
                            "type": "object",
                            "properties": {
                                "basic": {"$ref": "#/definitions/outputItem"},
                                "detailed": {"$ref": "#/definitions/outputItem"},
                                "verbose": {"$ref": "#/definitions/outputItem"}
                            },
                            "required": ["basic", "detailed", "verbose"]
                        }
                    },
                    "additionalProperties": false
                },
                "minItems": 1
            }
        },
        "additionalProperties": false,
        "minItems": 1
    }
}
third_party/python/jsonschema/json/tests/draft2019-09/additionalItems.json (vendored, new file, 87 lines)
@@ -0,0 +1,87 @@
[
    {
        "description": "additionalItems as schema",
        "schema": {
            "items": [{}],
            "additionalItems": {"type": "integer"}
        },
        "tests": [
            {
                "description": "additional items match schema",
                "data": [ null, 2, 3, 4 ],
                "valid": true
            },
            {
                "description": "additional items do not match schema",
                "data": [ null, 2, 3, "foo" ],
                "valid": false
            }
        ]
    },
    {
        "description": "items is schema, no additionalItems",
        "schema": {
            "items": {},
            "additionalItems": false
        },
        "tests": [
            {
                "description": "all items match schema",
                "data": [ 1, 2, 3, 4, 5 ],
                "valid": true
            }
        ]
    },
    {
        "description": "array of items with no additionalItems",
        "schema": {
            "items": [{}, {}, {}],
            "additionalItems": false
        },
        "tests": [
            {
                "description": "fewer number of items present",
                "data": [ 1, 2 ],
                "valid": true
            },
            {
                "description": "equal number of items present",
                "data": [ 1, 2, 3 ],
                "valid": true
            },
            {
                "description": "additional items are not permitted",
                "data": [ 1, 2, 3, 4 ],
                "valid": false
            }
        ]
    },
    {
        "description": "additionalItems as false without items",
        "schema": {"additionalItems": false},
        "tests": [
            {
                "description":
                    "items defaults to empty schema so everything is valid",
                "data": [ 1, 2, 3, 4, 5 ],
                "valid": true
            },
            {
                "description": "ignores non-arrays",
                "data": {"foo" : "bar"},
                "valid": true
            }
        ]
    },
    {
        "description": "additionalItems are allowed by default",
        "schema": {"items": [{"type": "integer"}]},
        "tests": [
            {
                "description": "only the first item is validated",
                "data": [1, "foo", false],
                "valid": true
            }
        ]
    }
]
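These cases pin down how `additionalItems` constrains only the items beyond the tuple-form `items` prefix. As a quick sketch with the Python `jsonschema` library (using `Draft7Validator` here on the assumption that these particular keywords behave the same in that draft):

```python
# Mirrors the first case group above: the first item is matched by the
# empty schema in `items`, the rest must be integers.
import jsonschema

schema = {"items": [{}], "additionalItems": {"type": "integer"}}
validator = jsonschema.Draft7Validator(schema)
assert validator.is_valid([None, 2, 3, 4])
assert not validator.is_valid([None, 2, 3, "foo"])
```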
third_party/python/jsonschema/json/tests/draft2019-09/additionalProperties.json (vendored, new file, 133 lines)
@@ -0,0 +1,133 @@
[
    {
        "description":
            "additionalProperties being false does not allow other properties",
        "schema": {
            "properties": {"foo": {}, "bar": {}},
            "patternProperties": { "^v": {} },
            "additionalProperties": false
        },
        "tests": [
            {
                "description": "no additional properties is valid",
                "data": {"foo": 1},
                "valid": true
            },
            {
                "description": "an additional property is invalid",
                "data": {"foo" : 1, "bar" : 2, "quux" : "boom"},
                "valid": false
            },
            {
                "description": "ignores arrays",
                "data": [1, 2, 3],
                "valid": true
            },
            {
                "description": "ignores strings",
                "data": "foobarbaz",
                "valid": true
            },
            {
                "description": "ignores other non-objects",
                "data": 12,
                "valid": true
            },
            {
                "description": "patternProperties are not additional properties",
                "data": {"foo":1, "vroom": 2},
                "valid": true
            }
        ]
    },
    {
        "description": "non-ASCII pattern with additionalProperties",
        "schema": {
            "patternProperties": {"^á": {}},
            "additionalProperties": false
        },
        "tests": [
            {
                "description": "matching the pattern is valid",
                "data": {"ármányos": 2},
                "valid": true
            },
            {
                "description": "not matching the pattern is invalid",
                "data": {"élmény": 2},
                "valid": false
            }
        ]
    },
    {
        "description":
            "additionalProperties allows a schema which should validate",
        "schema": {
            "properties": {"foo": {}, "bar": {}},
            "additionalProperties": {"type": "boolean"}
        },
        "tests": [
            {
                "description": "no additional properties is valid",
                "data": {"foo": 1},
                "valid": true
            },
            {
                "description": "an additional valid property is valid",
                "data": {"foo" : 1, "bar" : 2, "quux" : true},
                "valid": true
            },
            {
                "description": "an additional invalid property is invalid",
                "data": {"foo" : 1, "bar" : 2, "quux" : 12},
                "valid": false
            }
        ]
    },
    {
        "description":
            "additionalProperties can exist by itself",
        "schema": {
            "additionalProperties": {"type": "boolean"}
        },
        "tests": [
            {
                "description": "an additional valid property is valid",
                "data": {"foo" : true},
                "valid": true
            },
            {
                "description": "an additional invalid property is invalid",
                "data": {"foo" : 1},
                "valid": false
            }
        ]
    },
    {
        "description": "additionalProperties are allowed by default",
        "schema": {"properties": {"foo": {}, "bar": {}}},
        "tests": [
            {
                "description": "additional properties are allowed",
                "data": {"foo": 1, "bar": 2, "quux": true},
                "valid": true
            }
        ]
    },
    {
        "description": "additionalProperties should not look in applicators",
        "schema": {
            "allOf": [
                {"properties": {"foo": {}}}
            ],
            "additionalProperties": {"type": "boolean"}
        },
        "tests": [
            {
                "description": "properties defined in allOf are not allowed",
                "data": {"foo": 1, "bar": true},
                "valid": false
            }
        ]
    }
]
Some files were not shown because too many files changed in this diff.