Backed out 12 changesets (bug 1497898) for build bustages. CLOSED TREE

Backed out changeset 57877c614829 (bug 1497898)
Backed out changeset 22a06c8c8dc6 (bug 1497898)
Backed out changeset 7bba4d617db6 (bug 1497898)
Backed out changeset 3a9a7760db5c (bug 1497898)
Backed out changeset c482d18cc050 (bug 1497898)
Backed out changeset 2caa5633dea1 (bug 1497898)
Backed out changeset 48be184d5377 (bug 1497898)
Backed out changeset 184bc31c33a6 (bug 1497898)
Backed out changeset c3cb0408498c (bug 1497898)
Backed out changeset a6a89509add7 (bug 1497898)
Backed out changeset 9afac925aef8 (bug 1497898)
Backed out changeset 5e3b8ad4c8f4 (bug 1497898)
Brindusan Cristian 2018-10-19 15:37:41 +03:00
Parent 9fa028580e
Commit 966d95816c
19 changed files with 378 additions and 903 deletions

View file

@@ -2,11 +2,8 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import sys
from mozboot.util import get_state_dir
def create_parser_wpt():
from wptrunner import wptcommandline
@@ -47,5 +44,4 @@ class WebPlatformTestsRunner(object):
return manifestupdate.run(logger=logger,
src_root=self.setup.topsrcdir,
obj_root=self.setup.topobjdir,
cache_root=os.path.join(get_state_dir()[0], "cache", "wpt"),
**kwargs)

View file

@@ -1,6 +1,5 @@
import ConfigParser
import argparse
import hashlib
import imp
import os
import sys
@@ -106,8 +105,7 @@ def run(src_root, obj_root, logger=None, **kwargs):
logger.debug("Skipping manifest download")
if kwargs["update"] or kwargs["rebuild"]:
manifests = update(logger, src_wpt_dir, test_paths, rebuild=kwargs["rebuild"],
cache_root=kwargs["cache_root"])
manifests = update(logger, src_wpt_dir, test_paths, rebuild=kwargs["rebuild"])
else:
logger.debug("Skipping manifest update")
manifests = load_manifests(test_paths)
@@ -166,17 +164,24 @@ def generate_config(logger, repo_root, wpt_dir, dest_path, force_rewrite=False):
return dest_config_path
def update(logger, wpt_dir, test_paths, rebuild=False, config_dir=None, cache_root=None):
def update(logger, wpt_dir, test_paths, rebuild=False, config_dir=None):
rv = {}
wptdir_hash = hashlib.sha256(os.path.abspath(wpt_dir)).hexdigest()
for url_base, paths in test_paths.iteritems():
m = None
manifest_path = paths["manifest_path"]
this_cache_root = os.path.join(cache_root, wptdir_hash, os.path.dirname(paths["manifest_rel_path"]))
m = manifest.manifest.load_and_update(paths["tests_path"],
manifest_path,
url_base,
working_copy=True,
cache_root=this_cache_root)
if not rebuild and os.path.exists(manifest_path):
logger.info("Updating manifest %s" % manifest_path)
try:
m = manifest.manifest.load(paths["tests_path"], manifest_path)
except manifest.manifest.ManifestVersionMismatch:
logger.info("Manifest format changed, rebuilding")
if m is None:
logger.info("Recreating manifest %s" % manifest_path)
m = manifest.manifest.Manifest(url_base)
manifest.update.update(paths["tests_path"], m, working_copy=True)
manifest.manifest.write(m, manifest_path)
path_data = {"url_base": url_base}
path_data.update(paths)
rv[m] = path_data
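Taken together, the restored update() above implements a load-or-recreate flow: reuse the on-disk manifest unless a rebuild was requested or the format version changed, then walk the working copy and write the result back. A minimal sketch reassembled from the interleaved lines above (the standalone helper and the import layout are illustrative, not part of the patch):

import os
from manifest import manifest, update as manifest_update  # assumed layout

def load_or_recreate(tests_path, manifest_path, url_base="/", rebuild=False):
    m = None
    if not rebuild and os.path.exists(manifest_path):
        try:
            m = manifest.load(tests_path, manifest_path)
        except manifest.ManifestVersionMismatch:
            m = None  # format changed on disk; fall through and rebuild
    if m is None:
        m = manifest.Manifest(url_base)
    manifest_update.update(tests_path, m, working_copy=True)
    manifest.write(m, manifest_path)
    return m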

testing/web-platform/tests/.gitignore (vendored)
View file

@@ -5,15 +5,19 @@ _venv/
.cache/
.pytest_cache/
.tox/
.coverage*
# Node
node_modules/
# WPT repo stuff
/MANIFEST.json
.wptcache/
/config.json
testharness_runner.html
!/testharness_runner.html
!/tools/wptrunner/wptrunner/testharness_runner.html
_certs
config.json
# Various OS/editor specific files
*#

View file

@@ -19,7 +19,6 @@ test_infrastructure() {
main() {
PRODUCTS=( "firefox" "chrome" )
./wpt manifest --rebuild -p ~/meta/MANIFEST.json
for PRODUCT in "${PRODUCTS[@]}"; do
if [ "$PRODUCT" != "firefox" ]; then
# Firefox is expected to work using pref settings for DNS

View file

@@ -1,29 +1,21 @@
import re
import os
import itertools
from six import itervalues, iteritems
from collections import defaultdict
end_space = re.compile(r"([^\\]\s)*$")
def fnmatch_translate(pat):
def fnmatch_translate(pat, path_name=False):
parts = []
seq = None
seq = False
i = 0
any_char = "[^/]"
if pat[0] == "/":
if pat[0] == "/" or path_name:
parts.append("^")
pat = pat[1:]
any_char = "[^/]"
if pat[0] == "/":
pat = pat[1:]
else:
# By default match the entire path up to a /
# but if / doesn't appear in the pattern we will mark it as
# a name pattern and just produce a pattern that matches against
# the filename
any_char = "."
parts.append("^(?:.*/)?")
name_pattern = True
if pat[-1] == "/":
# If the last character is / match this directory or any subdirectory
pat = pat[:-1]
@@ -39,10 +31,11 @@ def fnmatch_translate(pat):
parts.append(re.escape(c))
else:
raise ValueError
elif seq is not None:
# TODO: this doesn't really handle invalid sequences in the right way
elif seq:
if c == "]":
seq = None
seq = False
# First two cases are to deal with the case where / is the only character
# in the sequence but path_name is True so it shouldn't match anything
if parts[-1] == "[":
parts = parts[:-1]
elif parts[-1] == "^" and parts[-2] == "[":
@@ -51,46 +44,35 @@ def fnmatch_translate(pat):
parts.append(c)
elif c == "-":
parts.append(c)
else:
elif not (path_name and c == "/"):
parts += re.escape(c)
elif c == "[":
parts.append("[")
if i < len(pat) - 1 and pat[i+1] in ("!", "^"):
parts.append("^")
i += 1
seq = i
seq = True
elif c == "*":
if i < len(pat) - 1 and pat[i+1] == "*":
if i > 0 and pat[i-1] != "/":
raise ValueError
parts.append(".*")
parts.append(any_char + "*")
i += 1
if i < len(pat) - 1 and pat[i+1] != "/":
if i < len(pat) - 1 and pat[i+1] == "*":
raise ValueError
else:
parts.append(any_char + "*")
elif c == "?":
parts.append(any_char)
elif c == "/" and not seq:
name_pattern = False
parts.append(c)
else:
parts.append(re.escape(c))
i += 1
if name_pattern:
parts[0] = "^"
if seq is not None:
if seq:
raise ValueError
parts.append(suffix)
try:
return name_pattern, re.compile("".join(parts))
return re.compile("".join(parts))
except Exception:
raise ValueError
# Regexp matching rules that have to be converted to patterns
pattern_re = re.compile(r".*[\*\[\?]")
raise
def parse_line(line):
@@ -107,19 +89,11 @@ def parse_line(line):
if dir_only:
line = line[:-1]
# Could make a special case for **/foo, but we don't have any patterns like that
if not invert and not pattern_re.match(line):
literal = True
pattern = tuple(line.rsplit("/", 1))
else:
pattern = fnmatch_translate(line)
literal = False
return invert, dir_only, literal, pattern
return invert, dir_only, fnmatch_translate(line, dir_only)
class PathFilter(object):
def __init__(self, root, extras=None, cache=None):
def __init__(self, root, extras=None):
if root:
ignore_path = os.path.join(root, ".gitignore")
else:
@@ -129,123 +103,51 @@ class PathFilter(object):
return
self.trivial = False
self.literals_file = defaultdict(dict)
self.literals_dir = defaultdict(dict)
self.patterns_file = []
self.patterns_dir = []
self.cache = cache or {}
self.rules_file = []
self.rules_dir = []
if extras is None:
extras = []
if ignore_path and os.path.exists(ignore_path):
args = ignore_path, extras
else:
args = None, extras
self._read_ignore(*args)
self._read_ignore(ignore_path)
def _read_ignore(self, ignore_path, extras):
if ignore_path is not None:
with open(ignore_path) as f:
for line in f:
self._read_line(line)
for line in extras:
self._read_line(line)
for item in extras:
self._read_line(item)
def _read_ignore(self, ignore_path):
with open(ignore_path) as f:
for line in f:
self._read_line(line)
def _read_line(self, line):
parsed = parse_line(line)
if not parsed:
return
invert, dir_only, literal, rule = parsed
if invert:
# For exclude rules, we attach the rules to all preceding patterns, so
# that we can match patterns out of order and check if they were later
# overridden by an exclude rule
assert not literal
if not dir_only:
rules_iter = itertools.chain(
itertools.chain(*(iteritems(item) for item in itervalues(self.literals_dir))),
itertools.chain(*(iteritems(item) for item in itervalues(self.literals_file))),
self.patterns_dir,
self.patterns_file)
else:
rules_iter = itertools.chain(
itertools.chain(*(iteritems(item) for item in itervalues(self.literals_dir))),
self.patterns_dir)
for rules in rules_iter:
rules[1].append(rule)
invert, dir_only, regexp = parsed
if dir_only:
self.rules_dir.append((regexp, invert))
else:
if literal:
if len(rule) == 1:
dir_name, pattern = None, rule[0]
else:
dir_name, pattern = rule
self.literals_dir[dir_name][pattern] = []
if not dir_only:
self.literals_file[dir_name][pattern] = []
else:
self.patterns_dir.append((rule, []))
if not dir_only:
self.patterns_file.append((rule, []))
self.rules_file.append((regexp, invert))
def filter(self, iterator):
empty = {}
for dirpath, dirnames, filenames in iterator:
orig_dirpath = dirpath
if os.path.sep != "/":
dirpath = dirpath.replace(os.path.sep, "/")
def __call__(self, path):
if os.path.sep != "/":
path = path.replace(os.path.sep, "/")
keep_dirs = []
keep_files = []
for iter_items, literals, patterns, target, suffix in [
(dirnames, self.literals_dir, self.patterns_dir, keep_dirs, "/"),
(filenames, self.literals_file, self.patterns_file, keep_files, "")]:
for item in iter_items:
name = item[0]
if dirpath:
path = "%s/%s" % (dirpath, name) + suffix
else:
path = name + suffix
if path in self.cache:
if not self.cache[path]:
target.append(item)
continue
for rule_dir in [None, dirpath]:
if name in literals.get(rule_dir, empty):
exclude = literals[rule_dir][name]
if not any(rule.match(path) for rule in exclude):
# Skip this item
self.cache[path] = True
break
else:
for (component_only, pattern), exclude in patterns:
if component_only:
match = pattern.match(name)
else:
match = pattern.match(path)
if match:
if not any(rule.match(name if name_only else path)
for name_only, rule in exclude):
# Skip this item
self.cache[path] = True
break
else:
self.cache[path] = False
target.append(item)
dirnames[:] = keep_dirs
assert ".git" not in dirnames
yield orig_dirpath, dirnames, keep_files
def __call__(self, iterator):
if self.trivial:
return iterator
return True
return self.filter(iterator)
path_is_dir = path[-1] == "/"
if path_is_dir:
path = path[:-1]
rules = self.rules_dir
else:
rules = self.rules_file
def has_ignore(dirpath):
return os.path.exists(os.path.join(dirpath, ".gitignore"))
include = True
for regexp, invert in rules:
if not include and invert and regexp.match(path):
include = True
elif include and not invert and regexp.match(path):
include = False
return include
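The loop at the bottom of __call__ is the entire matching model after this backout: rules fire in file order, a plain rule excludes a currently-included path, and an inverted ("!") rule can re-include a currently-excluded one. A self-contained sketch with two hypothetical hand-compiled rules:

import re

# ("a/", exclude) followed by ("!a/keep", re-include) -- illustrative only
rules = [(re.compile(r"^a/"), False),
         (re.compile(r"^a/keep$"), True)]

def include(path):
    included = True
    for regexp, invert in rules:
        if not included and invert and regexp.match(path):
            included = True
        elif included and not invert and regexp.match(path):
            included = False
    return included

assert include("b/c") is True     # no rule matches
assert include("a/b") is False    # excluded by the first rule
assert include("a/keep") is True  # re-included by the inverted rule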

View file

@@ -3,98 +3,80 @@ import pytest
from ..gitignore import fnmatch_translate, PathFilter
match_data = [
("foo", True, ["a/foo", "foo"]),
("*.a", True, ["foo.a", "a/foo.a", "a/b/foo.a", "a.a/foo.a"]),
("*.py[co]", True, ["a.pyc", "a.pyo", "a/b/c.pyc"]),
("\\#*", True, ["#a", "a/#b"]),
("*#", True, ["a#", "a/b#", "#a#"]),
("/*.c", True, ["a.c", ".c"]),
("foo", False, ["a/foo", "foo"]),
("*.a", False, ["foo.a", "a/foo.a", "a/b/foo.a", "a.a/foo.a"]),
("*.py[co]", False, ["a.pyc", "a.pyo", "a/b/c.pyc"]),
("\\#*", False, ["#a", "a/#b"]),
("*#", False, ["a#", "a/b#", "#a#"]),
("/*.c", False, ["a.c", ".c"]),
("**/b", False, ["a/b", "a/c/b"]),
("*b", True, ["ab"]),
("*b", True, ["a/b"]),
("**/b", False, ["a/b"]),
("a/", True, ["a"]),
("a[/]b", True, []),
("**/b", False, ["a/c/b"]),
("a?c", True, ["abc"]),
("a[^b]c", True, ["acc"]),
("a[b-c]c", True, ["abc", "acc"]),
("a[^]c", True, ["ac"]), # This is probably wrong
("a[^]c", True, ["ac"]), # This is probably wrong
("**/b", True, ["a/b"]),
("a/", True, ["a", "a/b", "a/b/c"])
]
mismatch_data = [
("foo", True, ["foob", "afoo"]),
("*.a", True, ["a", "foo:a", "a.a/foo"]),
("*.py[co]", True, ["a.pyd", "pyo", "a.py"]),
("a", True, ["ab"]),
("a?c", True, ["ac", "abbc"]),
("a[^b]c", True, ["abc"]),
("a[b-c]c", True, ["adc"]),
("foo", False, ["foob", "afoo"]),
("*.a", False, ["a", "foo:a", "a.a/foo"]),
("*.py[co]", False, ["a.pyd", "pyo"]),
("/*.c", False, ["a/b.c"]),
("*b", True, ["a/b"]),
("**b", True, ["a/b"]),
("a[/]b", True, ["a/b"]),
("**/b", True, ["a/c/b"]),
("a", True, ["ab"])
]
invalid_data = [
"[a",
"***/foo",
"a\\",
"**b",
"b**/",
"[[]"
]
filter_data = [
(["foo", "bar/", "/a", "*.py"],
[("", ["foo", "bar", "baz"], ["a"]),
("baz", ["a"], ["foo", "bar"])],
[(["baz"], []),
(["a"], ["bar"])]),
(["#foo", "", "a*", "!a.py"],
[("", ["foo"], ["a", "a.foo", "a.py"])],
[(["foo"], ["a.py"])]),
("foo", True),
("a", False),
("a/b", False),
("a/c", True),
("a/c/", False),
("c/b", True)
]
def expand_data(compact_data):
for pattern, name_only, inputs in compact_data:
for pattern, path_name, inputs in compact_data:
for input in inputs:
yield pattern, name_only, input
yield pattern, input, path_name
@pytest.mark.parametrize("pattern, name_only, input", expand_data(match_data))
def tests_match(pattern, name_only, input):
name_only_result, regexp = fnmatch_translate(pattern)
assert name_only_result == name_only
if name_only:
input = input.rsplit("/", 1)[-1]
@pytest.mark.parametrize("pattern, input, path_name", expand_data(match_data))
def tests_match(pattern, input, path_name):
regexp = fnmatch_translate(pattern, path_name)
assert regexp.match(input) is not None
@pytest.mark.parametrize("pattern, name_only, input", expand_data(mismatch_data))
def tests_no_match(pattern, name_only, input):
name_only_result, regexp = fnmatch_translate(pattern)
assert name_only_result == name_only
if name_only:
input = input.rsplit("/", 1)[-1]
@pytest.mark.parametrize("pattern, input, path_name", expand_data(mismatch_data))
def tests_no_match(pattern, input, path_name):
regexp = fnmatch_translate(pattern, path_name)
assert regexp.match(input) is None
@pytest.mark.parametrize("pattern", invalid_data)
def tests_invalid(pattern):
with pytest.raises(ValueError):
fnmatch_translate(pattern)
fnmatch_translate(pattern, False)
with pytest.raises(ValueError):
fnmatch_translate(pattern, True)
@pytest.mark.parametrize("rules, input, expected", filter_data)
def test_path_filter(rules, input, expected):
f = PathFilter(None, rules)
# Add some fake stat data
for i, item in enumerate(input):
repl = [input[i][0]]
for j in [1, 2]:
repl.append([(name, None) for name in input[i][j]])
input[i] = tuple(repl)
for i, output in enumerate(f(input)):
assert output[0] == input[i][0]
for j in [1, 2]:
assert [item[0] for item in output[j]] == expected[i][j-1]
@pytest.mark.parametrize("path, expected", filter_data)
def test_path_filter(path, expected):
extras = [
"#foo",
"a ",
"**/b",
"a/c/",
"!c/b",
]
f = PathFilter(None, extras)
assert f(path) == expected
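With the new signature the caller passes path_name explicitly and receives just the compiled regexp, so each table row above reduces to a plain match/no-match check. For example, using rows taken from match_data and mismatch_data (the import path is abbreviated):

from gitignore import fnmatch_translate  # abbreviated import path

regexp = fnmatch_translate("*.py[co]", False)
assert regexp.match("a/b/c.pyc") is not None  # match_data row
assert regexp.match("a.pyd") is None          # mismatch_data row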

View file

@@ -16,7 +16,6 @@ from . import fnmatch
from .. import localpaths
from ..gitignore.gitignore import PathFilter
from ..wpt import testfiles
from ..manifest.vcs import walk
from manifest.sourcefile import SourceFile, js_meta_re, python_meta_re, space_chars, get_any_variants, get_default_any_variants
from six import binary_type, iteritems, itervalues
@@ -68,11 +67,14 @@ def all_filesystem_paths(repo_root, subdir=None):
expanded_path = subdir
else:
expanded_path = repo_root
for dirpath, dirnames, filenames in path_filter(walk(expanded_path)):
for filename, _ in filenames:
path = os.path.join(dirpath, filename)
yield path
for dirpath, dirnames, filenames in os.walk(expanded_path):
for filename in filenames:
path = os.path.relpath(os.path.join(dirpath, filename), repo_root)
if path_filter(path):
yield path
dirnames[:] = [item for item in dirnames if
path_filter(os.path.relpath(os.path.join(dirpath, item) + "/",
repo_root)+"/")]
def _all_files_equal(paths):
"""

View file

@@ -401,13 +401,13 @@ def test_check_css_globally_unique_ignored_dir(caplog):
def test_all_filesystem_paths():
with mock.patch(
'tools.lint.lint.walk',
return_value=[('',
[('dir_a', None), ('dir_b', None)],
[('file_a', None), ('file_b', None)]),
('dir_a',
'os.walk',
return_value=[('.',
['dir_a', 'dir_b'],
['file_a', 'file_b']),
(os.path.join('.', 'dir_a'),
[],
[('file_c', None), ('file_d', None)])]
['file_c', 'file_d'])]
):
got = list(lint_mod.all_filesystem_paths('.'))
assert got == ['file_a',

View file

@@ -2,4 +2,5 @@
{"path": "update.py", "script": "run", "parser": "create_parser", "help": "Update the MANIFEST.json file",
"virtualenv": false},
"manifest-download":
{"path": "download.py", "script": "run", "parser": "create_parser", "help": "Download recent pregenerated MANIFEST.json file", "virtualenv": false}}
{"path": "download.py", "script": "run", "parser": "create_parser", "help": "Download recent pregenerated MANIFEST.json file",
"virtualenv": false}}

View file

@@ -2,21 +2,19 @@ from six.moves.urllib.parse import urljoin, urlparse
from abc import ABCMeta, abstractproperty
class SourceFileCache(object):
def __init__(self):
self.source_files = {}
def make_new(self, tests_root, path, url_base):
def get_source_file(source_files, tests_root, manifest, path):
def make_new():
from .sourcefile import SourceFile
return SourceFile(tests_root, path, url_base)
return SourceFile(tests_root, path, manifest.url_base)
def get(self, tests_root, manifest, path):
if source_files is None:
return make_new()
if path not in self.source_files:
self.source_files[path] = self.make_new(tests_root, path, manifest.url_base)
if path not in source_files:
source_files[path] = make_new()
return self.source_files[path]
return source_files[path]
item_types = {}
@@ -39,9 +37,8 @@ class ManifestItem(object):
item_type = None
source_file_cache = SourceFileCache()
def __init__(self, source_file, manifest=None):
self.manifest = manifest
self.source_file = source_file
@abstractproperty
@@ -87,8 +84,8 @@ class ManifestItem(object):
return [{}]
@classmethod
def from_json(cls, manifest, tests_root, path, obj):
source_file = cls.source_file_cache.get(tests_root, manifest, path)
def from_json(cls, manifest, tests_root, path, obj, source_files=None):
source_file = get_source_file(source_files, tests_root, manifest, path)
return cls(source_file,
manifest=manifest)
@@ -116,8 +113,8 @@ class URLManifestItem(ManifestItem):
return rv
@classmethod
def from_json(cls, manifest, tests_root, path, obj):
source_file = cls.source_file_cache.get(tests_root, manifest, path)
def from_json(cls, manifest, tests_root, path, obj, source_files=None):
source_file = get_source_file(source_files, tests_root, manifest, path)
url, extras = obj
return cls(source_file,
url,
@@ -148,8 +145,8 @@ class TestharnessTest(URLManifestItem):
return rv
@classmethod
def from_json(cls, manifest, tests_root, path, obj):
source_file = cls.source_file_cache.get(tests_root, manifest, path)
def from_json(cls, manifest, tests_root, path, obj, source_files=None):
source_file = get_source_file(source_files, tests_root, manifest, path)
url, extras = obj
return cls(source_file,
@@ -190,8 +187,8 @@ class RefTestNode(URLManifestItem):
return rv
@classmethod
def from_json(cls, manifest, tests_root, path, obj):
source_file = cls.source_file_cache.get(tests_root, manifest, path)
def from_json(cls, manifest, tests_root, path, obj, source_files=None):
source_file = get_source_file(source_files, tests_root, manifest, path)
url, references, extras = obj
return cls(source_file,
url,
@@ -251,8 +248,8 @@ class WebDriverSpecTest(URLManifestItem):
return rv
@classmethod
def from_json(cls, manifest, tests_root, path, obj):
source_file = cls.source_file_cache.get(tests_root, manifest, path)
def from_json(cls, manifest, tests_root, path, obj, source_files=None):
source_file = get_source_file(source_files, tests_root, manifest, path)
url, extras = obj
return cls(source_file,
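Reassembled from the interleaved lines above, the module-level helper that replaces SourceFileCache is simply an optional dict threaded through every from_json call, so a caller that supplies one still constructs at most one SourceFile per path:

def get_source_file(source_files, tests_root, manifest, path):
    def make_new():
        from .sourcefile import SourceFile
        return SourceFile(tests_root, path, manifest.url_base)

    if source_files is None:
        return make_new()
    if path not in source_files:
        source_files[path] = make_new()
    return source_files[path]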

View file

@@ -1,18 +1,13 @@
import itertools
import json
import os
from collections import defaultdict
from six import iteritems, iterkeys, itervalues, string_types
from six import iteritems, itervalues, viewkeys, string_types
from . import vcs
from .item import (ManualTest, WebDriverSpecTest, Stub, RefTestNode, RefTest,
TestharnessTest, SupportFile, ConformanceCheckerTest, VisualTest)
from .item import ManualTest, WebDriverSpecTest, Stub, RefTestNode, RefTest, TestharnessTest, SupportFile, ConformanceCheckerTest, VisualTest
from .log import get_logger
from .utils import from_os_path, to_os_path
try:
import ujson as json
except ImportError:
import json
CURRENT_VERSION = 5
@@ -32,173 +27,11 @@ def iterfilter(filters, iter):
yield item
item_classes = {"testharness": TestharnessTest,
"reftest": RefTest,
"reftest_node": RefTestNode,
"manual": ManualTest,
"stub": Stub,
"wdspec": WebDriverSpecTest,
"conformancechecker": ConformanceCheckerTest,
"visual": VisualTest,
"support": SupportFile}
class TypeData(object):
def __init__(self, manifest, type_cls, meta_filters):
"""Dict-like object containing the TestItems for each test type.
Loading an actual Item class for each test is unnecessarily
slow, so this class allows lazy-loading of the test
items. When the manifest is loaded we store the raw json
corresponding to the test type, and only create an Item
subclass when the test is accessed. In order to remain
API-compatible with consumers that depend on getting an Item
from iteration, we eagerly load all items when iterating
over the class."""
self.manifest = manifest
self.type_cls = type_cls
self.json_data = {}
self.tests_root = None
self.data = {}
self.meta_filters = meta_filters or []
def __getitem__(self, key):
if key not in self.data:
self.load(key)
return self.data[key]
def __bool__(self):
return bool(self.data)
def __len__(self):
rv = len(self.data)
if self.json_data is not None:
rv += len(self.json_data)
return rv
def __delitem__(self, key):
del self.data[key]
def __setitem__(self, key, value):
self.data[key] = value
def __contains__(self, key):
self.load_all()
return key in self.data
def __iter__(self):
self.load_all()
return self.data.__iter__()
def pop(self, key, default=None):
try:
value = self[key]
except ValueError:
value = default
else:
del self.data[key]
return value
def get(self, key, default=None):
try:
return self[key]
except ValueError:
return default
def itervalues(self):
self.load_all()
return itervalues(self.data)
def iteritems(self):
self.load_all()
return iteritems(self.data)
def values(self):
return self.itervalues()
def items(self):
return self.iteritems()
def load(self, key):
"""Load a specific Item given a path"""
if self.json_data is not None:
data = set()
path = from_os_path(key)
for test in iterfilter(self.meta_filters, self.json_data.get(path, [])):
manifest_item = self.type_cls.from_json(self.manifest,
self.tests_root,
path,
test)
data.add(manifest_item)
try:
del self.json_data[path]
except KeyError:
pass
self.data[key] = data
else:
raise ValueError
def load_all(self):
"""Load all test items in this class"""
if self.json_data is not None:
for path, value in iteritems(self.json_data):
key = to_os_path(path)
if key in self.data:
continue
data = set()
for test in iterfilter(self.meta_filters, self.json_data.get(path, [])):
manifest_item = self.type_cls.from_json(self.manifest,
self.tests_root,
path,
test)
data.add(manifest_item)
self.data[key] = data
self.json_data = None
def set_json(self, tests_root, data):
if not isinstance(data, dict):
raise ValueError("Got a %s expected a dict" % (type(data)))
self.tests_root = tests_root
self.json_data = data
def paths(self):
"""Get a list of all paths containing items of this type,
without actually constructing all the items"""
rv = set(iterkeys(self.data))
if self.json_data:
rv |= set(to_os_path(item) for item in iterkeys(self.json_data))
return rv
class ManifestData(dict):
def __init__(self, manifest, meta_filters=None):
"""Dictionary subclass containing a TypeData instance for each test type,
keyed by type name"""
self.initialized = False
for key, value in iteritems(item_classes):
self[key] = TypeData(manifest, value, meta_filters=meta_filters)
self.initialized = True
self.json_obj = None
def __setitem__(self, key, value):
if self.initialized:
raise AttributeError
dict.__setitem__(self, key, value)
def paths(self):
"""Get a list of all paths containing test items
without actually constructing all the items"""
rv = set()
for item_data in itervalues(self):
rv |= set(item_data.paths())
return rv
class Manifest(object):
def __init__(self, url_base="/", meta_filters=None):
def __init__(self, url_base="/"):
assert url_base is not None
self._path_hash = {}
self._data = ManifestData(self, meta_filters)
self._data = defaultdict(dict)
self._reftest_nodes_by_url = None
self.url_base = url_base
@@ -209,8 +42,7 @@ class Manifest(object):
if not types:
types = sorted(self._data.keys())
for item_type in types:
for path in sorted(self._data[item_type]):
tests = self._data[item_type][path]
for path, tests in sorted(iteritems(self._data[item_type])):
yield item_type, path, tests
def iterpath(self, path):
@@ -242,86 +74,61 @@ class Manifest(object):
return self.reftest_nodes_by_url.get(url)
def update(self, tree):
"""Update the manifest given an iterable of items that make up the updated manifest.
new_data = defaultdict(dict)
new_hashes = {}
The iterable must either generate tuples of the form (SourceFile, True) for paths
that are to be updated, or (path, False) for items that are not to be updated. This
unusual API is designed as an optimisation, meaning that SourceFile items need not be
constructed when we are not updating a path, but the absence of an item from
the iterator may be used to remove defunct entries from the manifest."""
reftest_nodes = []
seen_files = set()
old_files = defaultdict(set, {k: set(viewkeys(v)) for k, v in iteritems(self._data)})
changed = False
reftest_changes = False
prev_files = self._data.paths()
for source_file in tree:
rel_path = source_file.rel_path
file_hash = source_file.hash
reftest_types = ("reftest", "reftest_node")
is_new = rel_path not in self._path_hash
hash_changed = False
for source_file, update in tree:
if not update:
rel_path = source_file
seen_files.add(rel_path)
else:
rel_path = source_file.rel_path
seen_files.add(rel_path)
file_hash = source_file.hash
is_new = rel_path not in self._path_hash
hash_changed = False
if not is_new:
old_hash, old_type = self._path_hash[rel_path]
if old_hash != file_hash:
new_type, manifest_items = source_file.manifest_items()
hash_changed = True
else:
new_type, manifest_items = old_type, self._data[old_type][rel_path]
if old_type in reftest_types and new_type != old_type:
reftest_changes = True
else:
if not is_new:
old_hash, old_type = self._path_hash[rel_path]
old_files[old_type].remove(rel_path)
if old_hash != file_hash:
new_type, manifest_items = source_file.manifest_items()
if new_type in ("reftest", "reftest_node"):
reftest_nodes.extend(manifest_items)
if is_new or hash_changed:
reftest_changes = True
elif new_type:
self._data[new_type][rel_path] = set(manifest_items)
self._path_hash[rel_path] = (file_hash, new_type)
if is_new or hash_changed:
changed = True
deleted = prev_files - seen_files
if deleted:
changed = True
for rel_path in deleted:
if rel_path in self._path_hash:
_, old_type = self._path_hash[rel_path]
if old_type in reftest_types:
reftest_changes = True
try:
del self._path_hash[rel_path]
except KeyError:
pass
try:
del self._data[old_type][rel_path]
except KeyError:
pass
hash_changed = True
else:
for test_data in itervalues(self._data):
if rel_path in test_data:
del test_data[rel_path]
new_type, manifest_items = old_type, self._data[old_type][rel_path]
if old_type in ("reftest", "reftest_node") and new_type != old_type:
reftest_changes = True
else:
new_type, manifest_items = source_file.manifest_items()
if reftest_changes:
if new_type in ("reftest", "reftest_node"):
reftest_nodes.extend(manifest_items)
if is_new or hash_changed:
reftest_changes = True
elif new_type:
new_data[new_type][rel_path] = set(manifest_items)
new_hashes[rel_path] = (file_hash, new_type)
if is_new or hash_changed:
changed = True
if reftest_changes or old_files["reftest"] or old_files["reftest_node"]:
reftests, reftest_nodes, changed_hashes = self._compute_reftests(reftest_nodes)
self._data["reftest"].data = reftests
self._data["reftest_node"].data = reftest_nodes
self._path_hash.update(changed_hashes)
new_data["reftest"] = reftests
new_data["reftest_node"] = reftest_nodes
new_hashes.update(changed_hashes)
else:
new_data["reftest"] = self._data["reftest"]
new_data["reftest_node"] = self._data["reftest_node"]
if any(itervalues(old_files)):
changed = True
self._data = new_data
self._path_hash = new_hashes
return changed
@@ -361,7 +168,7 @@ class Manifest(object):
[t for t in sorted(test.to_json() for test in tests)]
for path, tests in iteritems(type_paths)
}
for test_type, type_paths in iteritems(self._data) if type_paths
for test_type, type_paths in iteritems(self._data)
}
rv = {"url_base": self.url_base,
"paths": {from_os_path(k): v for k, v in iteritems(self._path_hash)},
@@ -375,12 +182,26 @@ class Manifest(object):
if version != CURRENT_VERSION:
raise ManifestVersionMismatch
self = cls(url_base=obj.get("url_base", "/"), meta_filters=meta_filters)
self = cls(url_base=obj.get("url_base", "/"))
if not hasattr(obj, "items") and hasattr(obj, "paths"):
raise ManifestError
self._path_hash = {to_os_path(k): v for k, v in iteritems(obj["paths"])}
item_classes = {"testharness": TestharnessTest,
"reftest": RefTest,
"reftest_node": RefTestNode,
"manual": ManualTest,
"stub": Stub,
"wdspec": WebDriverSpecTest,
"conformancechecker": ConformanceCheckerTest,
"visual": VisualTest,
"support": SupportFile}
meta_filters = meta_filters or []
source_files = {}
for test_type, type_paths in iteritems(obj["items"]):
if test_type not in item_classes:
raise ManifestError
@@ -388,7 +209,18 @@ class Manifest(object):
if types and test_type not in types:
continue
self._data[test_type].set_json(tests_root, type_paths)
test_cls = item_classes[test_type]
tests = defaultdict(set)
for path, manifest_tests in iteritems(type_paths):
path = to_os_path(path)
for test in iterfilter(meta_filters, manifest_tests):
manifest_item = test_cls.from_json(self,
tests_root,
path,
test,
source_files=source_files)
tests[path].add(manifest_item)
self._data[test_type] = tests
return self
@@ -396,11 +228,6 @@
def load(tests_root, manifest, types=None, meta_filters=None):
logger = get_logger()
logger.warning("Prefer load_and_update instead")
return _load(logger, tests_root, manifest, types, meta_filters)
def _load(logger, tests_root, manifest, types=None, meta_filters=None):
# "manifest" is a path or file-like object.
if isinstance(manifest, string_types):
if os.path.exists(manifest):
@@ -409,10 +236,7 @@ def _load(logger, tests_root, manifest, types=None, meta_filters=None):
logger.debug("Creating new manifest at %s" % manifest)
try:
with open(manifest) as f:
rv = Manifest.from_json(tests_root,
json.load(f),
types=types,
meta_filters=meta_filters)
rv = Manifest.from_json(tests_root, json.load(f), types=types, meta_filters=meta_filters)
except IOError:
return None
except ValueError:
@@ -420,52 +244,7 @@ def _load(logger, tests_root, manifest, types=None, meta_filters=None):
return None
return rv
return Manifest.from_json(tests_root,
json.load(manifest),
types=types,
meta_filters=meta_filters)
def load_and_update(tests_root,
manifest_path,
url_base,
update=True,
rebuild=False,
metadata_path=None,
cache_root=None,
working_copy=False,
types=None,
meta_filters=None,
write_manifest=True):
logger = get_logger()
manifest = None
if not rebuild:
try:
manifest = _load(logger,
tests_root,
manifest_path,
types=types,
meta_filters=meta_filters)
except ManifestVersionMismatch:
logger.info("Manifest version changed, rebuilding")
if manifest is not None and manifest.url_base != url_base:
logger.info("Manifest url base did not match, rebuilding")
if manifest is None:
manifest = Manifest(url_base, meta_filters=meta_filters)
update = True
if update:
tree = vcs.get_tree(tests_root, manifest, manifest_path, cache_root,
working_copy, rebuild)
changed = manifest.update(tree)
if write_manifest and changed:
write(manifest, manifest_path)
tree.dump_caches()
return manifest
return Manifest.from_json(tests_root, json.load(manifest), types=types, meta_filters=meta_filters)
def write(manifest, manifest_path):
@@ -473,5 +252,5 @@ def write(manifest, manifest_path):
if not os.path.exists(dir_name):
os.makedirs(dir_name)
with open(manifest_path, "wb") as f:
json.dump(manifest.to_json(), f, sort_keys=True, indent=1)
json.dump(manifest.to_json(), f, sort_keys=True, indent=1, separators=(',', ': '))
f.write("\n")
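Two things about the restored manifest.py are easy to miss in the diff. First, Manifest.update() again takes a plain iterable of SourceFile objects (not (item, updated) tuples), rebuilds new_data/new_hashes from scratch, and detects deletions as whatever is left in the old_files snapshot. A hedged usage outline (paths and import layout are illustrative):

from manifest import manifest, vcs  # assumed layout

tests_root = "/path/to/web-platform-tests"     # illustrative
manifest_path = tests_root + "/MANIFEST.json"  # illustrative

m = manifest.Manifest(url_base="/")
tree = vcs.FileSystem(tests_root, m.url_base)  # yields SourceFile objects
if m.update(tree):                             # True on add, change, or delete
    manifest.write(m, manifest_path)

Second, the separators=(',', ': ') argument restored in write() matters on Python 2: when indent is set, json.dump's default item separator is ', ', which leaves a trailing space at the end of every line; passing ',' explicitly keeps the dumped MANIFEST.json free of trailing whitespace.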

View file

@@ -72,7 +72,7 @@ def sourcefile_strategy(draw):
def test_manifest_to_json(s):
m = manifest.Manifest()
assert m.update((item, True) for item in s) is True
assert m.update(s) is True
json_str = m.to_json()
loaded = manifest.Manifest.from_json("/", json_str)
@@ -90,11 +90,11 @@ def test_manifest_to_json(s):
def test_manifest_idempotent(s):
m = manifest.Manifest()
assert m.update((item, True) for item in s) is True
assert m.update(s) is True
m1 = list(m)
assert m.update((item, True) for item in s) is False
assert m.update(s) is False
assert list(m) == m1
@@ -104,7 +104,7 @@ def test_manifest_to_json_forwardslash():
s = SourceFileWithTest("a/b", "0"*40, item.TestharnessTest)
assert m.update([(s, True)]) is True
assert m.update([s]) is True
assert m.to_json() == {
'paths': {
@@ -113,6 +113,8 @@ def test_manifest_to_json_forwardslash():
'version': 5,
'url_base': '/',
'items': {
'reftest': {},
'reftest_node': {},
'testharness': {
'a/b': [['/a/b', {}]]
}
@@ -126,7 +128,7 @@ def test_manifest_to_json_backslash():
s = SourceFileWithTest("a\\b", "0"*40, item.TestharnessTest)
if os.path.sep == "\\":
assert m.update([(s, True)]) is True
assert m.update([s]) is True
assert m.to_json() == {
'paths': {
@@ -135,6 +137,8 @@ def test_manifest_to_json_backslash():
'version': 5,
'url_base': '/',
'items': {
'reftest': {},
'reftest_node': {},
'testharness': {
'a/b': [['/a/b', {}]]
}
@@ -144,7 +148,7 @@ def test_manifest_to_json_backslash():
with pytest.raises(ValueError):
# one of these must raise ValueError
# the first must return True if it doesn't raise
assert m.update([(s, True)]) is True
assert m.update([s]) is True
m.to_json()
@@ -156,6 +160,8 @@ def test_manifest_from_json_backslash():
'version': 5,
'url_base': '/',
'items': {
'reftest': {},
'reftest_node': {},
'testharness': {
'a\\b': [['/a/b', {}]]
}
@@ -172,7 +178,7 @@ def test_reftest_computation_chain():
s1 = SourceFileWithTest("test1", "0"*40, item.RefTest, [("/test2", "==")])
s2 = SourceFileWithTest("test2", "0"*40, item.RefTest, [("/test3", "==")])
m.update([(s1, True), (s2, True)])
m.update([s1, s2])
test1 = s1.manifest_items()[1][0]
test2 = s2.manifest_items()[1][0]
@@ -188,7 +194,7 @@ def test_reftest_computation_chain_update_add():
s2 = SourceFileWithTest("test2", "0"*40, item.RefTest, [("/test3", "==")])
test2 = s2.manifest_items()[1][0]
assert m.update([(s2, True)]) is True
assert m.update([s2]) is True
assert list(m) == [("reftest", test2.path, {test2})]
@@ -196,7 +202,7 @@ def test_reftest_computation_chain_update_add():
test1 = s1.manifest_items()[1][0]
# s2's hash is unchanged, but it has gone from a test to a node
assert m.update([(s1, True), (s2, True)]) is True
assert m.update([s1, s2]) is True
test2_node = test2.to_RefTestNode()
@@ -210,7 +216,7 @@ def test_reftest_computation_chain_update_remove():
s1 = SourceFileWithTest("test1", "0"*40, item.RefTest, [("/test2", "==")])
s2 = SourceFileWithTest("test2", "0"*40, item.RefTest, [("/test3", "==")])
assert m.update([(s1, True), (s2, True)]) is True
assert m.update([s1, s2]) is True
test1 = s1.manifest_items()[1][0]
test2 = s2.manifest_items()[1][0]
@@ -220,7 +226,7 @@ def test_reftest_computation_chain_update_remove():
("reftest_node", test2.path, {test2_node})]
# s2's hash is unchanged, but it has gone from a node to a test
assert m.update([(s2, True)]) is True
assert m.update([s2]) is True
assert list(m) == [("reftest", test2.path, {test2})]
@@ -230,7 +236,7 @@ def test_reftest_computation_chain_update_test_type():
s1 = SourceFileWithTest("test", "0"*40, item.RefTest, [("/test-ref", "==")])
assert m.update([(s1, True)]) is True
assert m.update([s1]) is True
test1 = s1.manifest_items()[1][0]
@@ -240,7 +246,7 @@ def test_reftest_computation_chain_update_test_type():
# based on the file contents). The updated manifest should not include the
# old reftest.
s2 = SourceFileWithTest("test", "1"*40, item.TestharnessTest)
assert m.update([(s2, True)]) is True
assert m.update([s2]) is True
test2 = s2.manifest_items()[1][0]
@@ -253,7 +259,7 @@ def test_reftest_computation_chain_update_node_change():
s1 = SourceFileWithTest("test1", "0"*40, item.RefTest, [("/test2", "==")])
s2 = SourceFileWithTest("test2", "0"*40, item.RefTestNode, [("/test3", "==")])
assert m.update([(s1, True), (s2, True)]) is True
assert m.update([s1, s2]) is True
test1 = s1.manifest_items()[1][0]
test2 = s2.manifest_items()[1][0]
@@ -264,7 +270,7 @@ def test_reftest_computation_chain_update_node_change():
#test2 changes to support type
s2 = SourceFileWithTest("test2", "1"*40, item.SupportFile)
assert m.update([(s1, True), (s2, True)]) is True
assert m.update([s1,s2]) is True
test3 = s2.manifest_items()[1][0]
assert list(m) == [("reftest", test1.path, {test1}),
@@ -274,14 +280,12 @@ def test_reftest_computation_chain_update_node_change():
def test_iterpath():
m = manifest.Manifest()
# This has multiple test types from the same file, which isn't really supported,
# so pretend they have different hashes
sources = [SourceFileWithTest("test1", "0"*40, item.RefTest, [("/test1-ref", "==")]),
SourceFileWithTest("test2", "0"*40, item.RefTest, [("/test2-ref", "==")]),
SourceFileWithTests("test2", "1"*40, item.TestharnessTest, [("/test2-1.html",),
SourceFileWithTests("test2", "0"*40, item.TestharnessTest, [("/test2-1.html",),
("/test2-2.html",)]),
SourceFileWithTest("test3", "0"*40, item.TestharnessTest)]
m.update([(s, True) for s in sources])
m.update(sources)
assert set(item.url for item in m.iterpath("test2")) == set(["/test2",
"/test2-1.html",
@@ -292,14 +296,12 @@ def test_iterpath():
def test_filter():
m = manifest.Manifest()
# This has multiple test types from the same file, which isn't really supported,
# so pretend they have different hashes
sources = [SourceFileWithTest("test1", "0"*40, item.RefTest, [("/test1-ref", "==")]),
SourceFileWithTest("test2", "1"*40, item.RefTest, [("/test2-ref", "==")]),
SourceFileWithTest("test2", "0"*40, item.RefTest, [("/test2-ref", "==")]),
SourceFileWithTests("test2", "0"*40, item.TestharnessTest, [("/test2-1.html",),
("/test2-2.html",)]),
SourceFileWithTest("test3", "0"*40, item.TestharnessTest)]
m.update([(s, True) for s in sources])
m.update(sources)
json = m.to_json()
@@ -326,7 +328,7 @@ def test_reftest_node_by_url():
s1 = SourceFileWithTest("test1", "0"*40, item.RefTest, [("/test2", "==")])
s2 = SourceFileWithTest("test2", "0"*40, item.RefTest, [("/test3", "==")])
m.update([(s1, True), (s2, True)])
m.update([s1, s2])
test1 = s1.manifest_items()[1][0]
test2 = s2.manifest_items()[1][0]
@@ -337,44 +339,3 @@
m._reftest_nodes_by_url = None
assert m.reftest_nodes_by_url == {"/test1": test1,
"/test2": test2_node}
def test_no_update():
m = manifest.Manifest()
s1 = SourceFileWithTest("test1", "0"*40, item.TestharnessTest)
s2 = SourceFileWithTest("test2", "0"*40, item.TestharnessTest)
m.update([(s1, True), (s2, True)])
test1 = s1.manifest_items()[1][0]
test2 = s2.manifest_items()[1][0]
assert list(m) == [("testharness", test1.path, {test1}),
("testharness", test2.path, {test2})]
s1_1 = SourceFileWithTest("test1", "1"*40, item.TestharnessTest)
m.update([(s1, True), (s2.rel_path, False)])
test1_1 = s1_1.manifest_items()[1][0]
assert list(m) == [("testharness", test1_1.path, {test1_1}),
("testharness", test2.path, {test2})]
def test_no_update_delete():
m = manifest.Manifest()
s1 = SourceFileWithTest("test1", "0"*40, item.TestharnessTest)
s2 = SourceFileWithTest("test2", "0"*40, item.TestharnessTest)
m.update([(s1, True), (s2, True)])
s1_1 = SourceFileWithTest("test1", "1"*40, item.TestharnessTest)
m.update([(s1, True)])
test1_1 = s1_1.manifest_items()[1][0]
assert list(m) == [("testharness", test1_1.path, {test1_1})]

View file

@@ -13,18 +13,14 @@ wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
logger = get_logger()
def update(tests_root,
manifest,
manifest_path=None,
working_copy=False,
cache_root=None,
rebuild=False):
logger.warning("Deprecated; use manifest.load_and_update instead")
def update(tests_root, manifest, working_copy=False):
logger.info("Updating manifest")
tree = None
if not working_copy:
tree = vcs.Git.for_path(tests_root, manifest.url_base)
if tree is None:
tree = vcs.FileSystem(tests_root, manifest.url_base)
tree = vcs.get_tree(tests_root, manifest, manifest_path, cache_root,
working_copy, rebuild)
return manifest.update(tree)
@@ -33,16 +29,26 @@ def update_from_cli(**kwargs):
path = kwargs["path"]
assert tests_root is not None
m = None
if kwargs["download"]:
download_from_github(path, tests_root)
manifest.load_and_update(tests_root,
path,
kwargs["url_base"],
update=True,
rebuild=kwargs["rebuild"],
cache_root=kwargs["cache_root"],
working_copy=kwargs["work"])
if not kwargs.get("rebuild", False):
try:
m = manifest.load(tests_root, path)
except manifest.ManifestVersionMismatch:
logger.info("Manifest version changed, rebuilding")
m = None
if m is None:
m = manifest.Manifest(kwargs["url_base"])
changed = update(tests_root,
m,
working_copy=kwargs["work"])
if changed:
manifest.write(m, path)
def abs_path(path):
@@ -67,9 +73,6 @@ def create_parser():
parser.add_argument(
"--no-download", dest="download", action="store_false", default=True,
help="Never attempt to download the manifest.")
parser.add_argument(
"--cache-root", action="store", default=os.path.join(wpt_root, ".wptcache"),
help="Path in which to store any caches (default <tests_root>/.wptcache/")
return parser
@@ -84,9 +87,10 @@ def find_top_repo():
return rv
def run(*args, **kwargs):
def run(**kwargs):
if kwargs["path"] is None:
kwargs["path"] = os.path.join(kwargs["tests_root"], "MANIFEST.json")
update_from_cli(**kwargs)

View file

@@ -1,46 +1,15 @@
import json
import os
import platform
import stat
import subprocess
from collections import deque
import platform
from .sourcefile import SourceFile
def get_tree(tests_root, manifest, manifest_path, cache_root,
working_copy=False, rebuild=False):
tree = None
if cache_root is None:
cache_root = os.path.join(tests_root, ".wptcache")
if not os.path.exists(cache_root):
try:
os.makedirs(cache_root)
except IOError:
cache_root = None
if not working_copy:
tree = Git.for_path(tests_root,
manifest.url_base,
manifest_path=manifest_path,
cache_path=cache_root,
rebuild=rebuild)
if tree is None:
tree = FileSystem(tests_root,
manifest.url_base,
manifest_path=manifest_path,
cache_path=cache_root,
rebuild=rebuild)
return tree
class Git(object):
def __init__(self, repo_root, url_base, cache_path, manifest_path=None,
rebuild=False):
self.root = repo_root
def __init__(self, repo_root, url_base):
self.root = os.path.abspath(repo_root)
self.git = Git.get_func(repo_root)
self.url_base = url_base
# rebuild is a noop for now since we don't cache anything
@staticmethod
def get_func(repo_path):
@@ -57,11 +26,10 @@ class Git(object):
return git
@classmethod
def for_path(cls, path, url_base, cache_path, manifest_path=None, rebuild=False):
def for_path(cls, path, url_base):
git = Git.get_func(path)
try:
return cls(git("rev-parse", "--show-toplevel").rstrip(), url_base, cache_path,
manifest_path=manifest_path, rebuild=rebuild)
return cls(git("rev-parse", "--show-toplevel").rstrip(), url_base)
except subprocess.CalledProcessError:
return None
@@ -106,194 +74,27 @@ class Git(object):
rel_path,
self.url_base,
hash,
contents=contents), True
def dump_caches(self):
pass
contents=contents)
class FileSystem(object):
def __init__(self, root, url_base, cache_path, manifest_path=None, rebuild=False):
from gitignore import gitignore
self.root = os.path.abspath(root)
def __init__(self, root, url_base):
self.root = root
self.url_base = url_base
self.ignore_cache = None
self.mtime_cache = None
if cache_path is not None:
if manifest_path is not None:
self.mtime_cache = MtimeCache(cache_path, root, manifest_path, rebuild)
if gitignore.has_ignore(root):
self.ignore_cache = GitIgnoreCache(cache_path, root, rebuild)
self.path_filter = gitignore.PathFilter(self.root,
extras=[".git/"],
cache=self.ignore_cache)
from gitignore import gitignore
self.path_filter = gitignore.PathFilter(self.root, extras=[".git/"])
def __iter__(self):
mtime_cache = self.mtime_cache
for dirpath, dirnames, filenames in self.path_filter(walk(self.root)):
for filename, path_stat in filenames:
path = os.path.join(dirpath, filename)
if mtime_cache is None or mtime_cache.updated(path, path_stat):
yield SourceFile(self.root, path, self.url_base), True
else:
yield path, False
paths = self.get_paths()
for path in paths:
yield SourceFile(self.root, path, self.url_base)
def dump_caches(self):
for cache in [self.mtime_cache, self.ignore_cache]:
if cache is not None:
cache.dump()
def get_paths(self):
for dirpath, dirnames, filenames in os.walk(self.root):
for filename in filenames:
path = os.path.relpath(os.path.join(dirpath, filename), self.root)
if self.path_filter(path):
yield path
class CacheFile(object):
file_name = None
def __init__(self, cache_root, tests_root, rebuild=False):
self.tests_root = tests_root
if not os.path.exists(cache_root):
os.makedirs(cache_root)
self.path = os.path.join(cache_root, self.file_name)
self.modified = False
self.data = self.load(rebuild)
def dump(self):
if not self.modified:
return
with open(self.path, 'w') as f:
json.dump(self.data, f, indent=1)
def load(self, rebuild=False):
data = {}
try:
if not rebuild:
with open(self.path, 'r') as f:
data = json.load(f)
data = self.check_valid(data)
except IOError:
pass
return data
def check_valid(self, data):
"""Check if the cached data is valid and return an updated copy of the
cache containing only data that can be used."""
return data
class MtimeCache(CacheFile):
file_name = "mtime.json"
def __init__(self, cache_root, tests_root, manifest_path, rebuild=False):
self.manifest_path = manifest_path
super(MtimeCache, self).__init__(cache_root, tests_root, rebuild=False)
def updated(self, rel_path, stat):
"""Return a boolean indicating whether the file changed since the cache was last updated.
This implicitly updates the cache with the new mtime data."""
mtime = stat.st_mtime
if mtime != self.data.get(rel_path):
self.modified = True
self.data[rel_path] = mtime
return True
return False
def check_valid(self, data):
if data.get("/tests_root") != self.tests_root:
self.modified = True
else:
if self.manifest_path is not None and os.path.exists(self.manifest_path):
mtime = os.path.getmtime(self.manifest_path)
if data.get("/manifest_path") != [self.manifest_path, mtime]:
self.modified = True
else:
self.modified = True
if self.modified:
data = {}
data["/tests_root"] = self.tests_root
return data
def dump(self):
if self.manifest_path is None:
raise ValueError
if not os.path.exists(self.manifest_path):
return
mtime = os.path.getmtime(self.manifest_path)
self.data["/manifest_path"] = [self.manifest_path, mtime]
self.data["/tests_root"] = self.tests_root
super(MtimeCache, self).dump()
class GitIgnoreCache(CacheFile):
file_name = "gitignore.json"
def check_valid(self, data):
ignore_path = os.path.join(self.tests_root, ".gitignore")
mtime = os.path.getmtime(ignore_path)
if data.get("/gitignore_file") != [ignore_path, mtime]:
self.modified = True
data = {}
data["/gitignore_file"] = [ignore_path, mtime]
return data
def __contains__(self, key):
return key in self.data
def __getitem__(self, key):
return self.data[key]
def __setitem__(self, key, value):
if self.data.get(key) != value:
self.modified = True
self.data[key] = value
def walk(root):
"""Re-implementation of os.walk. Returns an iterator over
(dirpath, dirnames, filenames), with some semantic differences
to os.walk.
This has a similar interface to os.walk, with the important difference
that instead of lists of filenames and directory names, it yields
lists of tuples of the form [(name, stat)] where stat is the result of
os.stat for the file. That allows reusing the same stat data in the
caller. It also always returns the dirpath relative to the root, with
the root itself being returned as the empty string.
Unlike os.walk the implementation is not recursive."""
listdir = os.listdir
get_stat = os.stat
listdir = os.listdir
join = os.path.join
is_dir = stat.S_ISDIR
is_link = stat.S_ISLNK
relpath = os.path.relpath
root = os.path.abspath(root)
stack = deque([(root, "")])
while stack:
dir_path, rel_path = stack.popleft()
try:
# Note that listdir and error are globals in this module due
# to earlier import-*.
names = listdir(dir_path)
except OSError:
continue
dirs, non_dirs = [], []
for name in names:
path = join(dir_path, name)
try:
path_stat = get_stat(path)
except OSError:
continue
if is_dir(path_stat.st_mode):
dirs.append((name, path_stat))
else:
non_dirs.append((name, path_stat))
yield rel_path, dirs, non_dirs
for name, path_stat in dirs:
new_path = join(dir_path, name)
if not is_link(path_stat.st_mode):
stack.append((new_path, relpath(new_path, root)))
dirnames[:] = [item for item in dirnames if self.path_filter(
os.path.relpath(os.path.join(dirpath, item), self.root) + "/")]
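After the backout both tree classes obey one implicit protocol: iterating them yields SourceFile objects, with no caches to construct or dump. The tree selection restored in manifest/update.py above therefore reduces to a sketch like this (the standalone helper and import layout are illustrative):

from manifest import vcs  # assumed layout

def make_tree(tests_root, url_base, working_copy=False):
    # Mirrors the restored update() in manifest/update.py above.
    tree = None
    if not working_copy:
        # for_path() returns None when tests_root is not a git checkout
        tree = vcs.Git.for_path(tests_root, url_base)
    if tree is None:
        tree = vcs.FileSystem(tests_root, url_base)
    return tree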

View file

@@ -8,7 +8,7 @@ import sys
from collections import OrderedDict
from six import iteritems
from ..manifest import manifest
from ..manifest import manifest, update
here = os.path.dirname(__file__)
wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
@@ -190,8 +190,10 @@ def _init_manifest_cache():
return c[manifest_path]
# cache at most one path:manifest
c.clear()
wpt_manifest = manifest.load_and_update(wpt_root, manifest_path, "/",
update=True)
wpt_manifest = manifest.load(wpt_root, manifest_path)
if wpt_manifest is None:
wpt_manifest = manifest.Manifest()
update.update(wpt_root, wpt_manifest)
c[manifest_path] = wpt_manifest
return c[manifest_path]
return load

View file

@@ -1,4 +1,5 @@
import hashlib
import json
import os
import urlparse
from abc import ABCMeta, abstractmethod
@@ -366,7 +367,6 @@ class TestFilter(object):
if include_tests:
yield test_type, test_path, include_tests
class TagFilter(object):
def __init__(self, tags):
self.tags = set(tags)
@@ -378,8 +378,7 @@ class TagFilter(object):
class ManifestLoader(object):
def __init__(self, test_paths, force_manifest_update=False, manifest_download=False,
types=None, meta_filters=None):
def __init__(self, test_paths, force_manifest_update=False, manifest_download=False, types=None, meta_filters=None):
do_delayed_imports()
self.test_paths = test_paths
self.force_manifest_update = force_manifest_update
@@ -400,12 +399,57 @@ class ManifestLoader(object):
rv[manifest_file] = path_data
return rv
def load_manifest(self, tests_path, manifest_path, metadata_path, url_base="/", **kwargs):
cache_root = os.path.join(metadata_path, ".cache")
if self.manifest_download:
def create_manifest(self, manifest_path, tests_path, url_base="/"):
self.update_manifest(manifest_path, tests_path, url_base, recreate=True,
download=self.manifest_download)
def update_manifest(self, manifest_path, tests_path, url_base="/",
recreate=False, download=False):
self.logger.info("Updating test manifest %s" % manifest_path)
manifest_log.setup()
json_data = None
if download:
# TODO: make this not github-specific
download_from_github(manifest_path, tests_path)
return manifest.load_and_update(tests_path, manifest_path, url_base,
cache_root=cache_root, update=self.force_manifest_update)
if not recreate:
try:
with open(manifest_path) as f:
json_data = json.load(f)
except IOError:
self.logger.info("Unable to find test manifest")
except ValueError:
self.logger.info("Unable to parse test manifest")
if not json_data:
self.logger.info("Creating test manifest")
manifest_file = manifest.Manifest(url_base)
else:
try:
manifest_file = manifest.Manifest.from_json(tests_path, json_data)
except manifest.ManifestVersionMismatch:
manifest_file = manifest.Manifest(url_base)
manifest_update.update(tests_path, manifest_file, True)
manifest.write(manifest_file, manifest_path)
def load_manifest(self, tests_path, manifest_path, url_base="/", **kwargs):
if (not os.path.exists(manifest_path) or
self.force_manifest_update):
self.update_manifest(manifest_path, tests_path, url_base, download=self.manifest_download)
try:
manifest_file = manifest.load(tests_path, manifest_path, types=self.types, meta_filters=self.meta_filters)
except manifest.ManifestVersionMismatch:
manifest_file = manifest.Manifest(url_base)
if manifest_file.url_base != url_base:
self.logger.info("Updating url_base in manifest from %s to %s" % (manifest_file.url_base,
url_base))
manifest_file.url_base = url_base
manifest.write(manifest_file, manifest_path)
return manifest_file
def iterfilter(filters, iter):
@@ -465,12 +509,12 @@ class TestLoader(object):
self._test_ids += [item.id for item in test_dict[test_type]]
return self._test_ids
def get_test(self, manifest_file, manifest_test, inherit_metadata, test_metadata):
def get_test(self, manifest_test, inherit_metadata, test_metadata):
if test_metadata is not None:
inherit_metadata.append(test_metadata)
test_metadata = test_metadata.get_test(manifest_test.id)
return wpttest.from_manifest(manifest_file, manifest_test, inherit_metadata, test_metadata)
return wpttest.from_manifest(manifest_test, inherit_metadata, test_metadata)
def load_dir_metadata(self, test_manifest, metadata_path, test_path):
rv = []
@@ -493,29 +537,27 @@
def iter_tests(self):
manifest_items = []
manifests_by_url_base = {}
for manifest in sorted(self.manifests.keys(), key=lambda x:x.url_base):
manifest_iter = iterfilter(self.manifest_filters,
manifest.itertypes(*self.test_types))
manifest_items.extend(manifest_iter)
manifests_by_url_base[manifest.url_base] = manifest
if self.chunker is not None:
manifest_items = self.chunker(manifest_items)
for test_type, test_path, tests in manifest_items:
manifest_file = manifests_by_url_base[iter(tests).next().url_base]
manifest_file = iter(tests).next().manifest
metadata_path = self.manifests[manifest_file]["metadata_path"]
inherit_metadata, test_metadata = self.load_metadata(manifest_file, metadata_path, test_path)
for test in iterfilter(self.meta_filters,
self.iter_wpttest(manifest_file, inherit_metadata, test_metadata, tests)):
self.iter_wpttest(inherit_metadata, test_metadata, tests)):
yield test_path, test_type, test
def iter_wpttest(self, manifest_file, inherit_metadata, test_metadata, tests):
def iter_wpttest(self, inherit_metadata, test_metadata, tests):
for manifest_test in tests:
yield self.get_test(manifest_file, manifest_test, inherit_metadata, test_metadata)
yield self.get_test(manifest_test, inherit_metadata, test_metadata)
def _load_tests(self):
"""Read in the tests from the manifest file and add them to a queue"""

View file

@@ -98,7 +98,7 @@ def create_test_manifest(tests, url_base="/"):
source_files = []
for i, (test, _, test_type, _) in enumerate(tests):
if test_type:
source_files.append((SourceFileWithTest(test, str(i) * 40, item_classes[test_type]), True))
source_files.append(SourceFileWithTest(test, str(i) * 40, item_classes[test_type]))
m = manifest.Manifest()
m.update(source_files)
return m

View file

@@ -60,7 +60,7 @@ def test_metadata_inherit():
url_base="")
test = tests[0][2].pop()
test_obj = wpttest.from_manifest(tests, test, inherit_metadata, test_metadata.get_test(test.id))
test_obj = wpttest.from_manifest(test, inherit_metadata, test_metadata.get_test(test.id))
assert test_obj.max_assertion_count == 3
assert test_obj.min_assertion_count == 1
assert test_obj.prefs == {"b": "c", "c": "d"}
@@ -78,7 +78,7 @@ def test_conditional():
url_base="")
test = tests[1][2].pop()
test_obj = wpttest.from_manifest(tests, test, [], test_metadata.get_test(test.id))
test_obj = wpttest.from_manifest(test, [], test_metadata.get_test(test.id))
assert test_obj.prefs == {"a": "b", "c": "d"}
assert test_obj.expected() == "FAIL"

View file

@@ -146,7 +146,7 @@ class Test(object):
return metadata
@classmethod
def from_manifest(cls, manifest_file, manifest_item, inherit_metadata, test_metadata):
def from_manifest(cls, manifest_item, inherit_metadata, test_metadata):
timeout = cls.long_timeout if manifest_item.timeout == "long" else cls.default_timeout
protocol = "https" if hasattr(manifest_item, "https") and manifest_item.https else "http"
return cls(manifest_item.source_file.tests_root,
@@ -302,7 +302,7 @@ class TestharnessTest(Test):
self.scripts = scripts or []
@classmethod
def from_manifest(cls, manifest_file, manifest_item, inherit_metadata, test_metadata):
def from_manifest(cls, manifest_item, inherit_metadata, test_metadata):
timeout = cls.long_timeout if manifest_item.timeout == "long" else cls.default_timeout
protocol = "https" if hasattr(manifest_item, "https") and manifest_item.https else "http"
testdriver = manifest_item.testdriver if hasattr(manifest_item, "testdriver") else False
@@ -352,7 +352,6 @@ class ReftestTest(Test):
@classmethod
def from_manifest(cls,
manifest_file,
manifest_test,
inherit_metadata,
test_metadata,
@@ -395,10 +394,9 @@ class ReftestTest(Test):
references_seen.add(comparison_key)
manifest_node = manifest_file.get_reference(ref_url)
manifest_node = manifest_test.manifest.get_reference(ref_url)
if manifest_node:
reference = ReftestTest.from_manifest(manifest_file,
manifest_node,
reference = ReftestTest.from_manifest(manifest_node,
[],
None,
nodes,
@@ -450,6 +448,6 @@ manifest_test_cls = {"reftest": ReftestTest,
"wdspec": WdspecTest}
def from_manifest(manifest_file, manifest_test, inherit_metadata, test_metadata):
def from_manifest(manifest_test, inherit_metadata, test_metadata):
test_cls = manifest_test_cls[manifest_test.item_type]
return test_cls.from_manifest(manifest_file, manifest_test, inherit_metadata, test_metadata)
return test_cls.from_manifest(manifest_test, inherit_metadata, test_metadata)