Bug 1524639: [taskgraph] Change `load_yaml` to accept any number of path components; r=dustin

Differential Revision: https://phabricator.services.mozilla.com/D18374

--HG--
extra : moz-landing-system : lando
Tom Prince 2019-02-04 16:55:54 +00:00
Parent 1e846830f3
Commit 6e635b61b5
11 changed files with 27 additions and 37 deletions
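In short, `load_yaml` previously took exactly two arguments, `(path, name)`, and joined them itself, so callers holding a full filename had to split it apart first; with this change it accepts any number of path components. A minimal before/after sketch of the call style, based on the call sites updated in the hunks below (the example paths are illustrative, not taken from the patch):

import os

from taskgraph.util.yaml import load_yaml

filename = 'taskcluster/ci/config.yml'  # illustrative path

# Before: load_yaml(path, name) required exactly two components, so a full
# path had to be split into (directory, basename) before the call.
config = load_yaml(*os.path.split(filename))

# After: pass the path as-is, or as any number of components to be joined.
config = load_yaml(filename)
config = load_yaml('taskcluster', 'ci', 'config.yml')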

View file

@@ -37,7 +37,7 @@ def is_json(data):
 @memoize
 def read_taskcluster_yml(filename):
     '''Load and parse .taskcluster.yml, memoized to save some time'''
-    return yaml.load_yaml(*os.path.split(filename))
+    return yaml.load_yaml(filename)
 @memoize

View file

@@ -7,11 +7,11 @@ from __future__ import absolute_import, print_function, unicode_literals
 import os
 import logging
 import attr
-import yaml
 from mozpack import path
 from .util.schema import validate_schema, Schema, optionally_keyed_by
 from voluptuous import Required, Optional, Any
+from .util.yaml import load_yaml
 logger = logging.getLogger(__name__)
@@ -100,8 +100,7 @@ def load_graph_config(root_dir):
         raise Exception("Couldn't find taskgraph configuration: {}".format(config_yml))
     logger.debug("loading config from `{}`".format(config_yml))
-    with open(config_yml) as f:
-        config = yaml.safe_load(f)
+    config = load_yaml(config_yml)
     validate_graph_config(config)
     return GraphConfig(config=config, root_dir=root_dir)

View file

@@ -12,7 +12,6 @@ import json
 import logging
 import os
 import traceback
-import yaml
 from . import decision, schema
 from .util import match_utc
@@ -22,6 +21,7 @@ from taskgraph.util.attributes import match_run_on_projects
 from taskgraph.util.hg import calculate_head_rev
 from taskgraph.util.schema import resolve_keyed_by
 from taskgraph.util.taskcluster import get_session
+from taskgraph.util.yaml import load_yaml
 # Functions to handle each `job.type` in `.cron.yml`. These are called with
 # the contents of the `job` property from `.cron.yml` and should return a
@@ -35,8 +35,7 @@ logger = logging.getLogger(__name__)
 def load_jobs(params, root):
-    with open(os.path.join(root, '.cron.yml'), 'rb') as f:
-        cron_yml = yaml.safe_load(f)
+    cron_yml = load_yaml(root, '.cron.yml')
     schema.validate(cron_yml)
     # resolve keyed_by fields in each job

View file

@@ -9,12 +9,12 @@ from __future__ import absolute_import, print_function, unicode_literals
 import jsone
 import pipes
-import yaml
 import os
 import slugid
 from taskgraph.util.time import current_json_time
 from taskgraph.util.hg import find_hg_revision_push_info
+from taskgraph.util.yaml import load_yaml
 def run_decision_task(job, params, root):
@@ -36,8 +36,7 @@ def run_decision_task(job, params, root):
 def make_decision_task(params, root, symbol, arguments=[]):
     """Generate a basic decision task, based on the root .taskcluster.yml"""
-    with open(os.path.join(root, '.taskcluster.yml'), 'rb') as f:
-        taskcluster_yml = yaml.safe_load(f)
+    taskcluster_yml = load_yaml(root, '.taskcluster.yml')
     push_info = find_hg_revision_push_info(
         params['repository_url'],

View file

@@ -5,7 +5,6 @@
 from __future__ import absolute_import, print_function, unicode_literals
 import logging
 import os
-import yaml
 import copy
 import attr
@@ -21,6 +20,7 @@ from .util.verify import (
     verify_docs,
     verifications,
 )
+from .util.yaml import load_yaml
 from .config import load_graph_config, GraphConfig
 logger = logging.getLogger(__name__)
@@ -84,8 +84,7 @@ class Kind(object):
             raise KindNotFound(kind_yml)
         logger.debug("loading kind `{}` from `{}`".format(kind_name, path))
-        with open(kind_yml) as f:
-            config = yaml.safe_load(f)
+        config = load_yaml(kind_yml)
         return cls(kind_name, path, config, graph_config)

View file

@@ -24,11 +24,11 @@ import os
 import re
 import jsone
-import yaml
 from slugid import nice as slugid
 from .task import Task
 from .graph import Graph
 from .taskgraph import TaskGraph
+from .util.yaml import load_yaml
 here = os.path.abspath(os.path.dirname(__file__))
 logger = logging.getLogger(__name__)
@@ -193,9 +193,7 @@ class apply_jsone_templates(object):
                 'target_tasks': self.target_tasks,
             }
-            template_path = os.path.join(self.template_dir, template + '.yml')
-            with open(template_path) as f:
-                template = yaml.safe_load(f)
+            template = load_yaml(self.template_dir, template + '.yml')
             result = jsone.render(template, context) or {}
             for attr in ('task', 'attributes'):
                 if attr in result:

View file

@@ -6,7 +6,6 @@ from __future__ import absolute_import, print_function, unicode_literals
 import os
 import json
-import yaml
 import shutil
 import unittest
 import tempfile
@@ -14,6 +13,7 @@ import tempfile
 from mock import patch
 from mozunit import main, MockedOpen
 from taskgraph import decision
+from taskgraph.util.yaml import load_yaml
 FAKE_GRAPH_CONFIG = {'product-dir': 'browser'}
@@ -40,8 +40,7 @@ class TestDecision(unittest.TestCase):
         try:
             decision.ARTIFACTS_DIR = os.path.join(tmpdir, "artifacts")
             decision.write_artifact("artifact.yml", data)
-            with open(os.path.join(decision.ARTIFACTS_DIR, "artifact.yml")) as f:
-                self.assertEqual(yaml.safe_load(f), data)
+            self.assertEqual(load_yaml(decision.ARTIFACTS_DIR, "artifact.yml"), data)
         finally:
             if os.path.exists(tmpdir):
                 shutil.rmtree(tmpdir)

View file

@@ -7,12 +7,11 @@ Transform the beetmover task into an actual task description.
 from __future__ import absolute_import, print_function, unicode_literals
-import yaml
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.scriptworker import (
     get_release_config,
 )
+from taskgraph.util.yaml import load_yaml
 from mozrelease.balrog import generate_update_properties
 from mozilla_version.gecko import GeckoVersion
@@ -26,8 +25,7 @@ def generate_update_line(config, jobs):
     release_config = get_release_config(config)
     for job in jobs:
         config_file = job.pop('whats-new-config')
-        with open(config_file, "rb") as f:
-            update_config = yaml.safe_load(f)
+        update_config = load_yaml(config_file)
         product = job['shipping-product']
         if product == 'devedition':

View file

@@ -12,7 +12,6 @@ import requests_unixsocket
 import sys
 import urllib
 import urlparse
-import yaml
 from mozbuild.util import memoize
 from mozpack.files import GeneratedFile
@@ -21,6 +20,8 @@ from mozpack.archive import (
 )
 from .. import GECKO
+from .yaml import load_yaml
 IMAGE_DIR = os.path.join(GECKO, 'taskcluster', 'docker')
@@ -273,13 +274,11 @@ def stream_context_tar(topsrcdir, context_dir, out_file, prefix, args=None):
 def image_paths():
     """Return a map of image name to paths containing their Dockerfile.
     """
-    with open(os.path.join(GECKO, 'taskcluster', 'ci', 'docker-image',
-                           'kind.yml')) as fh:
-        config = yaml.safe_load(fh)
-        return {
-            k: os.path.join(IMAGE_DIR, v.get('definition', k))
-            for k, v in config['jobs'].items()
-        }
+    config = load_yaml(GECKO, 'taskcluster', 'ci', 'docker-image', 'kind.yml')
+    return {
+        k: os.path.join(IMAGE_DIR, v.get('definition', k))
+        for k, v in config['jobs'].items()
+    }
 def image_path(name):

View file

@@ -417,7 +417,7 @@ def generate_beetmover_upstream_artifacts(job, platform, locale=None, dependenci
     """
     base_artifact_prefix = get_artifact_prefix(job)
     resolve_keyed_by(job, 'attributes.artifact_map', 'artifact map', platform=platform)
-    map_config = load_yaml(*os.path.split(job['attributes']['artifact_map']))
+    map_config = load_yaml(job['attributes']['artifact_map'])
     upstream_artifacts = list()
     if not locale:
@@ -480,7 +480,7 @@ def generate_beetmover_compressed_upstream_artifacts(job, dependencies=None):
         list: A list of dictionaries conforming to the upstream_artifacts spec.
     """
     base_artifact_prefix = get_artifact_prefix(job)
-    map_config = load_yaml(*os.path.split(job['attributes']['artifact_map']))
+    map_config = load_yaml(job['attributes']['artifact_map'])
     upstream_artifacts = list()
     if not dependencies:
@@ -532,7 +532,7 @@ def generate_beetmover_artifact_map(config, job, **kwargs):
     """
     platform = kwargs.get('platform', '')
     resolve_keyed_by(job, 'attributes.artifact_map', 'artifact map', platform=platform)
-    map_config = load_yaml(*os.path.split(job['attributes']['artifact_map']))
+    map_config = load_yaml(job['attributes']['artifact_map'])
     base_artifact_prefix = map_config.get('base_artifact_prefix', get_artifact_prefix(job))
     artifacts = list()

View file

@@ -8,9 +8,9 @@ import os
 import yaml
-def load_yaml(path, name):
+def load_yaml(*parts):
     """Convenience function to load a YAML file in the given path. This is
     useful for loading kind configuration files from the kind path."""
-    filename = os.path.join(path, name)
+    filename = os.path.join(*parts)
     with open(filename, "rb") as f:
         return yaml.safe_load(f)
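
Given the new signature, the components are joined with `os.path.join` and the result is parsed with `yaml.safe_load`, so a pre-joined path and separate components are interchangeable. A small usage sketch (the paths are illustrative, relative to a mozilla-central checkout):

from taskgraph.util.yaml import load_yaml

# Separate components are joined with os.path.join before parsing.
kind = load_yaml('taskcluster', 'ci', 'docker-image', 'kind.yml')

# A single, already-joined path works too, since os.path.join with one
# argument returns it unchanged.
kind_again = load_yaml('taskcluster/ci/docker-image/kind.yml')

assert kind == kind_again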