Bug 1524639: [taskgraph] Enforce unicode strings in schemas r=tomprince

Differential Revision: https://phabricator.services.mozilla.com/D18376

--HG--
extra : moz-landing-system : lando
This commit is contained in:
Ricky Stewart 2020-01-15 20:33:20 +00:00
Родитель 7d06e9d58a
Коммит fecc988cc6
65 изменённых файлов: 575 добавлений и 506 удалений

Просмотреть файл

@ -11,6 +11,7 @@ import argparse
import json
import logging
import os
from six import text_type
import sys
import traceback
import re
@ -126,51 +127,39 @@ class MachCommands(MachCommandBase):
@SubCommand('taskgraph', 'decision',
description="Run the decision task")
@CommandArgument('--root', '-r',
@CommandArgument('--root', '-r', type=text_type,
help="root of the taskgraph definition relative to topsrcdir")
@CommandArgument('--base-repository',
required=True,
@CommandArgument('--base-repository', type=text_type, required=True,
help='URL for "base" repository to clone')
@CommandArgument('--head-repository',
required=True,
@CommandArgument('--head-repository', type=text_type, required=True,
help='URL for "head" repository to fetch revision from')
@CommandArgument('--head-ref',
required=True,
@CommandArgument('--head-ref', type=text_type, required=True,
help='Reference (this is same as rev usually for hg)')
@CommandArgument('--head-rev',
required=True,
@CommandArgument('--head-rev', type=text_type, required=True,
help='Commit revision to use from head repository')
@CommandArgument('--comm-base-repository',
required=False,
@CommandArgument('--comm-base-repository', type=text_type, required=False,
help='URL for "base" comm-* repository to clone')
@CommandArgument('--comm-head-repository',
required=False,
@CommandArgument('--comm-head-repository', type=text_type, required=False,
help='URL for "head" comm-* repository to fetch revision from')
@CommandArgument('--comm-head-ref',
required=False,
@CommandArgument('--comm-head-ref', type=text_type, required=False,
help='comm-* Reference (this is same as rev usually for hg)')
@CommandArgument('--comm-head-rev',
required=False,
@CommandArgument('--comm-head-rev', type=text_type, required=False,
help='Commit revision to use from head comm-* repository')
@CommandArgument('--project',
required=True,
help='Project to use for creating task graph. Example: --project=try')
@CommandArgument('--pushlog-id',
dest='pushlog_id',
required=True,
default=0)
@CommandArgument(
'--project', type=text_type, required=True,
help='Project to use for creating task graph. Example: --project=try')
@CommandArgument('--pushlog-id', type=text_type, dest='pushlog_id',
required=True, default='0')
@CommandArgument('--pushdate',
dest='pushdate',
required=True,
type=int,
default=0)
@CommandArgument('--owner',
required=True,
@CommandArgument('--owner', type=text_type, required=True,
help='email address of who owns this graph')
@CommandArgument('--level',
required=True,
@CommandArgument('--level', type=text_type, required=True,
help='SCM level of this repository')
@CommandArgument('--target-tasks-method',
@CommandArgument('--target-tasks-method', type=text_type,
help='method for selecting the target tasks to generate')
@CommandArgument('--optimize-target-tasks',
type=lambda flag: bool(strtobool(flag)),
@ -178,11 +167,10 @@ class MachCommands(MachCommandBase):
help='If specified, this indicates whether the target '
'tasks are eligible for optimization. Otherwise, '
'the default for the project is used.')
@CommandArgument('--try-task-config-file',
@CommandArgument('--try-task-config-file', type=text_type,
help='path to try task configuration file')
@CommandArgument('--tasks-for',
help='the tasks_for value used to generate this task',
required=True)
@CommandArgument('--tasks-for', type=text_type, required=True,
help='the tasks_for value used to generate this task')
@CommandArgument('--include-push-tasks',
action='store_true',
help='Whether tasks from the on-push graph should be re-used '

Просмотреть файл

@ -8,6 +8,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import json
import logging
import six
import requests
from requests.exceptions import HTTPError
@ -144,9 +145,10 @@ def backfill_action(parameters, graph_config, input, task_group_id, task_id):
verify_args.append('--gpu-required')
if 'testPath' in input:
task.task['payload']['env']['MOZHARNESS_TEST_PATHS'] = json.dumps({
task.task['extra']['suite']['flavor']: [input['testPath']]
})
task.task['payload']['env']['MOZHARNESS_TEST_PATHS'] = six.ensure_text(
json.dumps({
task.task['extra']['suite']['flavor']: [input['testPath']]
}))
cmd_parts = task.task['payload']['command']
keep_args = ['--installer-url', '--download-symbols', '--test-packages-url']

Просмотреть файл

@ -182,8 +182,8 @@ def create_isolate_failure_tasks(task_definition, failures, level, times):
saved_command,
extra_args=include_args)
else:
task_definition['payload']['env']['MOZHARNESS_TEST_PATHS'] = json.dumps(
{suite: [failure_path]})
task_definition['payload']['env']['MOZHARNESS_TEST_PATHS'] = six.ensure_text(
json.dumps({suite: [failure_path]}))
logger.info("Creating task for path {} with command {}".format(
failure_path,

Просмотреть файл

@ -19,45 +19,45 @@ logger = logging.getLogger(__name__)
graph_config_schema = Schema({
# The trust-domain for this graph.
# (See https://firefox-source-docs.mozilla.org/taskcluster/taskcluster/taskgraph.html#taskgraph-trust-domain) # noqa
Required('trust-domain'): basestring,
Required('trust-domain'): text_type,
# This specifes the prefix for repo parameters that refer to the project being built.
# This selects between `head_rev` and `comm_head_rev` and related paramters.
# (See http://firefox-source-docs.mozilla.org/taskcluster/taskcluster/parameters.html#push-information # noqa
# and http://firefox-source-docs.mozilla.org/taskcluster/taskcluster/parameters.html#comm-push-information) # noqa
Required('project-repo-param-prefix'): basestring,
Required('project-repo-param-prefix'): text_type,
# This specifies the top level directory of the application being built.
# ie. "browser/" for Firefox, "comm/mail/" for Thunderbird.
Required('product-dir'): basestring,
Required('product-dir'): text_type,
Required('treeherder'): {
# Mapping of treeherder group symbols to descriptive names
Required('group-names'): {basestring: basestring}
Required('group-names'): {text_type: text_type}
},
Required('index'): {
Required('products'): [basestring]
Required('products'): [text_type]
},
Required('try'): {
# We have a few platforms for which we want to do some "extra" builds, or at
# least build-ish things. Sort of. Anyway, these other things are implemented
# as different "platforms". These do *not* automatically ride along with "-p
# all"
Required('ridealong-builds'): {basestring: [basestring]},
Required('ridealong-builds'): {text_type: [text_type]},
},
Required('release-promotion'): {
Required('products'): [basestring],
Required('flavors'): {basestring: {
Required('product'): basestring,
Required('target-tasks-method'): basestring,
Required('products'): [text_type],
Required('flavors'): {text_type: {
Required('product'): text_type,
Required('target-tasks-method'): text_type,
Optional('is-rc'): bool,
Optional('rebuild-kinds'): [basestring],
Optional('rebuild-kinds'): [text_type],
Optional('version-bump'): bool,
Optional('partial-updates'): bool,
}},
},
Required('scriptworker'): {
# Prefix to add to scopes controlling scriptworkers
Required('scope-prefix'): basestring,
Required('scope-prefix'): text_type,
# Mapping of scriptworker types to scopes they accept
Required('worker-types'): {basestring: [basestring]}
Required('worker-types'): {text_type: [text_type]}
},
Required('task-priority'): optionally_keyed_by('project', Any(
'highest',
@ -71,10 +71,10 @@ graph_config_schema = Schema({
Required('partner-urls'): {
Required('release-partner-repack'):
optionally_keyed_by('release-product', 'release-level', 'release-type',
Any(basestring, None)),
Any(text_type, None)),
Required('release-eme-free-repack'):
optionally_keyed_by('release-product', 'release-level', 'release-type',
Any(basestring, None)),
Any(text_type, None)),
},
Required('workers'): {
Required('aliases'): {

Просмотреть файл

@ -7,6 +7,8 @@
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from voluptuous import Any, Required, All, Optional
from taskgraph.util.schema import (
optionally_keyed_by,
@ -23,7 +25,7 @@ def even_15_minutes(minutes):
cron_yml_schema = Schema({
'jobs': [{
# Name of the crontask (must be unique)
Required('name'): basestring,
Required('name'): text_type,
# what to run
@ -32,10 +34,10 @@ cron_yml_schema = Schema({
Required('type'): 'decision-task',
# Treeherder symbol for the cron task
Required('treeherder-symbol'): basestring,
Required('treeherder-symbol'): text_type,
# --target-tasks-method './mach taskgraph decision' argument
Required('target-tasks-method'): basestring,
Required('target-tasks-method'): text_type,
Optional(
'optimize-target-tasks',
@ -51,7 +53,7 @@ cron_yml_schema = Schema({
Optional(
'rebuild-kinds',
description='Kinds that should not be re-used from the on-push graph.',
): [basestring],
): [text_type],
},
# when to run it
@ -60,7 +62,7 @@ cron_yml_schema = Schema({
# run on all projects for which cron tasks are set up. This works just like the
# `run_on_projects` attribute, where strings like "release" and "integration" are
# expanded to cover multiple repositories. (taskcluster/docs/attributes.rst)
'run-on-projects': [basestring],
'run-on-projects': [text_type],
# Array of times at which this task should run. These *must* be a
# multiple of 15 minutes, the minimum scheduling interval. This field

Просмотреть файл

@ -12,6 +12,7 @@ import time
import sys
from collections import defaultdict
import six
from six import text_type
from redo import retry
import yaml
@ -120,11 +121,11 @@ visual_metrics_jobs_schema = Schema({
})
try_task_config_schema = Schema({
Required('tasks'): [basestring],
Required('tasks'): [text_type],
Optional('browsertime'): bool,
Optional('chemspill-prio'): bool,
Optional('disable-pgo'): bool,
Optional('env'): {basestring: basestring},
Optional('env'): {text_type: text_type},
Optional('gecko-profile'): bool,
Optional('rebuild'): int,
Optional('use-artifact-builds'): bool,
@ -144,7 +145,7 @@ Schema for try_task_config.json files.
"""
try_task_config_schema_v2 = Schema({
Optional('parameters'): {basestring: object},
Optional('parameters'): {text_type: object},
})
@ -311,8 +312,8 @@ def get_decision_parameters(graph_config, options):
# use the pushdate as build_date if given, else use current time
parameters['build_date'] = parameters['pushdate'] or int(time.time())
# moz_build_date is the build identifier based on build_date
parameters['moz_build_date'] = time.strftime("%Y%m%d%H%M%S",
time.gmtime(parameters['build_date']))
parameters['moz_build_date'] = six.ensure_text(
time.strftime("%Y%m%d%H%M%S", time.gmtime(parameters['build_date'])))
project = parameters['project']
try:

Просмотреть файл

@ -7,6 +7,7 @@ import logging
import os
import copy
import attr
from six import text_type
from . import filter_tasks
from .graph import Graph
@ -35,8 +36,8 @@ class KindNotFound(Exception):
@attr.s(frozen=True)
class Kind(object):
name = attr.ib(type=basestring)
path = attr.ib(type=basestring)
name = attr.ib(type=text_type)
path = attr.ib(type=text_type)
config = attr.ib(type=dict)
graph_config = attr.ib(type=GraphConfig)

Просмотреть файл

@ -5,6 +5,7 @@
from __future__ import absolute_import, print_function, unicode_literals
import copy
from six import text_type
from voluptuous import Required
@ -17,7 +18,7 @@ schema = Schema({
Required(
'dependent-tasks',
'dictionary of dependent tasks, keyed by kind',
): {basestring: Task},
): {text_type: Task},
})
@ -178,7 +179,7 @@ def get_primary_dep(config, dep_tasks):
"""
primary_dependencies = config.get('primary-dependency')
if isinstance(primary_dependencies, basestring):
if isinstance(primary_dependencies, text_type):
primary_dependencies = [primary_dependencies]
if not primary_dependencies:
assert len(dep_tasks) == 1, "Must define a primary-dependency!"

Просмотреть файл

@ -6,6 +6,7 @@
from __future__ import absolute_import, print_function, unicode_literals
import io
import os.path
import json
from datetime import datetime
@ -22,6 +23,9 @@ from voluptuous import (
Schema,
)
import six
from six import text_type
from . import GECKO
from .util.attributes import release_level
@ -32,11 +36,11 @@ class ParameterMismatch(Exception):
@memoize
def get_head_ref():
return get_repository_object(GECKO).head_ref
return six.ensure_text(get_repository_object(GECKO).head_ref)
def get_contents(path):
with open(path, "r") as fh:
with io.open(path, "r") as fh:
contents = fh.readline().rstrip()
return contents
@ -54,48 +58,48 @@ def get_app_version(product_dir='browser'):
base_schema = {
Required('app_version'): basestring,
Required('base_repository'): basestring,
Required('app_version'): text_type,
Required('base_repository'): text_type,
Required('build_date'): int,
Required('build_number'): int,
Inclusive('comm_base_repository', 'comm'): basestring,
Inclusive('comm_head_ref', 'comm'): basestring,
Inclusive('comm_head_repository', 'comm'): basestring,
Inclusive('comm_head_rev', 'comm'): basestring,
Required('do_not_optimize'): [basestring],
Required('existing_tasks'): {basestring: basestring},
Required('filters'): [basestring],
Required('head_ref'): basestring,
Required('head_repository'): basestring,
Required('head_rev'): basestring,
Required('hg_branch'): basestring,
Required('level'): basestring,
Required('message'): basestring,
Required('moz_build_date'): basestring,
Required('next_version'): Any(None, basestring),
Inclusive('comm_base_repository', 'comm'): text_type,
Inclusive('comm_head_ref', 'comm'): text_type,
Inclusive('comm_head_repository', 'comm'): text_type,
Inclusive('comm_head_rev', 'comm'): text_type,
Required('do_not_optimize'): [text_type],
Required('existing_tasks'): {text_type: text_type},
Required('filters'): [text_type],
Required('head_ref'): text_type,
Required('head_repository'): text_type,
Required('head_rev'): text_type,
Required('hg_branch'): text_type,
Required('level'): text_type,
Required('message'): text_type,
Required('moz_build_date'): text_type,
Required('next_version'): Any(None, text_type),
Required('optimize_target_tasks'): bool,
Required('owner'): basestring,
Required('phabricator_diff'): Any(None, basestring),
Required('project'): basestring,
Required('owner'): text_type,
Required('phabricator_diff'): Any(None, text_type),
Required('project'): text_type,
Required('pushdate'): int,
Required('pushlog_id'): basestring,
Required('pushlog_id'): text_type,
Required('release_enable_emefree'): bool,
Required('release_enable_partners'): bool,
Required('release_eta'): Any(None, basestring),
Required('release_history'): {basestring: dict},
Required('release_partners'): Any(None, [basestring]),
Required('release_eta'): Any(None, text_type),
Required('release_history'): {text_type: dict},
Required('release_partners'): Any(None, [text_type]),
Required('release_partner_config'): Any(None, dict),
Required('release_partner_build_number'): int,
Required('release_type'): basestring,
Required('release_product'): Any(None, basestring),
Required('required_signoffs'): [basestring],
Required('release_type'): text_type,
Required('release_product'): Any(None, text_type),
Required('required_signoffs'): [text_type],
Required('signoff_urls'): dict,
Required('target_tasks_method'): basestring,
Required('tasks_for'): basestring,
Required('try_mode'): Any(None, basestring),
Required('target_tasks_method'): text_type,
Required('tasks_for'): text_type,
Required('try_mode'): Any(None, text_type),
Required('try_options'): Any(None, dict),
Required('try_task_config'): dict,
Required('version'): basestring,
Required('version'): text_type,
}
@ -139,7 +143,7 @@ class Parameters(ReadOnlyDict):
'hg_branch': 'default',
'level': '3',
'message': '',
'moz_build_date': now.strftime("%Y%m%d%H%M%S"),
'moz_build_date': six.ensure_text(now.strftime("%Y%m%d%H%M%S")),
'next_version': None,
'optimize_target_tasks': True,
'owner': 'nobody@mozilla.com',
@ -200,10 +204,10 @@ class Parameters(ReadOnlyDict):
Determine the VCS URL for viewing a file in the tree, suitable for
viewing by a human.
:param basestring path: The path, relative to the root of the repository.
:param text_type path: The path, relative to the root of the repository.
:param bool pretty: Whether to return a link to a formatted version of the
file, or the raw file version.
:return basestring: The URL displaying the given path.
:return text_type: The URL displaying the given path.
"""
if path.startswith('comm/'):
path = path[len('comm/'):]

Просмотреть файл

@ -4,6 +4,7 @@
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
import unittest
from mozunit import main
from taskgraph.util.schema import (
@ -14,7 +15,7 @@ from taskgraph.util.schema import (
schema = Schema({
'x': int,
'y': basestring,
'y': text_type,
})

Просмотреть файл

@ -7,6 +7,7 @@ Transform the per-locale balrog task into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
@ -23,7 +24,7 @@ from voluptuous import Optional
balrog_description_schema = schema.extend({
# unique label to describe this balrog task, defaults to balrog-{dep.label}
Optional('label'): basestring,
Optional('label'): text_type,
Optional(

Просмотреть файл

@ -5,6 +5,7 @@
from __future__ import absolute_import, print_function, unicode_literals
import attr
from six import text_type
from ..config import GraphConfig
from ..parameters import Parameters
@ -22,7 +23,7 @@ class TransformConfig(object):
kind = attr.ib()
# the path to the kind configuration directory
path = attr.ib(type=basestring)
path = attr.ib(type=text_type)
# the parsed contents of kind.yml
config = attr.ib(type=dict)

Просмотреть файл

@ -9,6 +9,7 @@ from __future__ import absolute_import, print_function, unicode_literals
from voluptuous import Optional, Required
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.task import task_description_schema
@ -25,10 +26,10 @@ transforms = TransformSequence()
beetmover_description_schema = schema.extend({
# depname is used in taskref's to identify the taskID of the unsigned things
Required('depname', default='build'): basestring,
Required('depname', default='build'): text_type,
# unique label to describe this beetmover task, defaults to {dep.label}-beetmover
Optional('label'): basestring,
Optional('label'): text_type,
# treeherder is allowed here to override any defaults we use for beetmover. See
# taskcluster/taskgraph/transforms/task.py for the schema details, and the
@ -36,7 +37,7 @@ beetmover_description_schema = schema.extend({
Optional('treeherder'): task_description_schema['treeherder'],
# locale is passed only for l10n beetmoving
Optional('locale'): basestring,
Optional('locale'): text_type,
Required('shipping-phase'): task_description_schema['shipping-phase'],
Optional('shipping-product'): task_description_schema['shipping-product'],
@ -130,9 +131,9 @@ def craft_release_properties(config, job):
return {
'app-name': app_name,
'app-version': str(params['app_version']),
'app-version': params['app_version'],
'branch': params['project'],
'build-id': str(params['moz_build_date']),
'build-id': params['moz_build_date'],
'hash-type': 'sha512',
'platform': build_platform,
}

Просмотреть файл

@ -8,6 +8,7 @@ Transform the checksums signing task into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.beetmover import craft_release_properties
@ -22,11 +23,11 @@ from taskgraph.util.treeherder import replace_group
from taskgraph.transforms.task import task_description_schema
beetmover_checksums_description_schema = schema.extend({
Required('depname', default='build'): basestring,
Required('attributes'): {basestring: object},
Optional('label'): basestring,
Required('depname', default='build'): text_type,
Required('attributes'): {text_type: object},
Optional('label'): text_type,
Optional('treeherder'): task_description_schema['treeherder'],
Optional('locale'): basestring,
Optional('locale'): text_type,
Optional('shipping-phase'): task_description_schema['shipping-phase'],
Optional('shipping-product'): task_description_schema['shipping-product'],
})

Просмотреть файл

@ -7,6 +7,7 @@ Transform release-beetmover-source-checksums into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.beetmover import craft_release_properties
@ -15,8 +16,8 @@ from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional
beetmover_checksums_description_schema = schema.extend({
Required('depname', default='build'): basestring,
Optional('label'): basestring,
Required('depname', default='build'): text_type,
Optional('label'): text_type,
Optional('extra'): object,
Optional('shipping-phase'): task_description_schema['shipping-phase'],
Optional('shipping-product'): task_description_schema['shipping-product'],

Просмотреть файл

@ -9,6 +9,7 @@ from __future__ import absolute_import, print_function, unicode_literals
from copy import deepcopy
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.beetmover import \
@ -27,14 +28,14 @@ from voluptuous import Required, Optional
beetmover_description_schema = schema.extend({
Required('depname', default='build'): basestring,
Optional('label'): basestring,
Required('depname', default='build'): text_type,
Optional('label'): text_type,
Optional('treeherder'): task_description_schema['treeherder'],
Required('run-on-projects'): task_description_schema['run-on-projects'],
Required('run-on-hg-branches'): task_description_schema['run-on-hg-branches'],
Optional('bucket-scope'): optionally_keyed_by('release-level', basestring),
Optional('bucket-scope'): optionally_keyed_by('release-level', text_type),
Optional('shipping-phase'): optionally_keyed_by(
'project', task_description_schema['shipping-phase']
),

Просмотреть файл

@ -7,6 +7,7 @@ Transform release-beetmover-langpack-checksums into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.beetmover import craft_release_properties
@ -21,11 +22,11 @@ from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional
beetmover_checksums_description_schema = schema.extend({
Required('depname', default='build'): basestring,
Required('attributes'): {basestring: object},
Optional('label'): basestring,
Required('depname', default='build'): text_type,
Required('attributes'): {text_type: object},
Optional('label'): text_type,
Optional('treeherder'): task_description_schema['treeherder'],
Optional('locale'): basestring,
Optional('locale'): text_type,
Optional('shipping-phase'): task_description_schema['shipping-phase'],
Optional('shipping-product'): task_description_schema['shipping-product'],
})

Просмотреть файл

@ -7,6 +7,7 @@ Transform the beetmover-push-to-release task into a task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import (
Schema,
@ -21,16 +22,16 @@ from voluptuous import Required, Optional
beetmover_push_to_release_description_schema = Schema({
Required('name'): basestring,
Required('product'): basestring,
Required('treeherder-platform'): basestring,
Optional('attributes'): {basestring: object},
Required('name'): text_type,
Required('product'): text_type,
Required('treeherder-platform'): text_type,
Optional('attributes'): {text_type: object},
Optional('job-from'): task_description_schema['job-from'],
Optional('run'): {basestring: object},
Optional('run'): {text_type: object},
Optional('run-on-projects'): task_description_schema['run-on-projects'],
Optional('dependencies'): {basestring: taskref_or_string},
Optional('index'): {basestring: basestring},
Optional('routes'): [basestring],
Optional('dependencies'): {text_type: taskref_or_string},
Optional('index'): {text_type: text_type},
Optional('routes'): [text_type],
Required('shipping-phase'): task_description_schema['shipping-phase'],
Required('shipping-product'): task_description_schema['shipping-product'],
Optional('extra'): task_description_schema['extra'],

Просмотреть файл

@ -7,6 +7,7 @@ Transform the beetmover task into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.multi_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.beetmover import craft_release_properties
@ -32,10 +33,10 @@ logger = logging.getLogger(__name__)
beetmover_description_schema = schema.extend({
# depname is used in taskref's to identify the taskID of the unsigned things
Required('depname', default='build'): basestring,
Required('depname', default='build'): text_type,
# unique label to describe this beetmover task, defaults to {dep.label}-beetmover
Required('label'): basestring,
Required('label'): text_type,
# treeherder is allowed here to override any defaults we use for beetmover. See
# taskcluster/taskgraph/transforms/task.py for the schema details, and the
@ -45,7 +46,7 @@ beetmover_description_schema = schema.extend({
Optional('attributes'): task_description_schema['attributes'],
# locale is passed only for l10n beetmoving
Optional('locale'): basestring,
Optional('locale'): text_type,
Required('shipping-phase'): task_description_schema['shipping-phase'],
# Optional until we fix asan (run_on_projects?)
Optional('shipping-product'): task_description_schema['shipping-product'],

Просмотреть файл

@ -7,6 +7,7 @@ Transform the beetmover task into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.beetmover import craft_release_properties
@ -37,14 +38,14 @@ logger = logging.getLogger(__name__)
beetmover_description_schema = schema.extend({
# depname is used in taskref's to identify the taskID of the unsigned things
Required('depname', default='build'): basestring,
Required('depname', default='build'): text_type,
# unique label to describe this beetmover task, defaults to {dep.label}-beetmover
Optional('label'): basestring,
Optional('label'): text_type,
Required('partner-bucket-scope'): optionally_keyed_by('release-level', basestring),
Required('partner-public-path'): Any(None, basestring),
Required('partner-private-path'): Any(None, basestring),
Required('partner-bucket-scope'): optionally_keyed_by('release-level', text_type),
Required('partner-public-path'): Any(None, text_type),
Required('partner-private-path'): Any(None, text_type),
Optional('extra'): object,
Required('shipping-phase'): task_description_schema['shipping-phase'],

Просмотреть файл

@ -7,6 +7,7 @@ Transform release-beetmover-source-checksums into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.beetmover import craft_release_properties
@ -20,10 +21,10 @@ from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional
beetmover_checksums_description_schema = schema.extend({
Required('depname', default='build'): basestring,
Optional('label'): basestring,
Required('depname', default='build'): text_type,
Optional('label'): text_type,
Optional('treeherder'): task_description_schema['treeherder'],
Optional('locale'): basestring,
Optional('locale'): text_type,
Optional('shipping-phase'): task_description_schema['shipping-phase'],
Optional('shipping-product'): task_description_schema['shipping-product'],
Optional('attributes'): task_description_schema['attributes'],

Просмотреть файл

@ -6,6 +6,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import json
from pipes import quote as shell_quote
import six
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.scriptworker import get_release_config
from taskgraph.util.schema import (
@ -87,7 +88,8 @@ def handle_keyed_by(config, jobs):
if 'extra-config' in job['run']:
env = job['worker'].setdefault('env', {})
env['EXTRA_MOZHARNESS_CONFIG'] = json.dumps(job['run']['extra-config'])
env['EXTRA_MOZHARNESS_CONFIG'] = six.ensure_text(
json.dumps(job['run']['extra-config']))
del job["run"]["extra-config"]
yield job

Просмотреть файл

@ -8,6 +8,7 @@ defined in kind.yml
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.task import task_description_schema
from taskgraph.util.schema import (
@ -21,19 +22,19 @@ from voluptuous import (
)
index_or_string = Any(
basestring,
{Required('index-search'): basestring},
text_type,
{Required('index-search'): text_type},
)
diff_description_schema = Schema({
# Name of the diff task.
Required('name'): basestring,
Required('name'): text_type,
# Treeherder symbol.
Required('symbol'): basestring,
Required('symbol'): text_type,
# relative path (from config.path) to the file the task was defined in.
Optional('job-from'): basestring,
Optional('job-from'): text_type,
# Original and new builds to compare.
Required('original'): index_or_string,
@ -41,10 +42,10 @@ diff_description_schema = Schema({
# Arguments to pass to diffoscope, used for job-defaults in
# taskcluster/ci/diffoscope/kind.yml
Optional('args'): basestring,
Optional('args'): text_type,
# Extra arguments to pass to diffoscope, that can be set per job.
Optional('extra-args'): basestring,
Optional('extra-args'): text_type,
# Fail the task when differences are detected.
Optional('fail-on-diff'): bool,
@ -55,7 +56,7 @@ diff_description_schema = Schema({
Optional('unpack'): bool,
# Commands to run before performing the diff.
Optional('pre-diff-commands'): [basestring],
Optional('pre-diff-commands'): [text_type],
# Only run the task on a set of projects/branches.
Optional('run-on-projects'): task_description_schema['run-on-projects'],
@ -77,7 +78,7 @@ def fill_template(config, tasks):
previous_artifact = None
for k in ('original', 'new'):
value = task[k]
if isinstance(value, basestring):
if isinstance(value, text_type):
deps[k] = value
dep_name = k
os_hint = value

Просмотреть файл

@ -8,6 +8,7 @@ import os
import re
from collections import deque
from six import text_type
import taskgraph
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.task import _run_task_suffix
@ -31,27 +32,27 @@ transforms = TransformSequence()
docker_image_schema = Schema({
# Name of the docker image.
Required('name'): basestring,
Required('name'): text_type,
# Name of the parent docker image.
Optional('parent'): basestring,
Optional('parent'): text_type,
# Treeherder symbol.
Required('symbol'): basestring,
Required('symbol'): text_type,
# relative path (from config.path) to the file the docker image was defined
# in.
Optional('job-from'): basestring,
Optional('job-from'): text_type,
# Arguments to use for the Dockerfile.
Optional('args'): {basestring: basestring},
Optional('args'): {text_type: text_type},
# Name of the docker image definition under taskcluster/docker, when
# different from the docker image name.
Optional('definition'): basestring,
Optional('definition'): text_type,
# List of package tasks this docker image depends on.
Optional('packages'): [basestring],
Optional('packages'): [text_type],
Optional(
"index",

Просмотреть файл

@ -9,8 +9,10 @@ from __future__ import absolute_import, unicode_literals
from mozbuild.shellutil import quote as shell_quote
import io
import os
import re
from six import text_type
from voluptuous import (
Any,
@ -38,24 +40,24 @@ CACHE_TYPE = 'content.v1'
FETCH_SCHEMA = Schema({
# Name of the task.
Required('name'): basestring,
Required('name'): text_type,
# Relative path (from config.path) to the file the task was defined
# in.
Optional('job-from'): basestring,
Optional('job-from'): text_type,
# Description of the task.
Required('description'): basestring,
Required('description'): text_type,
Required('fetch'): Any(
{
'type': 'static-url',
# The URL to download.
Required('url'): basestring,
Required('url'): text_type,
# The SHA-256 of the downloaded content.
Required('sha256'): basestring,
Required('sha256'): text_type,
# Size of the downloaded entity, in bytes.
Required('size'): int,
@ -65,17 +67,17 @@ FETCH_SCHEMA = Schema({
# URL where GPG signature document can be obtained. Can contain the
# value ``{url}``, which will be substituted with the value from
# ``url``.
Required('sig-url'): basestring,
Required('sig-url'): text_type,
# Path to file containing GPG public key(s) used to validate
# download.
Required('key-path'): basestring,
Required('key-path'): text_type,
},
# The name to give to the generated artifact. Defaults to the file
# portion of the URL. Using a different extension converts the
# archive to the given type. Only conversion to .tar.zst is
# supported.
Optional('artifact-name'): basestring,
Optional('artifact-name'): text_type,
# Strip the given number of path components at the beginning of
# each file entry in the archive.
@ -84,7 +86,7 @@ FETCH_SCHEMA = Schema({
# Add the given prefix to each file entry in the archive.
# Requires an artifact-name ending with .tar.zst.
Optional('add-prefix'): basestring,
Optional('add-prefix'): text_type,
# IMPORTANT: when adding anything that changes the behavior of the task,
# it is important to update the digest data used to compute cache hits.
@ -92,23 +94,23 @@ FETCH_SCHEMA = Schema({
{
'type': 'chromium-fetch',
Required('script'): basestring,
Required('script'): text_type,
# Platform type for chromium build
Required('platform'): basestring,
Required('platform'): text_type,
# Chromium revision to obtain
Optional('revision'): basestring,
Optional('revision'): text_type,
# The name to give to the generated artifact.
Required('artifact-name'): basestring
Required('artifact-name'): text_type
},
{
'type': 'git',
Required('repo'): basestring,
Required('revision'): basestring,
Optional('artifact-name'): basestring,
Optional('path-prefix'): basestring,
Required('repo'): text_type,
Required('revision'): text_type,
Optional('artifact-name'): text_type,
Optional('path-prefix'): text_type,
}
),
})
@ -205,7 +207,7 @@ def create_fetch_url_task(config, job):
key_path = os.path.join(taskgraph.GECKO, fetch['gpg-signature'][
'key-path'])
with open(key_path, 'rb') as fh:
with io.open(key_path, 'r') as fh:
gpg_key = fh.read()
env['FETCH_GPG_KEY'] = gpg_key

Просмотреть файл

@ -7,6 +7,7 @@ Transform the repackage signing task into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
@ -17,8 +18,8 @@ from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional
repackage_signing_description_schema = schema.extend({
Required('depname', default='geckodriver-repackage'): basestring,
Optional('label'): basestring,
Required('depname', default='geckodriver-repackage'): text_type,
Optional('label'): text_type,
Optional('treeherder'): task_description_schema['treeherder'],
Optional('shipping-phase'): task_description_schema['shipping-phase'],
})

Просмотреть файл

@ -7,6 +7,7 @@ Transform the push-apk kind into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.task import task_description_schema
from taskgraph.util.schema import resolve_keyed_by, Schema
@ -14,7 +15,7 @@ from taskgraph.util.schema import resolve_keyed_by, Schema
from voluptuous import Required
google_play_description_schema = Schema({
Required('name'): basestring,
Required('name'): text_type,
Required('description'): task_description_schema['description'],
Required('job-from'): task_description_schema['job-from'],
Required('attributes'): task_description_schema['attributes'],

Просмотреть файл

@ -14,6 +14,8 @@ from __future__ import absolute_import, print_function, unicode_literals
import copy
import logging
import json
import six
from six import text_type
import mozpack.path as mozpath
@ -41,8 +43,8 @@ job_description_schema = Schema({
# The name of the job and the job's label. At least one must be specified,
# and the label will be generated from the name if necessary, by prepending
# the kind.
Optional('name'): basestring,
Optional('label'): basestring,
Optional('name'): text_type,
Optional('label'): text_type,
# the following fields are passed directly through to the task description,
# possibly modified by the run implementation. See
@ -78,14 +80,14 @@ job_description_schema = Schema({
# This task only needs to be run if a file matching one of the given
# patterns has changed in the push. The patterns use the mozpack
# match function (python/mozbuild/mozpack/path.py).
Optional('files-changed'): [basestring],
Optional('files-changed'): [text_type],
},
# A list of artifacts to install from 'fetch' tasks.
Optional('fetches'): {
basestring: [basestring, {
Required('artifact'): basestring,
Optional('dest'): basestring,
text_type: [text_type, {
Required('artifact'): text_type,
Optional('dest'): text_type,
Optional('extract'): bool,
}],
},
@ -93,10 +95,10 @@ job_description_schema = Schema({
# A description of how to run this job.
'run': {
# The key to a job implementation in a peer module to this one
'using': basestring,
'using': text_type,
# Base work directory used to set up the task.
Optional('workdir'): basestring,
Optional('workdir'): text_type,
# Any remaining content is verified against that job implementation's
# own schema.
@ -259,7 +261,7 @@ def use_fetches(config, jobs):
prefix = get_artifact_prefix(dep_tasks[0])
for artifact in artifacts:
if isinstance(artifact, basestring):
if isinstance(artifact, text_type):
path = artifact
dest = None
extract = True
@ -293,7 +295,10 @@ def use_fetches(config, jobs):
job["scopes"].append(scope)
env = worker.setdefault('env', {})
env['MOZ_FETCHES'] = {'task-reference': json.dumps(job_fetches, sort_keys=True)}
env['MOZ_FETCHES'] = {
'task-reference': six.ensure_text(json.dumps(job_fetches,
sort_keys=True))
}
# The path is normalized to an absolute path in run-task
env.setdefault('MOZ_FETCHES_DIR', 'fetches')

Просмотреть файл

@ -9,6 +9,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import os
import re
from six import text_type
from taskgraph.util.schema import Schema
from voluptuous import Any, Optional, Required
@ -25,19 +26,19 @@ DSC_PACKAGE_RE = re.compile('.*(?=_)')
SOURCE_PACKAGE_RE = re.compile('.*(?=[-_]\d)')
source_definition = {
Required('url'): basestring,
Required('sha256'): basestring,
Required('url'): text_type,
Required('sha256'): text_type,
}
run_schema = Schema({
Required('using'): 'debian-package',
# Debian distribution
Required('dist'): basestring,
Required('dist'): text_type,
# Date of the snapshot (from snapshot.debian.org) to use, in the format
# YYYYMMDDTHHMMSSZ. The same date is used for the base docker-image name
# (only the YYYYMMDD part).
Required('snapshot'): basestring,
Required('snapshot'): text_type,
# URL/SHA256 of a source file to build, which can either be a source
# control (.dsc), or a tarball.
@ -46,19 +47,19 @@ run_schema = Schema({
# Package name. Normally derived from the source control or tarball file
# name. Use in case the name doesn't match DSC_PACKAGE_RE or
# SOURCE_PACKAGE_RE.
Optional('name'): basestring,
Optional('name'): text_type,
# Patch to apply to the extracted source.
Optional('patch'): basestring,
Optional('patch'): text_type,
# Command to run before dpkg-buildpackage.
Optional('pre-build-command'): basestring,
Optional('pre-build-command'): text_type,
# Architecture to build the package for.
Optional('arch'): basestring,
Optional('arch'): text_type,
# List of package tasks to get build dependencies from.
Optional('packages'): [basestring],
Optional('packages'): [text_type],
# What resolver to use to install build dependencies. The default
# (apt-get) is good in most cases, but in subtle cases involving
@ -67,7 +68,7 @@ run_schema = Schema({
Optional('resolver'): Any('apt-get', 'aptitude'),
# Base work directory used to set up the task.
Required('workdir'): basestring,
Required('workdir'): text_type,
})

Просмотреть файл

@ -7,6 +7,7 @@ Support for running hazard jobs via dedicated scripts
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.util.schema import Schema
from voluptuous import Required, Optional, Any
@ -25,20 +26,20 @@ haz_run_schema = Schema({
Required('using'): 'hazard',
# The command to run within the task image (passed through to the worker)
Required('command'): basestring,
Required('command'): text_type,
# The mozconfig to use; default in the script is used if omitted
Optional('mozconfig'): basestring,
Optional('mozconfig'): text_type,
# The set of secret names to which the task has access; these are prefixed
# with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
# this will enable any worker features required and set the task's scopes
# appropriately. `true` here means ['*'], all secrets. Not supported on
# Windows
Required('secrets', default=False): Any(bool, [basestring]),
Required('secrets', default=False): Any(bool, [text_type]),
# Base work directory used to set up the task.
Required('workdir'): basestring,
Required('workdir'): text_type,
})

Просмотреть файл

@ -7,6 +7,7 @@ Support for running mach tasks (via run-task)
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.transforms.job import run_job_using, configure_taskdesc_for_run
from taskgraph.util.schema import (
Schema,
@ -22,14 +23,14 @@ mach_schema = Schema({
# The sparse checkout profile to use. Value is the filename relative to the
# directory where sparse profiles are defined (build/sparse-profiles/).
Optional('sparse-profile'): Any(basestring, None),
Optional('sparse-profile'): Any(text_type, None),
# if true, perform a checkout of a comm-central based branch inside the
# gecko checkout
Required('comm-checkout'): bool,
# Base work directory used to set up the task.
Required('workdir'): basestring,
Required('workdir'): text_type,
})

Просмотреть файл

@ -11,6 +11,8 @@ way, and certainly anything using mozharness should use this approach.
from __future__ import absolute_import, print_function, unicode_literals
import json
import six
from six import text_type
from textwrap import dedent
from taskgraph.util.schema import Schema
@ -37,16 +39,16 @@ mozharness_run_schema = Schema({
# the mozharness script used to run this task, relative to the testing/
# directory and using forward slashes even on Windows
Required('script'): basestring,
Required('script'): text_type,
# Additional paths to look for mozharness configs in. These should be
# relative to the base of the source checkout
Optional('config-paths'): [basestring],
Optional('config-paths'): [text_type],
# the config files required for the task, relative to
# testing/mozharness/configs or one of the paths specified in
# `config-paths` and using forward slashes even on Windows
Required('config'): [basestring],
Required('config'): [text_type],
# any additional actions to pass to the mozharness command
Optional('actions'): [Match(
@ -61,14 +63,14 @@ mozharness_run_schema = Schema({
)],
# --custom-build-variant-cfg value
Optional('custom-build-variant-cfg'): basestring,
Optional('custom-build-variant-cfg'): text_type,
# Extra configuration options to pass to mozharness.
Optional('extra-config'): dict,
# Extra metadata to use toward the workspace caching.
# Only supported on docker-worker
Optional('extra-workspace-cache-key'): basestring,
Optional('extra-workspace-cache-key'): text_type,
# If not false, tooltool downloads will be enabled via relengAPIProxy
# for either just public files, or all files. Not supported on Windows
@ -83,7 +85,7 @@ mozharness_run_schema = Schema({
# this will enable any worker features required and set the task's scopes
# appropriately. `true` here means ['*'], all secrets. Not supported on
# Windows
Required('secrets'): Any(bool, [basestring]),
Required('secrets'): Any(bool, [text_type]),
# If true, taskcluster proxy will be enabled; note that it may also be enabled
# automatically e.g., for secrets support. Not supported on Windows.
@ -97,7 +99,7 @@ mozharness_run_schema = Schema({
Required('keep-artifacts'): bool,
# If specified, use the in-tree job script specified.
Optional('job-script'): basestring,
Optional('job-script'): text_type,
Required('requires-signed-builds'): bool,
@ -117,7 +119,7 @@ mozharness_run_schema = Schema({
Required('comm-checkout'): bool,
# Base work directory used to set up the task.
Required('workdir'): basestring,
Required('workdir'): text_type,
})
@ -190,7 +192,8 @@ def mozharness_on_docker_worker_setup(config, job, taskdesc):
env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run.pop('custom-build-variant-cfg')
if 'extra-config' in run:
env['EXTRA_MOZHARNESS_CONFIG'] = json.dumps(run.pop('extra-config'))
env['EXTRA_MOZHARNESS_CONFIG'] = six.ensure_text(
json.dumps(run.pop('extra-config')))
if 'job-script' in run:
env['JOB_SCRIPT'] = run['job-script']
@ -276,7 +279,8 @@ def mozharness_on_generic_worker(config, job, taskdesc):
env.update({'MOZ_SIMPLE_PACKAGE_NAME': 'target'})
if 'extra-config' in run:
env['EXTRA_MOZHARNESS_CONFIG'] = json.dumps(run.pop('extra-config'))
env['EXTRA_MOZHARNESS_CONFIG'] = six.ensure_text(
json.dumps(run.pop('extra-config')))
# The windows generic worker uses batch files to pass environment variables
# to commands. Setting a variable to empty in a batch file unsets, so if

Просмотреть файл

@ -7,6 +7,8 @@ from __future__ import absolute_import, print_function, unicode_literals
import json
import os
import six
from six import text_type
from voluptuous import Required
from taskgraph.util.taskcluster import get_artifact_url
@ -48,7 +50,7 @@ mozharness_test_run_schema = Schema({
Required('using'): 'mozharness-test',
Required('test'): test_description_schema,
# Base work directory used to set up the task.
Required('workdir'): basestring,
Required('workdir'): text_type,
})
@ -111,7 +113,7 @@ def mozharness_test_on_docker(config, job, taskdesc):
'NEED_PULSEAUDIO': 'true',
'NEED_WINDOW_MANAGER': 'true',
'NEED_COMPIZ': 'true',
'ENABLE_E10S': str(bool(test.get('e10s'))).lower(),
'ENABLE_E10S': text_type(bool(test.get('e10s'))).lower(),
'WORKING_DIR': '/builds/worker',
})
@ -176,7 +178,9 @@ def mozharness_test_on_docker(config, job, taskdesc):
'installer_url': installer_url,
'test_packages_url': test_packages_url(taskdesc),
}
env['EXTRA_MOZHARNESS_CONFIG'] = {'task-reference': json.dumps(extra_config)}
env['EXTRA_MOZHARNESS_CONFIG'] = {
'task-reference': six.ensure_text(json.dumps(extra_config))
}
command = [
'{workdir}/bin/test-linux.sh'.format(**run),
@ -184,7 +188,8 @@ def mozharness_test_on_docker(config, job, taskdesc):
command.extend(mozharness.get('extra-options', []))
if test.get('test-manifests'):
env['MOZHARNESS_TEST_PATHS'] = json.dumps({test['suite']: test['test-manifests']})
env['MOZHARNESS_TEST_PATHS'] = six.ensure_text(
json.dumps({test['suite']: test['test-manifests']}))
# TODO: remove the need for run['chunked']
elif mozharness.get('chunked') or test['chunks'] > 1:
@ -322,7 +327,9 @@ def mozharness_test_on_generic_worker(config, job, taskdesc):
'installer_url': installer_url,
'test_packages_url': test_packages_url(taskdesc),
}
env['EXTRA_MOZHARNESS_CONFIG'] = {'task-reference': json.dumps(extra_config)}
env['EXTRA_MOZHARNESS_CONFIG'] = {
'task-reference': six.ensure_text(json.dumps(extra_config))
}
if is_windows:
mh_command = [
@ -359,7 +366,7 @@ def mozharness_test_on_generic_worker(config, job, taskdesc):
mh_command.extend(['--cfg', cfg_path])
mh_command.extend(mozharness.get('extra-options', []))
if mozharness.get('download-symbols'):
if isinstance(mozharness['download-symbols'], basestring):
if isinstance(mozharness['download-symbols'], text_type):
mh_command.extend(['--download-symbols', mozharness['download-symbols']])
else:
mh_command.extend(['--download-symbols', 'true'])
@ -367,7 +374,8 @@ def mozharness_test_on_generic_worker(config, job, taskdesc):
mh_command.append('--blob-upload-branch=' + config.params['project'])
if test.get('test-manifests'):
env['MOZHARNESS_TEST_PATHS'] = json.dumps({test['suite']: test['test-manifests']})
env['MOZHARNESS_TEST_PATHS'] = six.ensure_text(
json.dumps({test['suite']: test['test-manifests']}))
# TODO: remove the need for run['chunked']
elif mozharness.get('chunked') or test['chunks'] > 1:
@ -475,7 +483,9 @@ def mozharness_test_on_script_engine_autophone(config, job, taskdesc):
'installer_url': installer_url,
'test_packages_url': test_packages_url(taskdesc),
}
env['EXTRA_MOZHARNESS_CONFIG'] = {'task-reference': json.dumps(extra_config)}
env['EXTRA_MOZHARNESS_CONFIG'] = {
'task-reference': six.ensure_text(json.dumps(extra_config))
}
script = 'test-linux.sh'
worker['context'] = config.params.file_url(
@ -488,7 +498,8 @@ def mozharness_test_on_script_engine_autophone(config, job, taskdesc):
command.extend(mozharness.get('extra-options', []))
if test.get('test-manifests'):
env['MOZHARNESS_TEST_PATHS'] = json.dumps({test['suite']: test['test-manifests']})
env['MOZHARNESS_TEST_PATHS'] = six.ensure_text(
json.dumps({test['suite']: test['test-manifests']}))
# TODO: remove the need for run['chunked']
elif mozharness.get('chunked') or test['chunks'] > 1:

Просмотреть файл

@ -7,6 +7,7 @@ Support for running mach python-test tasks (via run-task)
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.transforms.job import run_job_using, configure_taskdesc_for_run
from taskgraph.util.schema import Schema
from voluptuous import Required
@ -18,10 +19,10 @@ python_test_schema = Schema({
Required('python-version'): int,
# The subsuite to run
Required('subsuite'): basestring,
Required('subsuite'): text_type,
# Base work directory used to set up the task.
Required('workdir'): basestring,
Required('workdir'): text_type,
})

Просмотреть файл

@ -42,7 +42,7 @@ run_task_schema = Schema({
# The sparse checkout profile to use. Value is the filename relative to the
# directory where sparse profiles are defined (build/sparse-profiles/).
Required('sparse-profile'): Any(basestring, None),
Required('sparse-profile'): Any(text_type, None),
# if true, perform a checkout of a comm-central based branch inside the
# gecko checkout
@ -54,7 +54,7 @@ run_task_schema = Schema({
Required('command'): Any([taskref_or_string], taskref_or_string),
# Base work directory used to set up the task.
Required('workdir'): basestring,
Required('workdir'): text_type,
# If not false, tooltool downloads will be enabled via relengAPIProxy
# for either just public files, or all files. Only supported on
@ -131,8 +131,8 @@ def docker_worker_run_task(config, job, taskdesc):
)
)
# dict is for the case of `{'task-reference': basestring}`.
if isinstance(run_command, (basestring, dict)):
# dict is for the case of `{'task-reference': text_type}`.
if isinstance(run_command, (text_type, dict)):
run_command = ['bash', '-cx', run_command]
if run['comm-checkout']:
command.append('--comm-checkout={}/comm'.format(
@ -202,7 +202,7 @@ def generic_worker_run_task(config, job, taskdesc):
)
)
if isinstance(run_command, basestring):
if isinstance(run_command, text_type):
if is_win:
run_command = '"{}"'.format(run_command)
run_command = ['bash', '-cx', run_command]

Просмотреть файл

@ -7,6 +7,7 @@ Support for running spidermonkey jobs via dedicated scripts
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.util.schema import Schema
from voluptuous import Required, Any, Optional
@ -24,11 +25,11 @@ sm_run_schema = Schema({
'spidermonkey-rust-bindings'),
# SPIDERMONKEY_VARIANT and SPIDERMONKEY_PLATFORM
Required('spidermonkey-variant'): basestring,
Optional('spidermonkey-platform'): basestring,
Required('spidermonkey-variant'): text_type,
Optional('spidermonkey-platform'): text_type,
# Base work directory used to set up the task.
Required('workdir'): basestring,
Required('workdir'): text_type,
Required('tooltool-downloads'): Any(
False,

Просмотреть файл

@ -10,6 +10,7 @@ from __future__ import absolute_import, print_function, unicode_literals
from mozbuild.shellutil import quote as shell_quote
from mozpack import path
from six import text_type
from taskgraph.util.schema import Schema
from voluptuous import Optional, Required, Any
@ -33,10 +34,10 @@ toolchain_run_schema = Schema({
# The script (in taskcluster/scripts/misc) to run.
# Python scripts are invoked with `mach python` so vendored libraries
# are available.
Required('script'): basestring,
Required('script'): text_type,
# Arguments to pass to the script.
Optional('arguments'): [basestring],
Optional('arguments'): [text_type],
# If not false, tooltool downloads will be enabled via relengAPIProxy
# for either just public files, or all files. Not supported on Windows
@ -51,21 +52,21 @@ toolchain_run_schema = Schema({
# "toolchain-build", i.e., to
# `build/sparse-profiles/toolchain-build`. If `None`, instructs
# `run-task` to not use a sparse profile at all.
Required('sparse-profile'): Any(basestring, None),
Required('sparse-profile'): Any(text_type, None),
# Paths/patterns pointing to files that influence the outcome of a
# toolchain build.
Optional('resources'): [basestring],
Optional('resources'): [text_type],
# Path to the artifact produced by the toolchain job
Required('toolchain-artifact'): basestring,
Required('toolchain-artifact'): text_type,
# An alias that can be used instead of the real toolchain job name in
# the toolchains list for build jobs.
Optional('toolchain-alias'): basestring,
Optional('toolchain-alias'): text_type,
# Base work directory used to set up the task.
Required('workdir'): basestring,
Required('workdir'): text_type,
})

Просмотреть файл

@ -8,9 +8,11 @@ Do transforms specific to l10n kind
from __future__ import absolute_import, print_function, unicode_literals
import copy
import io
import json
from mozbuild.chunkify import chunkify
from six import text_type
from taskgraph.loader.multi_dep import schema
from taskgraph.transforms.base import (
TransformSequence,
@ -38,34 +40,34 @@ def _by_platform(arg):
l10n_description_schema = schema.extend({
# Name for this job, inferred from the dependent job before validation
Required('name'): basestring,
Required('name'): text_type,
# build-platform, inferred from dependent job before validation
Required('build-platform'): basestring,
Required('build-platform'): text_type,
# max run time of the task
Required('run-time'): _by_platform(int),
# Locales not to repack for
Required('ignore-locales'): _by_platform([basestring]),
Required('ignore-locales'): _by_platform([text_type]),
# All l10n jobs use mozharness
Required('mozharness'): {
# Script to invoke for mozharness
Required('script'): _by_platform(basestring),
Required('script'): _by_platform(text_type),
# Config files passed to the mozharness script
Required('config'): _by_platform([basestring]),
Required('config'): _by_platform([text_type]),
# Additional paths to look for mozharness configs in. These should be
# relative to the base of the source checkout
Optional('config-paths'): [basestring],
Optional('config-paths'): [text_type],
# Options to pass to the mozharness script
Optional('options'): _by_platform([basestring]),
Optional('options'): _by_platform([text_type]),
# Action commands to provide to mozharness script
Required('actions'): _by_platform([basestring]),
Required('actions'): _by_platform([text_type]),
# if true, perform a checkout of a comm-central based branch inside the
# gecko checkout
@ -74,24 +76,24 @@ l10n_description_schema = schema.extend({
# Items for the taskcluster index
Optional('index'): {
# Product to identify as in the taskcluster index
Required('product'): _by_platform(basestring),
Required('product'): _by_platform(text_type),
# Job name to identify as in the taskcluster index
Required('job-name'): _by_platform(basestring),
Required('job-name'): _by_platform(text_type),
# Type of index
Optional('type'): _by_platform(basestring),
Optional('type'): _by_platform(text_type),
},
# Description of the localized task
Required('description'): _by_platform(basestring),
Required('description'): _by_platform(text_type),
Optional('run-on-projects'): job_description_schema['run-on-projects'],
# worker-type to utilize
Required('worker-type'): _by_platform(basestring),
Required('worker-type'): _by_platform(text_type),
# File which contains the used locales
Required('locales-file'): _by_platform(basestring),
Required('locales-file'): _by_platform(text_type),
# Tooltool visibility required for task.
Required('tooltool'): _by_platform(Any('internal', 'public')),
@ -100,12 +102,12 @@ l10n_description_schema = schema.extend({
# -- generally desktop-build or android-build -- for now.
Required('docker-image', default=None): _by_platform(Any(
# an in-tree generated docker image (from `taskcluster/docker/<name>`)
{'in-tree': basestring},
{'in-tree': text_type},
None,
)),
Optional('fetches'): {
basestring: _by_platform([basestring]),
text_type: _by_platform([text_type]),
},
# The set of secret names to which the task has access; these are prefixed
@ -113,33 +115,33 @@ l10n_description_schema = schema.extend({
# this will enable any worker features required and set the task's scopes
# appropriately. `true` here means ['*'], all secrets. Not supported on
# Windows
Required('secrets', default=False): _by_platform(Any(bool, [basestring])),
Required('secrets', default=False): _by_platform(Any(bool, [text_type])),
# Information for treeherder
Required('treeherder'): {
# Platform to display the task on in treeherder
Required('platform'): _by_platform(basestring),
Required('platform'): _by_platform(text_type),
# Symbol to use
Required('symbol'): basestring,
Required('symbol'): text_type,
# Tier this task is
Required('tier'): _by_platform(int),
},
# Extra environment values to pass to the worker
Optional('env'): _by_platform({basestring: taskref_or_string}),
Optional('env'): _by_platform({text_type: taskref_or_string}),
# Max number locales per chunk
Optional('locales-per-chunk'): _by_platform(int),
# Task deps to chain this task with, added in transforms from primary-dependency
# if this is a nightly
Optional('dependencies'): {basestring: basestring},
Optional('dependencies'): {text_type: text_type},
# Run the task when the listed files change (if present).
Optional('when'): {
'files-changed': [basestring]
'files-changed': [text_type]
},
# passed through directly to the job description
@ -159,13 +161,12 @@ def parse_locales_file(locales_file, platform=None):
"""
locales = []
with open(locales_file, mode='r') as f:
with io.open(locales_file, mode='r') as f:
if locales_file.endswith('json'):
all_locales = json.load(f)
# XXX Only single locales are fetched
locales = {
locale: data['revision']
for locale, data in all_locales.items()
locale: data['revision'] for locale, data in all_locales.items()
if platform is None or platform in data['platforms']
}
else:
@ -264,7 +265,7 @@ def handle_artifact_prefix(config, jobs):
for job in jobs:
artifact_prefix = get_artifact_prefix(job)
for k1, v1 in job.get('env', {}).iteritems():
if isinstance(v1, basestring):
if isinstance(v1, text_type):
job['env'][k1] = v1.format(
artifact_prefix=artifact_prefix
)

Просмотреть файл

@ -5,7 +5,7 @@
This transform is used to help populate mozharness options for openh264 jobs
"""
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
from taskgraph.transforms.base import TransformSequence

Просмотреть файл

@ -7,6 +7,7 @@ Transform the repackage signing task into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
@ -20,8 +21,8 @@ from voluptuous import Required, Optional
transforms = TransformSequence()
signing_description_schema = schema.extend({
Required('depname', default='repackage'): basestring,
Optional('label'): basestring,
Required('depname', default='repackage'): text_type,
Optional('label'): text_type,
Optional('extra'): object,
Optional('shipping-product'): task_description_schema['shipping-product'],
Optional('shipping-phase'): task_description_schema['shipping-phase'],

Просмотреть файл

@ -9,6 +9,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import re
from six import text_type
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.task import task_description_schema
from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by, Schema
@ -20,14 +21,14 @@ from voluptuous import Optional, Required
push_apk_description_schema = Schema({
Required('dependent-tasks'): object,
Required('name'): basestring,
Required('name'): text_type,
Required('label'): task_description_schema['label'],
Required('description'): task_description_schema['description'],
Required('job-from'): task_description_schema['job-from'],
Required('attributes'): task_description_schema['attributes'],
Required('treeherder'): task_description_schema['treeherder'],
Required('run-on-projects'): task_description_schema['run-on-projects'],
Required('worker-type'): optionally_keyed_by('release-level', basestring),
Required('worker-type'): optionally_keyed_by('release-level', text_type),
Required('worker'): object,
Required('scopes'): None,
Required('shipping-phase'): task_description_schema['shipping-phase'],

Просмотреть файл

@ -7,6 +7,7 @@ Transform the push-apk-checks kind into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.task import task_description_schema
from taskgraph.transforms.push_apk import (
@ -23,15 +24,15 @@ from voluptuous import Required
transforms = TransformSequence()
transforms.add_validate(Schema({
Required('dependent-tasks'): object,
Required('name'): basestring,
Required('name'): text_type,
Required('label'): task_description_schema['label'],
Required('description'): task_description_schema['description'],
Required('job-from'): task_description_schema['job-from'],
Required('attributes'): task_description_schema['attributes'],
Required('treeherder'): task_description_schema['treeherder'],
Required('package-name'): optionally_keyed_by('project', basestring),
Required('package-name'): optionally_keyed_by('project', text_type),
Required('run-on-projects'): task_description_schema['run-on-projects'],
Required('worker-type'): basestring,
Required('worker-type'): text_type,
Required('worker'): object,
Required('shipping-phase'): task_description_schema['shipping-phase'],
Required('shipping-product'): task_description_schema['shipping-product'],

Просмотреть файл

@ -5,6 +5,7 @@
from __future__ import absolute_import, print_function, unicode_literals
from copy import deepcopy
from six import text_type
from voluptuous import (
Any,
@ -25,17 +26,17 @@ raptor_description_schema = Schema({
# Raptor specific configs.
Optional('apps'): optionally_keyed_by(
'test-platform',
[basestring]
[text_type]
),
Optional('raptor-test'): basestring,
Optional('raptor-subtests'): [basestring],
Optional('raptor-test'): text_type,
Optional('raptor-subtests'): [text_type],
Optional('activity'): optionally_keyed_by(
'app',
basestring
text_type
),
Optional('binary-path'): optionally_keyed_by(
'app',
basestring
text_type
),
Optional('pageload'): optionally_keyed_by(
'test-platform', 'app',

Просмотреть файл

@ -7,6 +7,7 @@ Transform the beetmover task into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.beetmover import craft_release_properties
@ -31,25 +32,25 @@ transforms = TransformSequence()
beetmover_description_schema = schema.extend({
# depname is used in taskref's to identify the taskID of the unsigned things
Required('depname', default='build'): basestring,
Required('depname', default='build'): text_type,
# attributes is used for enabling artifact-map by declarative artifacts
Required('attributes'): {basestring: object},
Required('attributes'): {text_type: object},
# unique label to describe this beetmover task, defaults to {dep.label}-beetmover
Optional('label'): basestring,
Optional('label'): text_type,
# treeherder is allowed here to override any defaults we use for beetmover. See
# taskcluster/taskgraph/transforms/task.py for the schema details, and the
# below transforms for defaults of various values.
Optional('treeherder'): task_description_schema['treeherder'],
Required('description'): basestring,
Required('worker-type'): optionally_keyed_by('release-level', basestring),
Required('description'): text_type,
Required('worker-type'): optionally_keyed_by('release-level', text_type),
Required('run-on-projects'): [],
# locale is passed only for l10n beetmoving
Optional('locale'): basestring,
Optional('locale'): text_type,
Optional('shipping-phase'): task_description_schema['shipping-phase'],
Optional('shipping-product'): task_description_schema['shipping-product'],
})

Просмотреть файл

@ -6,6 +6,7 @@ Transform the `release-generate-checksums-beetmover` task to also append `build`
"""
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
@ -23,10 +24,10 @@ transforms = TransformSequence()
release_generate_checksums_beetmover_schema = schema.extend({
# depname is used in taskref's to identify the taskID of the unsigned things
Required('depname', default='build'): basestring,
Required('depname', default='build'): text_type,
# unique label to describe this beetmover task, defaults to {dep.label}-beetmover
Optional('label'): basestring,
Optional('label'): text_type,
# treeherder is allowed here to override any defaults we use for beetmover. See
# taskcluster/taskgraph/transforms/task.py for the schema details, and the

Просмотреть файл

@ -7,6 +7,7 @@ Transform the release-generate-checksums-signing task into task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
@ -18,8 +19,8 @@ from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional
release_generate_checksums_signing_schema = schema.extend({
Required('depname', default='release-generate-checksums'): basestring,
Optional('label'): basestring,
Required('depname', default='release-generate-checksums'): text_type,
Optional('label'): text_type,
Optional('treeherder'): task_description_schema['treeherder'],
Optional('shipping-product'): task_description_schema['shipping-product'],
Optional('shipping-phase'): task_description_schema['shipping-phase'],
@ -48,9 +49,7 @@ def make_release_generate_checksums_signing_description(config, jobs):
label = job.get("label", job_template)
description = "Signing of the overall release-related checksums"
dependencies = {
str(dep_job.kind): dep_job.label
}
dependencies = {dep_job.kind: dep_job.label}
upstream_artifacts = [{
"taskId": {"task-reference": "<{}>".format(str(dep_job.kind))},

Просмотреть файл

@ -7,6 +7,7 @@ Transform the release-sign-and-push task into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
@ -18,9 +19,9 @@ from voluptuous import Any, Required
transforms = TransformSequence()
langpack_sign_push_description_schema = schema.extend({
Required('label'): basestring,
Required('description'): basestring,
Required('worker-type'): optionally_keyed_by('release-level', basestring),
Required('label'): text_type,
Required('description'): text_type,
Required('worker-type'): optionally_keyed_by('release-level', text_type),
Required('worker'): {
Required('implementation'): 'push-addons',
Required('channel'): optionally_keyed_by(
@ -30,7 +31,7 @@ langpack_sign_push_description_schema = schema.extend({
},
Required('run-on-projects'): [],
Required('scopes'): optionally_keyed_by('release-level', [basestring]),
Required('scopes'): optionally_keyed_by('release-level', [text_type]),
Required('shipping-phase'): task_description_schema['shipping-phase'],
Required('shipping-product'): task_description_schema['shipping-product'],
})

Просмотреть файл

@ -7,6 +7,7 @@ Transform the release-snap-push kind into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.task import task_description_schema
from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by, Schema
@ -15,15 +16,15 @@ from taskgraph.util.scriptworker import add_scope_prefix
from voluptuous import Optional, Required
push_snap_description_schema = Schema({
Required('name'): basestring,
Required('name'): text_type,
Required('job-from'): task_description_schema['job-from'],
Required('dependencies'): task_description_schema['dependencies'],
Required('description'): task_description_schema['description'],
Required('treeherder'): task_description_schema['treeherder'],
Required('run-on-projects'): task_description_schema['run-on-projects'],
Required('worker-type'): optionally_keyed_by('release-level', basestring),
Required('worker-type'): optionally_keyed_by('release-level', text_type),
Required('worker'): object,
Optional('scopes'): [basestring],
Optional('scopes'): [text_type],
Required('shipping-phase'): task_description_schema['shipping-phase'],
Required('shipping-product'): task_description_schema['shipping-product'],
Optional('extra'): task_description_schema['extra'],

Просмотреть файл

@ -9,6 +9,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import copy
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
@ -25,12 +26,12 @@ from voluptuous import Required, Optional, Extra
packaging_description_schema = schema.extend({
# depname is used in taskref's to identify the taskID of the signed things
Required('depname', default='build'): basestring,
Required('depname', default='build'): text_type,
# unique label to describe this repackaging task
Optional('label'): basestring,
Optional('label'): text_type,
Optional('worker-type'): basestring,
Optional('worker-type'): text_type,
Optional('worker'): object,
# treeherder is allowed here to override any defaults we use for repackaging. See
@ -39,10 +40,10 @@ packaging_description_schema = schema.extend({
Optional('treeherder'): job_description_schema['treeherder'],
# If a l10n task, the corresponding locale
Optional('locale'): basestring,
Optional('locale'): text_type,
# Routes specific to this task, if defined
Optional('routes'): [basestring],
Optional('routes'): [text_type],
# passed through directly to the job description
Optional('extra'): job_description_schema['extra'],
@ -55,17 +56,17 @@ packaging_description_schema = schema.extend({
Optional('shipping-phase'): job_description_schema['shipping-phase'],
Required('package-formats'): optionally_keyed_by(
'build-platform', 'release-type', [basestring]),
'build-platform', 'release-type', [text_type]),
# All l10n jobs use mozharness
Required('mozharness'): {
Extra: object,
# Config files passed to the mozharness script
Required('config'): optionally_keyed_by('build-platform', [basestring]),
Required('config'): optionally_keyed_by('build-platform', [text_type]),
# Additional paths to look for mozharness configs in. These should be
# relative to the base of the source checkout
Optional('config-paths'): [basestring],
Optional('config-paths'): [text_type],
# if true, perform a checkout of a comm-central based branch inside the
# gecko checkout
@ -288,7 +289,7 @@ def make_job_description(config, jobs):
if locale:
# Make sure we specify the locale-specific upload dir
worker.setdefault('env', {}).update(LOCALE=locale)
worker.setdefault('env', {})['LOCALE'] = locale
worker['artifacts'] = _generate_task_output_files(
dep_job, worker_type_implementation(config.graph_config, worker_type),

Просмотреть файл

@ -9,6 +9,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import copy
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
@ -37,13 +38,13 @@ PACKAGE_FORMATS['installer-stub']['args'].extend(["--package-name", "{package-na
packaging_description_schema = schema.extend({
# depname is used in taskref's to identify the taskID of the signed things
Required('depname', default='build'): basestring,
Required('depname', default='build'): text_type,
# unique label to describe this repackaging task
Optional('label'): basestring,
Optional('label'): text_type,
# Routes specific to this task, if defined
Optional('routes'): [basestring],
Optional('routes'): [text_type],
# passed through directly to the job description
Optional('extra'): task_description_schema['extra'],
@ -52,16 +53,16 @@ packaging_description_schema = schema.extend({
Optional('shipping-product'): task_description_schema['shipping-product'],
Optional('shipping-phase'): task_description_schema['shipping-phase'],
Required('package-formats'): _by_platform([basestring]),
Required('package-formats'): _by_platform([text_type]),
# All l10n jobs use mozharness
Required('mozharness'): {
# Config files passed to the mozharness script
Required('config'): _by_platform([basestring]),
Required('config'): _by_platform([text_type]),
# Additional paths to look for mozharness configs in. These should be
# relative to the base of the source checkout
Optional('config-paths'): [basestring],
Optional('config-paths'): [text_type],
# if true, perform a checkout of a comm-central based branch inside the
# gecko checkout

Просмотреть файл

@ -9,6 +9,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import os
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
@ -19,8 +20,8 @@ from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional
repackage_signing_description_schema = schema.extend({
Required('depname', default='repackage'): basestring,
Optional('label'): basestring,
Required('depname', default='repackage'): text_type,
Optional('label'): text_type,
Optional('treeherder'): task_description_schema['treeherder'],
Optional('shipping-product'): task_description_schema['shipping-product'],
Optional('shipping-phase'): task_description_schema['shipping-phase'],

Просмотреть файл

@ -7,6 +7,7 @@ Transform the repackage signing task into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
@ -21,8 +22,8 @@ from voluptuous import Required, Optional
transforms = TransformSequence()
repackage_signing_description_schema = schema.extend({
Required('depname', default='repackage'): basestring,
Optional('label'): basestring,
Required('depname', default='repackage'): text_type,
Optional('label'): text_type,
Optional('extra'): object,
Optional('shipping-product'): task_description_schema['shipping-product'],
Optional('shipping-phase'): task_description_schema['shipping-phase'],

Просмотреть файл

@ -7,6 +7,7 @@ Transform the signing task into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
@ -29,23 +30,23 @@ signing_description_schema = schema.extend({
Required('taskId'): taskref_or_string,
# type of signing task (for CoT)
Required('taskType'): basestring,
Required('taskType'): text_type,
# Paths to the artifacts to sign
Required('paths'): [basestring],
Required('paths'): [text_type],
# Signing formats to use on each of the paths
Required('formats'): [basestring],
Required('formats'): [text_type],
}],
# depname is used in taskref's to identify the taskID of the unsigned things
Required('depname'): basestring,
Required('depname'): text_type,
# attributes for this task
Optional('attributes'): {basestring: object},
Optional('attributes'): {text_type: object},
# unique label to describe this signing task, defaults to {dep.label}-signing
Optional('label'): basestring,
Optional('label'): text_type,
# treeherder is allowed here to override any defaults we use for signing. See
# taskcluster/taskgraph/transforms/task.py for the schema details, and the
@ -53,14 +54,14 @@ signing_description_schema = schema.extend({
Optional('treeherder'): task_description_schema['treeherder'],
# Routes specific to this task, if defined
Optional('routes'): [basestring],
Optional('routes'): [text_type],
Optional('shipping-phase'): task_description_schema['shipping-phase'],
Optional('shipping-product'): task_description_schema['shipping-product'],
# Optional control for how long a task may run (aka maxRunTime)
Optional('max-run-time'): int,
Optional('extra'): {basestring: object},
Optional('extra'): {text_type: object},
# Max number of partner repacks per chunk
Optional('repacks-per-chunk'): int,

Просмотреть файл

@ -6,6 +6,7 @@ Transform the checksums signing task into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
@ -16,8 +17,8 @@ from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional
checksums_signing_description_schema = schema.extend({
Required('depname', default='beetmover'): basestring,
Optional('label'): basestring,
Required('depname', default='beetmover'): text_type,
Optional('label'): text_type,
Optional('treeherder'): task_description_schema['treeherder'],
Optional('shipping-product'): task_description_schema['shipping-product'],
Optional('shipping-phase'): task_description_schema['shipping-phase'],

Просмотреть файл

@ -10,6 +10,8 @@ from __future__ import absolute_import, print_function, unicode_literals
import copy
import os
import six
from six import text_type
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.job import job_description_schema
@ -36,7 +38,7 @@ source_test_description_schema = Schema({
# The platform on which this task runs. This will be used to set up attributes
# (for try selection) and treeherder metadata (for display). If given as a list,
# the job will be "split" into multiple tasks, one with each platform.
Required('platform'): Any(basestring, [basestring]),
Required('platform'): Any(text_type, [text_type]),
# Whether the job requires a build artifact or not. If True, the task will
# depend on a build task and the installer url will be saved to the
@ -58,8 +60,8 @@ source_test_description_schema = Schema({
# A list of artifacts to install from 'fetch' tasks.
Optional('fetches'): {
basestring: optionally_keyed_by(
'platform', job_description_schema['fetches'][basestring]),
text_type: optionally_keyed_by(
'platform', job_description_schema['fetches'][text_type]),
},
})
@ -88,7 +90,7 @@ def set_job_name(config, jobs):
@transforms.add
def expand_platforms(config, jobs):
for job in jobs:
if isinstance(job['platform'], basestring):
if isinstance(job['platform'], text_type):
yield job
continue
@ -235,7 +237,7 @@ def add_decision_task_id_to_env(config, jobs):
continue
env = job['worker'].setdefault('env', {})
env['DECISION_TASK_ID'] = os.environ.get('TASK_ID', '')
env['DECISION_TASK_ID'] = six.ensure_text(os.environ.get('TASK_ID', ''))
yield job

Просмотреть файл

@ -68,48 +68,48 @@ def _compute_geckoview_version(app_version, moz_build_date):
# A task description is a general description of a TaskCluster task
task_description_schema = Schema({
# the label for this task
Required('label'): basestring,
Required('label'): text_type,
# description of the task (for metadata)
Required('description'): basestring,
Required('description'): text_type,
# attributes for this task
Optional('attributes'): {basestring: object},
Optional('attributes'): {text_type: object},
# relative path (from config.path) to the file task was defined in
Optional('job-from'): basestring,
Optional('job-from'): text_type,
# dependencies of this task, keyed by name; these are passed through
# verbatim and subject to the interpretation of the Task's get_dependencies
# method.
Optional('dependencies'): {basestring: object},
Optional('dependencies'): {text_type: object},
# Soft dependencies of this task, as a list of tasks labels
Optional('soft-dependencies'): [basestring],
Optional('soft-dependencies'): [text_type],
Optional('requires'): Any('all-completed', 'all-resolved'),
# expiration and deadline times, relative to task creation, with units
# (e.g., "14 days"). Defaults are set based on the project.
Optional('expires-after'): basestring,
Optional('deadline-after'): basestring,
Optional('expires-after'): text_type,
Optional('deadline-after'): text_type,
# custom routes for this task; the default treeherder routes will be added
# automatically
Optional('routes'): [basestring],
Optional('routes'): [text_type],
# custom scopes for this task; any scopes required for the worker will be
# added automatically. The following parameters will be substituted in each
# scope:
# {level} -- the scm level of this push
# {project} -- the project of this push
Optional('scopes'): [basestring],
Optional('scopes'): [text_type],
# Tags
Optional('tags'): {basestring: basestring},
Optional('tags'): {text_type: text_type},
# custom "task.extra" content
Optional('extra'): {basestring: object},
Optional('extra'): {text_type: object},
# treeherder-related information; see
# https://schemas.taskcluster.net/taskcluster-treeherder/v1/task-treeherder-config.json
@ -117,7 +117,7 @@ task_description_schema = Schema({
# added to the task
Optional('treeherder'): {
# either a bare symbol, or "grp(sym)".
'symbol': basestring,
'symbol': text_type,
# the job kind
'kind': Any('build', 'test', 'other'),
@ -135,10 +135,10 @@ task_description_schema = Schema({
# if omitted, the build will not be indexed.
Optional('index'): {
# the name of the product this build produces
'product': basestring,
'product': text_type,
# the names to use for this job in the TaskCluster index
'job-name': basestring,
'job-name': text_type,
# Type of gecko v2 index to use
'type': Any('generic', 'nightly', 'l10n', 'nightly-with-multi-l10n',
@ -169,10 +169,10 @@ task_description_schema = Schema({
# The `run_on_projects` attribute, defaulting to "all". This dictates the
# projects on which this task should be included in the target task set.
# See the attributes documentation for details.
Optional('run-on-projects'): optionally_keyed_by('build-platform', [basestring]),
Optional('run-on-projects'): optionally_keyed_by('build-platform', [text_type]),
# Like `run_on_projects`, `run-on-hg-branches` defaults to "all".
Optional('run-on-hg-branches'): optionally_keyed_by('project', [basestring]),
Optional('run-on-hg-branches'): optionally_keyed_by('project', [text_type]),
# The `shipping_phase` attribute, defaulting to None. This specifies the
# release promotion phase that this task belongs to.
@ -188,7 +188,7 @@ task_description_schema = Schema({
# release promotion product that this task belongs to.
Required('shipping-product'): Any(
None,
basestring
text_type
),
# Coalescing provides the facility for tasks to be superseded by the same
@ -200,7 +200,7 @@ task_description_schema = Schema({
# order to partition tasks into appropriate sets for coalescing. This
# is combined with the project in order to generate a unique coalescing
# key for the coalescing service.
'job-identifier': basestring,
'job-identifier': text_type,
# The minimum amount of time in seconds between two pending tasks with
# the same coalescing key, before the coalescing service will return
@ -226,22 +226,22 @@ task_description_schema = Schema({
# the provisioner-id/worker-type for the task. The following parameters will
# be substituted in this string:
# {level} -- the scm level of this push
'worker-type': basestring,
'worker-type': text_type,
# Whether the job should use sccache compiler caching.
Required('needs-sccache'): bool,
# Set of artifacts relevant to release tasks
Optional('release-artifacts'): [basestring],
Optional('release-artifacts'): [text_type],
# information specific to the worker implementation that will run this task
Optional('worker'): {
Required('implementation'): basestring,
Required('implementation'): text_type,
Extra: object,
},
# Override the default priority for the project
Optional('priority'): basestring,
Optional('priority'): text_type,
})
TC_TREEHERDER_SCHEMA_URL = 'https://github.com/taskcluster/taskcluster-treeherder/' \
@ -402,11 +402,11 @@ def verify_index(config, index):
# generally `desktop-test`, or an image that acts an awful lot like it.
Required('docker-image'): Any(
# a raw Docker image path (repo/image:tag)
basestring,
text_type,
# an in-tree generated docker image (from `taskcluster/docker/<name>`)
{'in-tree': basestring},
{'in-tree': text_type},
# an indexed docker image
{'indexed': basestring},
{'indexed': text_type},
),
# worker features that should be enabled
@ -428,7 +428,7 @@ def verify_index(config, index):
# Caches are often mounted to the same path as Docker volumes. In this
# case, they take precedence over a Docker volume. But a volume still
# needs to be declared for the path.
Optional('volumes'): [basestring],
Optional('volumes'): [text_type],
# caches to set up for the task
Optional('caches'): [{
@ -437,10 +437,10 @@ def verify_index(config, index):
# name of the cache, allowing re-use by subsequent tasks naming the
# same cache
'name': basestring,
'name': text_type,
# location in the task image where the cache will be mounted
'mount-point': basestring,
'mount-point': text_type,
# Whether the cache is not used in untrusted environments
# (like the Try repo).
@ -453,15 +453,15 @@ def verify_index(config, index):
'type': Any('file', 'directory'),
# task image path from which to read artifact
'path': basestring,
'path': text_type,
# name of the produced artifact (root of the names for
# type=directory)
'name': basestring,
'name': text_type,
}],
# environment variables
Required('env'): {basestring: taskref_or_string},
Required('env'): {text_type: taskref_or_string},
# the command to run; if not given, docker-worker will default to the
# command in the docker image
@ -601,7 +601,7 @@ def build_docker_worker_payload(config, task, task_def):
}
payload['artifacts'] = artifacts
if isinstance(worker.get('docker-image'), basestring):
if isinstance(worker.get('docker-image'), text_type):
out_of_tree_image = worker['docker-image']
run_task = run_task or out_of_tree_image.startswith(
'taskcluster/image_builder')
@ -715,10 +715,10 @@ def build_docker_worker_payload(config, task, task_def):
'type': Any('file', 'directory'),
# filesystem path from which to read artifact
'path': basestring,
'path': text_type,
# if not specified, path is used for artifact name
Optional('name'): basestring
Optional('name'): text_type
}],
# Directories and/or files to be mounted.
@ -728,7 +728,7 @@ def build_docker_worker_payload(config, task, task_def):
Optional('mounts'): [{
# A unique name for the cache volume, implies writable cache directory
# (otherwise mount is a read-only file or directory).
Optional('cache-name'): basestring,
Optional('cache-name'): text_type,
# Optional content for pre-loading cache, or mandatory content for
# read-only file or directory. Pre-loaded content can come from either
# a task artifact or from a URL.
@ -737,12 +737,12 @@ def build_docker_worker_payload(config, task, task_def):
# *** Either (artifact and task-id) or url must be specified. ***
# Artifact name that contains the content.
Optional('artifact'): basestring,
Optional('artifact'): text_type,
# Task ID that has the artifact that contains the content.
Optional('task-id'): taskref_or_string,
# URL that supplies the content in response to an unauthenticated
# GET request.
Optional('url'): basestring
Optional('url'): text_type
},
# *** Either file or directory must be specified. ***
@ -750,10 +750,10 @@ def build_docker_worker_payload(config, task, task_def):
# If mounting a cache or read-only directory, the filesystem location of
# the directory should be specified as a relative path to the task
# directory here.
Optional('directory'): basestring,
Optional('directory'): text_type,
# If mounting a file, specify the relative path within the task
# directory to mount the file (the file will be read only).
Optional('file'): basestring,
Optional('file'): text_type,
# Required if and only if `content` is specified and mounting a
# directory (not a file). This should be the archive format of the
# content (either pre-loaded cache or read-only directory).
@ -761,13 +761,13 @@ def build_docker_worker_payload(config, task, task_def):
}],
# environment variables
Required('env'): {basestring: taskref_or_string},
Required('env'): {text_type: taskref_or_string},
# the maximum time to run, in seconds
Required('max-run-time'): int,
# os user groups for test task workers
Optional('os-groups'): [basestring],
Optional('os-groups'): [text_type],
# feature for test task to run as administarotr
Optional('run-as-administrator'): bool,
@ -899,20 +899,20 @@ def build_generic_worker_payload(config, task, task_def):
Required('taskId'): taskref_or_string,
# type of signing task (for CoT)
Required('taskType'): basestring,
Required('taskType'): text_type,
# Paths to the artifacts to sign
Required('paths'): [basestring],
Required('paths'): [text_type],
# Signing formats to use on each of the paths
Required('formats'): [basestring],
Required('formats'): [text_type],
}],
# behavior for mac iscript
Optional('mac-behavior'): Any(
"mac_notarize", "mac_sign", "mac_sign_and_pkg", "mac_geckodriver",
),
Optional('entitlements-url'): basestring,
Optional('entitlements-url'): text_type,
})
def build_scriptworker_signing_payload(config, task, task_def):
worker = task['worker']
@ -941,17 +941,17 @@ def build_scriptworker_signing_payload(config, task, task_def):
Required('max-run-time', default=600): int,
# locale key, if this is a locale beetmover job
Optional('locale'): basestring,
Optional('locale'): text_type,
Optional('partner-public'): bool,
Required('release-properties'): {
'app-name': basestring,
'app-version': basestring,
'branch': basestring,
'build-id': basestring,
'hash-type': basestring,
'platform': basestring,
'app-name': text_type,
'app-version': text_type,
'branch': text_type,
'build-id': text_type,
'hash-type': text_type,
'platform': text_type,
},
# list of artifact URLs for the artifacts that should be beetmoved
@ -960,13 +960,13 @@ def build_scriptworker_signing_payload(config, task, task_def):
Required('taskId'): taskref_or_string,
# type of signing task (for CoT)
Required('taskType'): basestring,
Required('taskType'): text_type,
# Paths to the artifacts to sign
Required('paths'): [basestring],
Required('paths'): [text_type],
# locale is used to map upload path and allow for duplicate simple names
Required('locale'): basestring,
Required('locale'): text_type,
}],
Optional('artifact-map'): object,
})
@ -1001,7 +1001,7 @@ def build_beetmover_payload(config, task, task_def):
@payload_builder('beetmover-push-to-release', schema={
# the maximum time to run, in seconds
Required('max-run-time'): int,
Required('product'): basestring,
Required('product'): text_type,
})
def build_beetmover_push_to_release_payload(config, task, task_def):
worker = task['worker']
@ -1020,19 +1020,19 @@ def build_beetmover_push_to_release_payload(config, task, task_def):
@payload_builder('beetmover-maven', schema={
Required('max-run-time', default=600): int,
Required('release-properties'): {
'app-name': basestring,
'app-version': basestring,
'branch': basestring,
'build-id': basestring,
'artifact-id': basestring,
'hash-type': basestring,
'platform': basestring,
'app-name': text_type,
'app-version': text_type,
'branch': text_type,
'build-id': text_type,
'artifact-id': text_type,
'hash-type': text_type,
'platform': text_type,
},
Required('upstream-artifacts'): [{
Required('taskId'): taskref_or_string,
Required('taskType'): basestring,
Required('paths'): [basestring],
Required('taskType'): text_type,
Required('paths'): [text_type],
Required('zipExtract', default=False): bool,
}],
Optional('artifact-map'): object,
@ -1050,21 +1050,21 @@ def build_beetmover_maven_payload(config, task, task_def):
@payload_builder('balrog', schema={
Required('balrog-action'): Any(*BALROG_ACTIONS),
Optional('product'): basestring,
Optional('platforms'): [basestring],
Optional('release-eta'): basestring,
Optional('channel-names'): optionally_keyed_by('release-type', [basestring]),
Optional('product'): text_type,
Optional('platforms'): [text_type],
Optional('release-eta'): text_type,
Optional('channel-names'): optionally_keyed_by('release-type', [text_type]),
Optional('require-mirrors'): bool,
Optional('publish-rules'): optionally_keyed_by('release-type', 'release-level', [int]),
Optional('rules-to-update'): optionally_keyed_by(
'release-type', 'release-level', [basestring]),
Optional('archive-domain'): optionally_keyed_by('release-level', basestring),
Optional('download-domain'): optionally_keyed_by('release-level', basestring),
Optional('blob-suffix'): basestring,
Optional('complete-mar-filename-pattern'): basestring,
Optional('complete-mar-bouncer-product-pattern'): basestring,
'release-type', 'release-level', [text_type]),
Optional('archive-domain'): optionally_keyed_by('release-level', text_type),
Optional('download-domain'): optionally_keyed_by('release-level', text_type),
Optional('blob-suffix'): text_type,
Optional('complete-mar-filename-pattern'): text_type,
Optional('complete-mar-bouncer-product-pattern'): text_type,
Optional('update-line'): object,
Optional('suffixes'): [basestring],
Optional('suffixes'): [text_type],
Optional('background-rate'): optionally_keyed_by(
'release-type', 'beta-number', Any(int, None)),
Optional('force-fallback-mapping-update'): optionally_keyed_by(
@ -1077,10 +1077,10 @@ def build_beetmover_maven_payload(config, task, task_def):
Required('taskId'): taskref_or_string,
# type of signing task (for CoT)
Required('taskType'): basestring,
Required('taskType'): text_type,
# Paths to the artifacts to sign
Required('paths'): [basestring],
Required('paths'): [text_type],
}],
})
def build_balrog_payload(config, task, task_def):
@ -1154,7 +1154,7 @@ def build_bouncer_aliases_payload(config, task, task_def):
@payload_builder('bouncer-locations', schema={
Required('implementation'): 'bouncer-locations',
Required('bouncer-products'): [basestring],
Required('bouncer-products'): [text_type],
})
def build_bouncer_locations_payload(config, task, task_def):
worker = task['worker']
@ -1168,7 +1168,7 @@ def build_bouncer_locations_payload(config, task, task_def):
@payload_builder('bouncer-submission', schema={
Required('locales'): [basestring],
Required('locales'): [text_type],
Required('entries'): object,
})
def build_bouncer_submission_payload(config, task, task_def):
@ -1183,8 +1183,8 @@ def build_bouncer_submission_payload(config, task, task_def):
@payload_builder('push-apk', schema={
Required('upstream-artifacts'): [{
Required('taskId'): taskref_or_string,
Required('taskType'): basestring,
Required('paths'): [basestring],
Required('taskType'): text_type,
Required('paths'): [text_type],
Optional('optional', default=False): bool,
}],
@ -1207,11 +1207,11 @@ def build_push_apk_payload(config, task, task_def):
@payload_builder('push-snap', schema={
Required('channel'): basestring,
Required('channel'): text_type,
Required('upstream-artifacts'): [{
Required('taskId'): taskref_or_string,
Required('taskType'): basestring,
Required('paths'): [basestring],
Required('taskType'): text_type,
Required('paths'): [text_type],
}],
})
def build_push_snap_payload(config, task, task_def):
@ -1224,7 +1224,7 @@ def build_push_snap_payload(config, task, task_def):
@payload_builder('shipit-shipped', schema={
Required('release-name'): basestring,
Required('release-name'): text_type,
})
def build_ship_it_shipped_payload(config, task, task_def):
worker = task['worker']
@ -1235,7 +1235,7 @@ def build_ship_it_shipped_payload(config, task, task_def):
@payload_builder('shipit-maybe-release', schema={
Required('phase'): basestring,
Required('phase'): text_type,
})
def build_ship_it_maybe_release_payload(config, task, task_def):
# expect branch name, including path
@ -1256,8 +1256,8 @@ def build_ship_it_maybe_release_payload(config, task, task_def):
Required('channel'): Any('listed', 'unlisted'),
Required('upstream-artifacts'): [{
Required('taskId'): taskref_or_string,
Required('taskType'): basestring,
Required('paths'): [basestring],
Required('taskType'): text_type,
Required('paths'): [text_type],
}],
})
def build_push_addons_payload(config, task, task_def):
@ -1272,23 +1272,23 @@ def build_push_addons_payload(config, task, task_def):
@payload_builder('treescript', schema={
Required('tags'): [Any('buildN', 'release', None)],
Required('bump'): bool,
Optional('bump-files'): [basestring],
Optional('repo-param-prefix'): basestring,
Optional('bump-files'): [text_type],
Optional('repo-param-prefix'): text_type,
Optional('dontbuild'): bool,
Optional('ignore-closed-tree'): bool,
Required('force-dry-run', default=True): bool,
Required('push', default=False): bool,
Optional('source-repo'): basestring,
Optional('source-repo'): text_type,
Optional('l10n-bump-info'): {
Required('name'): basestring,
Required('path'): basestring,
Required('version-path'): basestring,
Optional('revision-url'): basestring,
Required('name'): text_type,
Required('path'): text_type,
Required('version-path'): text_type,
Optional('revision-url'): text_type,
Optional('ignore-config'): object,
Required('platform-configs'): [{
Required('platforms'): [basestring],
Required('path'): basestring,
Optional('format'): basestring,
Required('platforms'): [text_type],
Required('path'): text_type,
Optional('format'): text_type,
}],
},
})
@ -1373,7 +1373,7 @@ def build_dummy_payload(config, task, task_def):
Required('os'): Any('macosx', 'linux'),
# A link for an executable to download
Optional('context'): basestring,
Optional('context'): text_type,
# Tells the worker whether machine should reboot
# after the task is finished.
@ -1384,7 +1384,7 @@ def build_dummy_payload(config, task, task_def):
Optional('command'): [taskref_or_string],
# environment variables
Optional('env'): {basestring: taskref_or_string},
Optional('env'): {text_type: taskref_or_string},
# artifacts to extract from the task image after completion
Optional('artifacts'): [{
@ -1392,11 +1392,11 @@ def build_dummy_payload(config, task, task_def):
Required('type'): Any('file', 'directory'),
# task image path from which to read artifact
Required('path'): basestring,
Required('path'): text_type,
# name of the produced artifact (root of the names for
# type=directory)
Required('name'): basestring,
Required('name'): text_type,
}],
})
def build_script_engine_autophone_payload(config, task, task_def):
@ -1955,7 +1955,7 @@ def build_task(config, tasks):
attributes['always_target'] = task['always-target']
# This logic is here since downstream tasks don't always match their
# upstream dependency's shipping_phase.
# A basestring task['shipping-phase'] takes precedence, then
        # A string task['shipping-phase'] takes precedence, then
# an existing attributes['shipping_phase'], then fall back to None.
if task.get('shipping-phase') is not None:
attributes['shipping_phase'] = task['shipping-phase']
@ -2093,7 +2093,7 @@ def check_run_task_caches(config, tasks):
payload = task['task'].get('payload', {})
command = payload.get('command') or ['']
main_command = command[0] if isinstance(command[0], basestring) else ''
main_command = command[0] if isinstance(command[0], text_type) else ''
run_task = main_command.endswith('run-task')
require_sparse_cache = False
@ -2101,7 +2101,7 @@ def check_run_task_caches(config, tasks):
if run_task:
for arg in command[1:]:
if not isinstance(arg, basestring):
if not isinstance(arg, text_type):
continue
if arg == '--':

Просмотреть файл

@ -21,6 +21,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import copy
import logging
from six import text_type
from mozbuild.schedules import INCLUSIVE_COMPONENTS
from moztest.resolve import TEST_SUITES
@ -214,42 +215,42 @@ transforms = TransformSequence()
# *****WARNING*****
test_description_schema = Schema({
# description of the suite, for the task metadata
'description': basestring,
'description': text_type,
# test suite category and name
Optional('suite'): Any(
basestring,
{Optional('category'): basestring, Optional('name'): basestring},
text_type,
{Optional('category'): text_type, Optional('name'): text_type},
),
# base work directory used to set up the task.
Optional('workdir'): optionally_keyed_by(
'test-platform',
Any(basestring, 'default')),
Any(text_type, 'default')),
# the name by which this test suite is addressed in try syntax; defaults to
# the test-name. This will translate to the `unittest_try_name` or
# `talos_try_name` attribute.
Optional('try-name'): basestring,
Optional('try-name'): text_type,
# additional tags to mark up this type of test
Optional('tags'): {basestring: object},
Optional('tags'): {text_type: object},
# the symbol, or group(symbol), under which this task should appear in
# treeherder.
'treeherder-symbol': basestring,
'treeherder-symbol': text_type,
# the value to place in task.extra.treeherder.machine.platform; ideally
# this is the same as build-platform, and that is the default, but in
# practice it's not always a match.
Optional('treeherder-machine-platform'): basestring,
Optional('treeherder-machine-platform'): text_type,
# attributes to appear in the resulting task (later transforms will add the
# common attributes)
Optional('attributes'): {basestring: object},
Optional('attributes'): {text_type: object},
# relative path (from config.path) to the file task was defined in
Optional('job-from'): basestring,
Optional('job-from'): text_type,
# The `run_on_projects` attribute, defaulting to "all". This dictates the
# projects on which this task should be included in the target task set.
@ -260,14 +261,14 @@ test_description_schema = Schema({
# that are built.
Optional('run-on-projects'): optionally_keyed_by(
'test-platform',
Any([basestring], 'built-projects')),
Any([text_type], 'built-projects')),
# Same as `run-on-projects` except it only applies to Fission tasks. Fission
# tasks will ignore `run_on_projects` and non-Fission tasks will ignore
# `fission-run-on-projects`.
Optional('fission-run-on-projects'): optionally_keyed_by(
'test-platform',
Any([basestring], 'built-projects')),
Any([text_type], 'built-projects')),
# the sheriffing tier for this task (default: set based on test platform)
Optional('tier'): optionally_keyed_by(
@ -289,7 +290,7 @@ test_description_schema = Schema({
# the time (with unit) after which this task is deleted; default depends on
# the branch (see below)
Optional('expires-after'): basestring,
Optional('expires-after'): text_type,
# The different configurations that should be run against this task, defined
# in the TEST_VARIANTS object.
@ -337,11 +338,11 @@ test_description_schema = Schema({
'test-platform',
Any(
# a raw Docker image path (repo/image:tag)
basestring,
text_type,
# an in-tree generated docker image (from `taskcluster/docker/<name>`)
{'in-tree': basestring},
{'in-tree': text_type},
# an indexed docker image
{'indexed': basestring},
{'indexed': text_type},
)
),
@ -366,29 +367,29 @@ test_description_schema = Schema({
# the mozharness script used to run this task
Required('script'): optionally_keyed_by(
'test-platform',
basestring),
text_type),
# the config files required for the task
Required('config'): optionally_keyed_by(
'test-platform',
[basestring]),
[text_type]),
# mochitest flavor for mochitest runs
Optional('mochitest-flavor'): basestring,
Optional('mochitest-flavor'): text_type,
# any additional actions to pass to the mozharness command
Optional('actions'): [basestring],
Optional('actions'): [text_type],
# additional command-line options for mozharness, beyond those
# automatically added
Required('extra-options'): optionally_keyed_by(
'test-platform',
[basestring]),
[text_type]),
# the artifact name (including path) to test on the build task; this is
# generally set in a per-kind transformation
Optional('build-artifact-name'): basestring,
Optional('installer-url'): basestring,
Optional('build-artifact-name'): text_type,
Optional('installer-url'): text_type,
# If not false, tooltool downloads will be enabled via relengAPIProxy
# for either just public files, or all files. Not supported on Windows
@ -423,7 +424,7 @@ test_description_schema = Schema({
},
# The set of test manifests to run.
Optional('test-manifests'): [basestring],
Optional('test-manifests'): [text_type],
# The current chunk (if chunking is enabled).
Optional('this-chunk'): int,
@ -432,7 +433,7 @@ test_description_schema = Schema({
# added automatically
Optional('os-groups'): optionally_keyed_by(
'test-platform',
[basestring]),
[text_type]),
Optional('run-as-administrator'): optionally_keyed_by(
'test-platform',
@ -441,37 +442,37 @@ test_description_schema = Schema({
# -- values supplied by the task-generation infrastructure
# the platform of the build this task is testing
'build-platform': basestring,
'build-platform': text_type,
# the label of the build task generating the materials to test
'build-label': basestring,
'build-label': text_type,
# the label of the signing task generating the materials to test.
# Signed builds are used in xpcshell tests on Windows, for instance.
Optional('build-signing-label'): basestring,
Optional('build-signing-label'): text_type,
# the build's attributes
'build-attributes': {basestring: object},
'build-attributes': {text_type: object},
# the platform on which the tests will run
'test-platform': basestring,
'test-platform': text_type,
# limit the test-platforms (as defined in test-platforms.yml)
# that the test will run on
Optional('limit-platforms'): optionally_keyed_by(
'app',
[basestring]
[text_type]
),
# the name of the test (the key in tests.yml)
'test-name': basestring,
'test-name': text_type,
# the product name, defaults to firefox
Optional('product'): basestring,
Optional('product'): text_type,
# conditional files to determine when these tests should be run
Exclusive(Optional('when'), 'optimization'): {
Optional('files-changed'): [basestring],
Optional('files-changed'): [text_type],
},
# Optimization to perform on this task during the optimization phase.
@ -480,11 +481,11 @@ test_description_schema = Schema({
# The SCHEDULES component for this task; this defaults to the suite
# (not including the flavor) but can be overridden here.
Exclusive(Optional('schedules-component'), 'optimization'): basestring,
Exclusive(Optional('schedules-component'), 'optimization'): text_type,
Optional('worker-type'): optionally_keyed_by(
'test-platform',
Any(basestring, None),
Any(text_type, None),
),
Optional(
@ -498,12 +499,12 @@ test_description_schema = Schema({
# or target.zip (Windows).
Optional('target'): optionally_keyed_by(
'test-platform',
Any(basestring, None, {'index': basestring, 'name': basestring}),
Any(text_type, None, {'index': text_type, 'name': text_type}),
),
# A list of artifacts to install from 'fetch' tasks.
Optional('fetches'): {
basestring: optionally_keyed_by('test-platform', [basestring])
text_type: optionally_keyed_by('test-platform', [text_type])
},
}, required=True)
@ -635,7 +636,7 @@ def handle_suite_category(config, tests):
for test in tests:
test.setdefault('suite', {})
if isinstance(test['suite'], basestring):
if isinstance(test['suite'], text_type):
test['suite'] = {'name': test['suite']}
suite = test['suite'].setdefault('name', test['test-name'])

Просмотреть файл

@ -25,9 +25,8 @@ def add_command(config, tasks):
for task in tasks:
config_task = config_tasks[task['name']]
total_chunks = task["extra"]["chunks"]
task['worker'].setdefault('env', {}).update(
CHANNEL=config_task.task['extra']['channel'],
)
task['worker'].setdefault('env', {})['CHANNEL'] = (
config_task.task['extra']['channel'])
task.setdefault('fetches', {})[config_task.label] = [
"update-verify.cfg",
]

Просмотреть файл

@ -10,6 +10,7 @@ import os
import re
import requests
import requests_unixsocket
import six
import sys
import urllib
import urlparse
@ -169,7 +170,8 @@ class VoidWriter(object):
def generate_context_hash(topsrcdir, image_path, image_name, args=None):
"""Generates a sha256 hash for context directory used to build an image."""
return stream_context_tar(topsrcdir, image_path, VoidWriter(), image_name, args)
return stream_context_tar(
topsrcdir, image_path, VoidWriter(), image_name, args)
class HashingWriter(object):
@ -184,7 +186,7 @@ class HashingWriter(object):
self._writer.write(buf)
def hexdigest(self):
return self._hash.hexdigest()
return six.ensure_text(self._hash.hexdigest())
def create_context_tar(topsrcdir, context_dir, out_path, prefix, args=None):

Просмотреть файл

@ -7,6 +7,7 @@
from __future__ import absolute_import, print_function, unicode_literals
import requests
import six
import subprocess
from redo import retry
@ -48,11 +49,11 @@ def find_hg_revision_push_info(repository, revision):
def get_hg_revision_branch(root, revision):
"""Given the parameters for a revision, find the hg_branch (aka
relbranch) of the revision."""
return subprocess.check_output([
return six.ensure_text(subprocess.check_output([
'hg', 'identify',
'-T', '{branch}',
'--rev', revision,
], cwd=root)
], cwd=root, universal_newlines=True))
# For these functions, we assume that run-task has correctly checked out the

Просмотреть файл

@ -56,7 +56,7 @@ def optionally_keyed_by(*arguments):
for _ in arguments:
options = [schema]
for field in fields:
options.append({'by-' + field: {basestring: schema}})
options.append({'by-' + field: {text_type: schema}})
schema = voluptuous.Any(*options)
return schema
@ -142,9 +142,9 @@ def check_schema(schema):
def iter(path, sch):
def check_identifier(path, k):
if k in (basestring, text_type, voluptuous.Extra):
            if k in (text_type, voluptuous.Extra):
pass
elif isinstance(k, basestring):
elif isinstance(k, text_type):
if not identifier_re.match(k) and not whitelisted(path):
raise RuntimeError(
'YAML schemas should use dashed lower-case identifiers, '
@ -198,11 +198,11 @@ OptimizationSchema = voluptuous.Any(
None,
# search the index for the given index namespaces, and replace this task if found
# the search occurs in order, with the first match winning
{'index-search': [basestring]},
{'index-search': [text_type]},
# consult SETA and skip this task if it is low-value
{'seta': None},
# skip this task if none of the given file patterns match
{'skip-unless-changed': [basestring]},
{'skip-unless-changed': [text_type]},
# skip this task if unless the change files' SCHEDULES contains any of these components
{'skip-unless-schedules': list(schedules.ALL_COMPONENTS)},
# optimize strategy aliases for the test kind
@ -213,7 +213,7 @@ OptimizationSchema = voluptuous.Any(
# shortcut for a string where task references are allowed
taskref_or_string = voluptuous.Any(
basestring,
{voluptuous.Required('task-reference'): basestring},
{voluptuous.Required('artifact-reference'): basestring},
text_type,
{voluptuous.Required('task-reference'): text_type},
{voluptuous.Required('artifact-reference'): text_type},
)

Просмотреть файл

@ -357,10 +357,10 @@ def get_release_config(config):
if release_config['partial_versions'] == "{}":
del release_config['partial_versions']
release_config['version'] = str(config.params['version'])
release_config['appVersion'] = str(config.params['app_version'])
release_config['version'] = config.params['version']
release_config['appVersion'] = config.params['app_version']
release_config['next_version'] = str(config.params['next_version'])
release_config['next_version'] = config.params['next_version']
release_config['build_number'] = config.params['build_number']
return release_config

Просмотреть файл

@ -10,6 +10,7 @@ import os
import datetime
import functools
import requests
import six
import logging
import taskcluster_urls as liburls
from mozbuild.util import memoize
@ -39,7 +40,7 @@ def get_root_url(use_proxy):
is not set."""
if use_proxy:
try:
return os.environ['TASKCLUSTER_PROXY_URL']
return six.ensure_text(os.environ['TASKCLUSTER_PROXY_URL'])
except KeyError:
if 'TASK_ID' not in os.environ:
raise RuntimeError(
@ -57,7 +58,7 @@ def get_root_url(use_proxy):
logger.debug('Running in Taskcluster instance {}{}'.format(
os.environ['TASKCLUSTER_ROOT_URL'],
' with taskcluster-proxy' if 'TASKCLUSTER_PROXY_URL' in os.environ else ''))
return os.environ['TASKCLUSTER_ROOT_URL']
return six.ensure_text(os.environ['TASKCLUSTER_ROOT_URL'])
@memoize
@ -107,7 +108,7 @@ def _handle_artifact(path, response):
def get_artifact_url(task_id, path, use_proxy=False):
artifact_tmpl = liburls.api(get_root_url(False), 'queue', 'v1',
'task/{}/artifacts/{}')
data = artifact_tmpl.format(task_id, path)
data = six.ensure_text(artifact_tmpl.format(task_id, path))
if use_proxy:
# Until Bug 1405889 is deployed, we can't download directly
# from the taskcluster-proxy. Work around by using the /bewit
@ -118,7 +119,7 @@ def get_artifact_url(task_id, path, use_proxy=False):
os.environ['TASKCLUSTER_PROXY_URL'] + '/bewit',
data=data,
allow_redirects=False)
return response.text
return six.ensure_text(response.text)
return data

Просмотреть файл

@ -9,6 +9,7 @@ import json
import hashlib
import os
import shutil
import six
import sqlite3
import subprocess
import requests
@ -379,7 +380,8 @@ def run(try_config={}, full=False, parameters=None, push=True, message='{msg}',
print('Found ' + test_count_message)
# Set the test paths to be run by setting MOZHARNESS_TEST_PATHS.
path_env = {'MOZHARNESS_TEST_PATHS': json.dumps(resolve_tests_by_suite(test_files))}
path_env = {'MOZHARNESS_TEST_PATHS': six.ensure_text(
json.dumps(resolve_tests_by_suite(test_files)))}
try_config.setdefault('env', {}).update(path_env)
# Build commit message.

Просмотреть файл

@ -11,6 +11,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import json
import os
import six
import subprocess
import sys
from abc import ABCMeta, abstractmethod, abstractproperty
@ -171,7 +172,8 @@ class Path(TryConfig):
paths = [mozpath.relpath(mozpath.join(os.getcwd(), p), build.topsrcdir) for p in paths]
return {
'env': {
'MOZHARNESS_TEST_PATHS': json.dumps(resolve_tests_by_suite(paths)),
'MOZHARNESS_TEST_PATHS': six.ensure_text(
json.dumps(resolve_tests_by_suite(paths))),
}
}