Backed out changeset 3755692f8d5f (bug 1649987) at glandium's request

This commit is contained in:
Narcis Beleuzu 2020-08-19 10:32:09 +03:00
Parent cc32927985
Commit 6cc48251bb
9 changed files: 24 additions and 84 deletions

View file

@ -641,19 +641,3 @@ mac-notarization:
production: security/mac/hardenedruntime/production.entitlements.xml production: security/mac/hardenedruntime/production.entitlements.xml
default: security/mac/hardenedruntime/developer.entitlements.xml default: security/mac/hardenedruntime/developer.entitlements.xml
default: '' default: ''
expiration-policies:
by-project:
try:
default: 1 month
medium: 1 month
long: 1 month
autoland:
default: 1 month
medium: 1 year
# To avoid keeping shippable builds for over a year
long: 1 year
default:
default: 3 months
medium: 1 year
long: 3 years

View file

@ -121,7 +121,6 @@ graph_config_schema = Schema({
): text_type, ): text_type,
Optional('decision-parameters'): text_type, Optional('decision-parameters'): text_type,
}, },
Required("expiration-policies"): optionally_keyed_by("project", {text_type: text_type}),
}) })

View file

@ -193,16 +193,3 @@ def enable_full_crashsymbols(config, jobs):
logger.debug("Disabling full symbol generation for %s", job['name']) logger.debug("Disabling full symbol generation for %s", job['name'])
job['attributes'].pop('enable-full-crashsymbols', None) job['attributes'].pop('enable-full-crashsymbols', None)
yield job yield job
@transforms.add
def set_expiry(config, jobs):
for job in jobs:
attributes = job['attributes']
if "shippable" in attributes and attributes["shippable"] and config.kind in {"build", }:
expiration_policy = "long"
else:
expiration_policy = "medium"
job['expiration-policy'] = expiration_policy
yield job

View file

@ -179,7 +179,6 @@ def make_task(config, jobs):
'type': 'directory', 'type': 'directory',
'name': artifact_prefix, 'name': artifact_prefix,
'path': '/builds/worker/artifacts', 'path': '/builds/worker/artifacts',
'expires-after': expires,
}], }],
}, },
} }

View file

@ -56,7 +56,6 @@ job_description_schema = Schema({
Optional('soft-dependencies'): task_description_schema['soft-dependencies'], Optional('soft-dependencies'): task_description_schema['soft-dependencies'],
Optional('requires'): task_description_schema['requires'], Optional('requires'): task_description_schema['requires'],
Optional('expires-after'): task_description_schema['expires-after'], Optional('expires-after'): task_description_schema['expires-after'],
Optional('expiration-policy'): task_description_schema['expiration-policy'],
Optional('routes'): task_description_schema['routes'], Optional('routes'): task_description_schema['routes'],
Optional('scopes'): task_description_schema['scopes'], Optional('scopes'): task_description_schema['scopes'],
Optional('tags'): task_description_schema['tags'], Optional('tags'): task_description_schema['tags'],

View file

@ -10,7 +10,6 @@ consistency.
from __future__ import absolute_import, print_function, unicode_literals from __future__ import absolute_import, print_function, unicode_literals
from taskgraph.util.taskcluster import get_artifact_prefix from taskgraph.util.taskcluster import get_artifact_prefix
from taskgraph.util.keyed_by import evaluate_keyed_by
SECRET_SCOPE = 'secrets:get:project/releng/{trust_domain}/{kind}/level-{level}/{secret}' SECRET_SCOPE = 'secrets:get:project/releng/{trust_domain}/{kind}/level-{level}/{secret}'
@ -51,12 +50,11 @@ def add_cache(job, taskdesc, name, mount_point, skip_untrusted=False):
def add_artifacts(config, job, taskdesc, path): def add_artifacts(config, job, taskdesc, path):
a = { taskdesc['worker'].setdefault('artifacts', []).append({
'name': get_artifact_prefix(taskdesc), 'name': get_artifact_prefix(taskdesc),
'path': path, 'path': path,
'type': 'directory', 'type': 'directory',
} })
taskdesc['worker'].setdefault('artifacts', []).append(a)
def docker_worker_add_artifacts(config, job, taskdesc): def docker_worker_add_artifacts(config, job, taskdesc):
@ -224,11 +222,3 @@ def add_tooltool(config, job, taskdesc, internal=False):
taskdesc['scopes'].extend([ taskdesc['scopes'].extend([
'project:releng:services/tooltool/api/download/internal', 'project:releng:services/tooltool/api/download/internal',
]) ])
def get_expiration(config, policy='default'):
expires = evaluate_keyed_by(
config.graph_config["expiration-policies"], 'artifact expiration',
{'project': config.params['project']}
)[policy]
return expires

View file

@ -29,7 +29,6 @@ from taskgraph.transforms.job.common import (
setup_secrets, setup_secrets,
docker_worker_add_artifacts, docker_worker_add_artifacts,
generic_worker_add_artifacts, generic_worker_add_artifacts,
get_expiration,
) )
from taskgraph.transforms.task import ( from taskgraph.transforms.task import (
get_branch_repo, get_branch_repo,
@ -156,8 +155,7 @@ def mozharness_on_docker_worker_setup(config, job, taskdesc):
worker.setdefault('artifacts', []).append({ worker.setdefault('artifacts', []).append({
'name': 'public/logs', 'name': 'public/logs',
'path': '{workdir}/logs/'.format(**run), 'path': '{workdir}/logs/'.format(**run),
'type': 'directory', 'type': 'directory'
'expires-after': get_expiration(config, 'medium'),
}) })
worker['taskcluster-proxy'] = run.pop('taskcluster-proxy', None) worker['taskcluster-proxy'] = run.pop('taskcluster-proxy', None)
docker_worker_add_artifacts(config, job, taskdesc) docker_worker_add_artifacts(config, job, taskdesc)
@ -260,8 +258,7 @@ def mozharness_on_generic_worker(config, job, taskdesc):
taskdesc['worker'].setdefault('artifacts', []).append({ taskdesc['worker'].setdefault('artifacts', []).append({
'name': 'public/logs', 'name': 'public/logs',
'path': 'logs', 'path': 'logs',
'type': 'directory', 'type': 'directory'
'expires-after': get_expiration(config, 'medium'),
}) })
if not worker.get('skip-artifacts', False): if not worker.get('skip-artifacts', False):
generic_worker_add_artifacts(config, job, taskdesc) generic_worker_add_artifacts(config, job, taskdesc)

View file

@ -41,7 +41,6 @@ from taskgraph.util.scriptworker import (
) )
from taskgraph.util.signed_artifacts import get_signed_artifacts from taskgraph.util.signed_artifacts import get_signed_artifacts
from taskgraph.util.workertypes import worker_type_implementation from taskgraph.util.workertypes import worker_type_implementation
from taskgraph.transforms.job.common import get_expiration
from voluptuous import Any, Required, Optional, Extra, Match, All, NotIn from voluptuous import Any, Required, Optional, Extra, Match, All, NotIn
from taskgraph import GECKO, MAX_DEPENDENCIES from taskgraph import GECKO, MAX_DEPENDENCIES
from ..util import docker as dockerutil from ..util import docker as dockerutil
@ -99,7 +98,6 @@ task_description_schema = Schema({
# (e.g., "14 days"). Defaults are set based on the project. # (e.g., "14 days"). Defaults are set based on the project.
Optional('expires-after'): text_type, Optional('expires-after'): text_type,
Optional('deadline-after'): text_type, Optional('deadline-after'): text_type,
Optional('expiration-policy'): text_type,
# custom routes for this task; the default treeherder routes will be added # custom routes for this task; the default treeherder routes will be added
# automatically # automatically
@ -417,8 +415,6 @@ def verify_index(config, index):
# name of the produced artifact (root of the names for # name of the produced artifact (root of the names for
# type=directory) # type=directory)
'name': text_type, 'name': text_type,
'expires-after': text_type,
}], }],
# environment variables # environment variables
@ -438,7 +434,7 @@ def verify_index(config, index):
# should be purged # should be purged
Optional('purge-caches-exit-status'): [int], Optional('purge-caches-exit-status'): [int],
# Whether any artifacts are assigned to this worker # Wether any artifacts are assigned to this worker
Optional('skip-artifacts'): bool, Optional('skip-artifacts'): bool,
}) })
def build_docker_worker_payload(config, task, task_def): def build_docker_worker_payload(config, task, task_def):
@ -549,15 +545,11 @@ def build_docker_worker_payload(config, task, task_def):
if 'artifacts' in worker: if 'artifacts' in worker:
artifacts = {} artifacts = {}
expires_policy = get_expiration(config, task.get('expiration-policy', 'default'))
for artifact in worker['artifacts']: for artifact in worker['artifacts']:
expires = artifact.get('expires-after', expires_policy)
if expires > task_def['expires']['relative-datestamp']:
expires = task_def['expires']['relative-datestamp']
artifacts[artifact['name']] = { artifacts[artifact['name']] = {
'path': artifact['path'], 'path': artifact['path'],
'type': artifact['type'], 'type': artifact['type'],
'expires': {'relative-datestamp': expires}, 'expires': task_def['expires'], # always expire with the task
} }
payload['artifacts'] = artifacts payload['artifacts'] = artifacts
@ -673,9 +665,7 @@ def build_docker_worker_payload(config, task, task_def):
'path': text_type, 'path': text_type,
# if not specified, path is used for artifact name # if not specified, path is used for artifact name
Optional('name'): text_type, Optional('name'): text_type
'expires-after': text_type,
}], }],
# Directories and/or files to be mounted. # Directories and/or files to be mounted.
@ -787,13 +777,10 @@ def build_generic_worker_payload(config, task, task_def):
artifacts = [] artifacts = []
expires_policy = get_expiration(config, task.get('expiration-policy', 'default'))
for artifact in worker.get('artifacts', []): for artifact in worker.get('artifacts', []):
expires = artifact.get('expires-after', expires_policy)
a = { a = {
'path': artifact['path'], 'path': artifact['path'],
'type': artifact['type'], 'type': artifact['type'],
'expires': {'relative-datestamp': expires},
} }
if 'name' in artifact: if 'name' in artifact:
a['name'] = artifact['name'] a['name'] = artifact['name']
@ -1702,25 +1689,6 @@ def try_task_config_routes(config, tasks):
yield task yield task
@transforms.add
def set_task_and_artifact_expiry(config, jobs):
"""Set the default expiry for tasks and their artifacts.
These values are read from ci/config.yml
"""
for job in jobs:
expires = get_expiration(config, job.get('expiration-policy', 'default'))
if 'expires-after' not in job:
job['expires-after'] = expires
if 'artifacts' in job['worker']:
for a in job['worker']['artifacts']:
if 'expires-after' not in a:
a['expires-after'] = expires
yield job
@transforms.add @transforms.add
def build_task(config, tasks): def build_task(config, tasks):
for task in tasks: for task in tasks:
@ -1782,6 +1750,9 @@ def build_task(config, tasks):
) )
) )
if 'expires-after' not in task:
task['expires-after'] = '28 days' if config.params.is_try() else '1 year'
if 'deadline-after' not in task: if 'deadline-after' not in task:
task['deadline-after'] = '1 day' task['deadline-after'] = '1 day'

View file

@ -999,6 +999,19 @@ def set_tier(config, tasks):
yield task yield task
@transforms.add
def set_expires_after(config, tasks):
"""Try jobs expire after 2 weeks; everything else lasts 1 year. This helps
keep storage costs low."""
for task in tasks:
if 'expires-after' not in task:
if config.params.is_try():
task['expires-after'] = "14 days"
else:
task['expires-after'] = "1 year"
yield task
@transforms.add @transforms.add
def set_download_symbols(config, tasks): def set_download_symbols(config, tasks):
"""In general, we download symbols immediately for debug builds, but only """In general, we download symbols immediately for debug builds, but only
@ -1813,6 +1826,7 @@ def make_job_description(config, tasks):
if task['mozharness']['requires-signed-builds'] is True: if task['mozharness']['requires-signed-builds'] is True:
jobdesc['dependencies']['build-signing'] = task['build-signing-label'] jobdesc['dependencies']['build-signing'] = task['build-signing-label']
jobdesc['expires-after'] = task['expires-after']
jobdesc['routes'] = [] jobdesc['routes'] = []
jobdesc['run-on-projects'] = sorted(task['run-on-projects']) jobdesc['run-on-projects'] = sorted(task['run-on-projects'])
jobdesc['scopes'] = [] jobdesc['scopes'] = []