Backed out changeset 5ffd6c7264ce (bug 1341214)

Carsten "Tomcat" Book 2017-02-22 15:33:17 +01:00
Parent 9d982b9508
Commit 42bc96ae3d
11 changed files with 70 additions and 110 deletions

View file

@@ -22,6 +22,8 @@ from mach.decorators import (
from mozbuild.base import MachCommandBase
ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
class ShowTaskGraphSubCommand(SubCommand):
"""A SubCommand with TaskGraph-specific arguments"""

View file

@@ -6,17 +6,18 @@
from __future__ import absolute_import, print_function, unicode_literals
import json
import logging
import requests
import yaml
from .create import create_tasks
from .decision import write_artifact
from .optimize import optimize_task_graph
from .taskgraph import TaskGraph
from .util.taskcluster import get_artifact
logger = logging.getLogger(__name__)
TASKCLUSTER_QUEUE_URL = "https://queue.taskcluster.net/v1/task"
TREEHERDER_URL = "https://treeherder.mozilla.org/api"
# We set this to 5 for now because this is what SETA sets the
@@ -62,6 +63,15 @@ def add_tasks(decision_task_id, task_labels, prefix=''):
create_tasks(optimized_graph, label_to_taskid, decision_params)
def get_artifact(task_id, path):
resp = requests.get(url="{}/{}/artifacts/{}".format(TASKCLUSTER_QUEUE_URL, task_id, path))
if path.endswith('.json'):
artifact = json.loads(resp.text)
elif path.endswith('.yml'):
artifact = yaml.load(resp.text)
return artifact
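
As a rough usage sketch of the re-introduced helper (the task id and artifact names below are made up), a .json artifact comes back as a Python dict and a .yml artifact is parsed with yaml.load:

decision_task_id = 'abcDEF123456'  # hypothetical decision task id
label_to_taskid = get_artifact(decision_task_id, 'public/label-to-taskid.json')
parameters = get_artifact(decision_task_id, 'public/parameters.yml')
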
def backfill(project, job_id):
"""
Run the backfill task. This function implements `mach taskgraph backfill-task`,

View file

@@ -12,32 +12,31 @@ import sys
import subprocess
import tarfile
import tempfile
import urllib2
import which
from subprocess import Popen, PIPE
from io import BytesIO
from taskgraph.util import docker
from taskgraph.util.taskcluster import (
find_task_id,
get_artifact_url,
)
from . import GECKO
DOCKER_INDEX = docker.INDEX_PREFIX + '.{}.{}.hash.{}'
INDEX_URL = 'https://index.taskcluster.net/v1/task/' + docker.INDEX_PREFIX + '.{}.{}.hash.{}'
ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
def load_image_by_name(image_name, tag=None):
context_path = os.path.join(GECKO, 'taskcluster', 'docker', image_name)
context_hash = docker.generate_context_hash(GECKO, context_path, image_name)
index_path = DOCKER_INDEX.format('level-3', image_name, context_hash)
task_id = find_task_id(index_path)
image_index_url = INDEX_URL.format('level-3', image_name, context_hash)
print("Fetching", image_index_url)
task = json.load(urllib2.urlopen(image_index_url))
return load_image_by_task_id(task_id, tag)
return load_image_by_task_id(task['taskId'], tag)
def load_image_by_task_id(task_id, tag=None):
artifact_url = get_artifact_url(task_id, 'public/image.tar.zst')
artifact_url = ARTIFACT_URL.format(task_id, 'public/image.tar.zst')
result = load_image(artifact_url, tag)
print("Found docker image: {}:{}".format(result['image'], result['tag']))
if tag:

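A minimal sketch of the lookup flow the backed-out docker code returns to (level, image name and hash are placeholders): the index answers with a small JSON body whose taskId is then spliced into the queue artifact URL.

import json
import urllib2

INDEX_URL = 'https://index.taskcluster.net/v1/task/docker.images.v2.{}.{}.hash.{}'
ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'

# Placeholder values; the real caller derives the hash from the image context.
image_index_url = INDEX_URL.format('level-3', 'desktop-build', '0123abcd')
task = json.load(urllib2.urlopen(image_index_url))
image_url = ARTIFACT_URL.format(task['taskId'], 'public/image.tar.zst')
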
View file

@@ -62,7 +62,9 @@ def load_parameters_file(options):
Load parameters from the --parameters option
"""
import urllib
from taskgraph.util.taskcluster import get_artifact_url
url_prefix = "https://queue.taskcluster.net/v1/task/"
url_postfix = "/artifacts/public/parameters.yml"
filename = options['parameters']
@@ -76,7 +78,7 @@ def load_parameters_file(options):
# fetching parameters.yml using task-id or supplied url
if filename.startswith("task-id="):
task_id = filename.split("=")[1]
filename = get_artifact_url(task_id, 'public/parameters.yml')
filename = url_prefix + task_id + url_postfix
f = urllib.urlopen(filename)
if filename.endswith('.yml'):

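For illustration (the task id is made up), the URL rebuilt for a task-id= argument and the fetch it feeds into look like this:

import urllib
import yaml

task_id = 'abcDEF123456'  # hypothetical decision task
url = ("https://queue.taskcluster.net/v1/task/" + task_id +
       "/artifacts/public/parameters.yml")
parameters = yaml.safe_load(urllib.urlopen(url))
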
View file

@@ -5,8 +5,17 @@
from __future__ import absolute_import, print_function, unicode_literals
import abc
import requests
from taskgraph.util.taskcluster import find_task_id
import json
import os
import urllib2
# if running in a task, prefer to use the taskcluster proxy (http://taskcluster/),
# otherwise hit the services directly
if os.environ.get('TASK_ID'):
INDEX_URL = 'http://taskcluster/index/v1/task/{}'
else:
INDEX_URL = 'https://index.taskcluster.net/v1/task/{}'
class Task(object):
@@ -100,10 +109,11 @@ class Task(object):
"""
for index_path in self.index_paths:
try:
task_id = find_task_id(index_path)
url = INDEX_URL.format(index_path)
existing_task = json.load(urllib2.urlopen(url))
return True, task_id
except requests.exceptions.HTTPError:
return True, existing_task['taskId']
except urllib2.HTTPError:
pass
return False, None
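
A standalone sketch of this optimization lookup (the index path is illustrative): hitting the index URL either yields a JSON body carrying the taskId of a previously indexed task, or raises HTTPError when nothing is indexed there.

import json
import urllib2

INDEX_URL = 'https://index.taskcluster.net/v1/task/{}'

def lookup(index_path):
    # index_path is illustrative, e.g. 'docker.images.v2.level-3.desktop-build.hash.abc123'
    try:
        existing_task = json.load(urllib2.urlopen(INDEX_URL.format(index_path)))
        return True, existing_task['taskId']
    except urllib2.HTTPError:
        return False, None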

View file

@@ -15,11 +15,17 @@ from taskgraph.util.docker import (
generate_context_hash,
INDEX_PREFIX,
)
from taskgraph.util.taskcluster import get_artifact_url
from taskgraph.util.templates import Templates
logger = logging.getLogger(__name__)
# if running in a task, prefer to use the taskcluster proxy (http://taskcluster/),
# otherwise hit the services directly
if os.environ.get('TASK_ID'):
ARTIFACT_URL = 'http://taskcluster/queue/v1/task/{}/artifacts/{}'
else:
ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
class DockerImageTask(base.Task):
@@ -88,7 +94,7 @@ class DockerImageTask(base.Task):
# Only return the task ID if the artifact exists for the indexed
# task.
request = urllib2.Request(
get_artifact_url(taskId, 'public/image.tar.zst'))
ARTIFACT_URL.format(taskId, 'public/image.tar.zst'))
request.get_method = lambda: 'HEAD'
urllib2.urlopen(request)
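
For context, a sketch of that existence check with a hypothetical task id: urllib2 has no dedicated HEAD helper, so the request's method is overridden and a missing artifact surfaces as HTTPError.

import urllib2

ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'

request = urllib2.Request(ARTIFACT_URL.format('abcDEF123456', 'public/image.tar.zst'))
request.get_method = lambda: 'HEAD'
try:
    urllib2.urlopen(request)   # 200: the cached image artifact exists
except urllib2.HTTPError:
    pass                       # 404: fall back to building the image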

View file

@@ -7,6 +7,8 @@ from __future__ import absolute_import, print_function, unicode_literals
from .graph import Graph
from .util.python_path import find_object
TASKCLUSTER_QUEUE_URL = "https://queue.taskcluster.net/v1/task/"
class TaskGraph(object):
"""

View file

@@ -3,7 +3,6 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from voluptuous import Schema, Required
from taskgraph.util.taskcluster import get_artifact_url
from taskgraph.transforms.job import run_job_using
from taskgraph.transforms.tests import (
test_description_schema,
@@ -16,6 +15,8 @@ from taskgraph.transforms.job.common import (
import os
import re
ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
ARTIFACTS = [
# (artifact name prefix, in-image path)
("public/logs/", "build/upload/logs/"),
@@ -58,11 +59,11 @@ def mozharness_test_on_docker(config, job, taskdesc):
("public/test_info/", "/home/worker/workspace/build/blobber_upload_dir/"),
]
installer_url = get_artifact_url('<build>', mozharness['build-artifact-name'])
test_packages_url = get_artifact_url('<build>',
'public/build/target.test_packages.json')
mozharness_url = get_artifact_url('<build>',
'public/build/mozharness.zip')
installer_url = ARTIFACT_URL.format('<build>', mozharness['build-artifact-name'])
test_packages_url = ARTIFACT_URL.format('<build>',
'public/build/target.test_packages.json')
mozharness_url = ARTIFACT_URL.format('<build>',
'public/build/mozharness.zip')
worker['artifacts'] = [{
'name': prefix,
@@ -205,11 +206,11 @@ def mozharness_test_on_windows(config, job, taskdesc):
target = 'firefox-{}.en-US.{}'.format(get_firefox_version(), build_platform)
installer_url = get_artifact_url(
installer_url = ARTIFACT_URL.format(
'<build>', 'public/build/{}.zip'.format(target))
test_packages_url = get_artifact_url(
test_packages_url = ARTIFACT_URL.format(
'<build>', 'public/build/{}.test_packages.json'.format(target))
mozharness_url = get_artifact_url(
mozharness_url = ARTIFACT_URL.format(
'<build>', 'public/build/mozharness.zip')
taskdesc['scopes'].extend(
@@ -269,11 +270,11 @@ def mozharness_test_on_mac_osx(config, job, taskdesc):
mozharness = test['mozharness']
worker = taskdesc['worker']
installer_url = get_artifact_url('<build>', mozharness['build-artifact-name'])
test_packages_url = get_artifact_url('<build>',
'public/build/target.test_packages.json')
mozharness_url = get_artifact_url('<build>',
'public/build/mozharness.zip')
installer_url = ARTIFACT_URL.format('<build>', mozharness['build-artifact-name'])
test_packages_url = ARTIFACT_URL.format('<build>',
'public/build/target.test_packages.json')
mozharness_url = ARTIFACT_URL.format('<build>',
'public/build/mozharness.zip')
worker['artifacts'] = [{
'name': prefix.rstrip('/'),

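For reference (the artifact name is illustrative), substituting the '<build>' token into the template gives URLs of this shape; the angle-bracket placeholder is left in the string to be resolved later against the dependent build task:

ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
installer_url = ARTIFACT_URL.format('<build>', 'public/build/target.tar.bz2')
# -> https://queue.taskcluster.net/v1/task/<build>/artifacts/public/build/target.tar.bz2
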
View file

@@ -13,6 +13,9 @@ from taskgraph.transforms.task import task_description_schema
from voluptuous import Schema, Any, Required, Optional
ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/<{}>/artifacts/{}'
# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings
task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
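
A small illustration (the schema fields are made up) of what that cast buys: voluptuous markers such as Required('label') are wrapper objects, so re-keying the schema by str(k) yields plain string keys that can be compared and merged normally.

from voluptuous import Schema, Required

example_schema = Schema({
    Required('label'): basestring,        # illustrative fields only
    Required('description'): basestring,
})

plain = {str(k): v for k, v in example_schema.schema.iteritems()}
assert 'label' in plain and 'description' in plain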

View file

@@ -19,6 +19,7 @@ from .. import GECKO
IMAGE_DIR = os.path.join(GECKO, 'taskcluster', 'docker')
INDEX_PREFIX = 'docker.images.v2'
ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
def docker_image(name, by_tag=False):

View file

@@ -1,76 +0,0 @@
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
import functools
import os
import yaml
import requests
from mozbuild.util import memoize
from requests.packages.urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter
# if running in a task, prefer to use the taskcluster proxy
# (http://taskcluster/), otherwise hit the services directly
if os.environ.get('TASK_ID'):
INDEX_URL = 'http://taskcluster/index/v1/task/{}'
ARTIFACT_URL = 'http://taskcluster/queue/v1/task/{}/artifacts/{}'
else:
INDEX_URL = 'https://index.taskcluster.net/v1/task/{}'
ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
@memoize
def _get_session():
session = requests.Session()
retry = Retry(total=5, backoff_factor=0.1,
status_forcelist=[500, 502, 503, 504])
session.mount('http://', HTTPAdapter(max_retries=retry))
session.mount('https://', HTTPAdapter(max_retries=retry))
return session
def _do_request(url):
session = _get_session()
return session.get(url, stream=True)
def get_artifact_url(task_id, path):
return ARTIFACT_URL.format(task_id, path)
def get_artifact(task_id, path):
"""
Returns the artifact with the given path for the given task id.
If the path ends with ".json" or ".yml", the content is deserialized as,
respectively, json or yaml, and the corresponding python data (usually
dict) is returned.
For other types of content, a file-like object is returned.
"""
response = _do_request(get_artifact_url(task_id, path))
response.raise_for_status()
if path.endswith('.json'):
return response.json()
if path.endswith('.yml'):
return yaml.load(response.text)
response.raw.read = functools.partial(response.raw.read,
decode_content=True)
return response.raw
def list_artifacts(task_id):
response = _do_request(get_artifact_url(task_id, '').rstrip('/'))
response.raise_for_status()
return response.json()['artifacts']
def find_task_id(index_path):
response = _do_request(INDEX_URL.format(index_path))
response.raise_for_status()
return response.json()['taskId']
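
For reference, a short usage sketch of the helpers this backout removes (the index path and artifact paths are illustrative):

task_id = find_task_id('gecko.v2.mozilla-central.latest.firefox.linux64-opt')
artifacts = list_artifacts(task_id)
parameters = get_artifact(task_id, 'public/parameters.yml')            # parsed as YAML
log_stream = get_artifact(task_id, 'public/logs/live_backing.log')     # file-like object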