Mirror of https://github.com/mozilla/treeherder.git
Add CloudOps-ready Docker builds (#7072)
* First stab at dockerflow support
* Add dockerfile and entrypoint for production builds
* Add docker build to circle
* Add stub version.json file
* Fix linting issues
* Add entrypoints for cronjobs
Parent: b1f6a7d635
Commit: 66f1eac118

.circleci/config.yml

@@ -54,6 +54,62 @@ jobs:
      - run: tox -e docker  # Run tests and coverage within Docker container
      - run: codecov --required -f coverage.xml

  test-docker-build:
    docker:
      - image: docker:18.02.0-ce
    steps:
      - setup_remote_docker
      - checkout
      - run:
          name: Create a version.json
          command: |
            # create a version.json per https://github.com/mozilla-services/Dockerflow/blob/master/docs/version_object.md
            printf '{"commit":"%s","version":"%s","source":"https://github.com/%s/%s","build":"%s"}\n' \
              "$CIRCLE_SHA1" \
              "$CIRCLE_TAG" \
              "$CIRCLE_PROJECT_USERNAME" \
              "$CIRCLE_PROJECT_REPONAME" \
              "$CIRCLE_BUILD_URL" > version.json
      - run:
          name: Build Docker image
          command: docker build -f docker/Dockerfile -t app:build .

  deploy:
    docker:
      - image: docker:18.02.0-ce
    steps:
      - setup_remote_docker
      - checkout
      - run:
          name: Create a version.json
          command: |
            # create a version.json per https://github.com/mozilla-services/Dockerflow/blob/master/docs/version_object.md
            printf '{"commit":"%s","version":"%s","source":"https://github.com/%s/%s","build":"%s"}\n' \
              "$CIRCLE_SHA1" \
              "$CIRCLE_TAG" \
              "$CIRCLE_PROJECT_USERNAME" \
              "$CIRCLE_PROJECT_REPONAME" \
              "$CIRCLE_BUILD_URL" > version.json
      - run:
          name: Build Docker image
          command: docker build -f docker/Dockerfile -t app:build .

      - run:
          name: Deploy to Dockerhub
          command: |
            echo $DOCKER_PASS | docker login -u $DOCKER_USER --password-stdin
            # deploy master
            if [ "${CIRCLE_BRANCH}" == "master" ]; then
              docker tag app:build ${DOCKERHUB_REPO}:latest
              docker push ${DOCKERHUB_REPO}:latest
            elif [ ! -z "${CIRCLE_TAG}" ]; then
              # deploy a release tag...
              echo "${DOCKERHUB_REPO}:${CIRCLE_TAG}"
              docker tag app:build "${DOCKERHUB_REPO}:${CIRCLE_TAG}"
              docker images
              docker push "${DOCKERHUB_REPO}:${CIRCLE_TAG}"
            fi

orbs:
  node: circleci/node@4.1.0
  docker: circleci/docker@1.5.0
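
The "Create a version.json" step follows the Dockerflow version-object spec linked in its comment. As a minimal sketch of what it produces, here is the same printf run outside CircleCI with stand-in values (all of the CIRCLE_* values below are hypothetical; in CI they are injected by CircleCI):

#!/bin/bash
# Hypothetical stand-ins for the variables CircleCI provides during a build.
CIRCLE_SHA1="0123456789abcdef0123456789abcdef01234567"   # commit being built
CIRCLE_TAG="v1.0"                                         # empty unless building a tag
CIRCLE_PROJECT_USERNAME="mozilla"
CIRCLE_PROJECT_REPONAME="treeherder"
CIRCLE_BUILD_URL="https://circleci.com/gh/mozilla/treeherder/1"

# Same printf as the CircleCI step: writes a Dockerflow version object to version.json.
printf '{"commit":"%s","version":"%s","source":"https://github.com/%s/%s","build":"%s"}\n' \
  "$CIRCLE_SHA1" \
  "$CIRCLE_TAG" \
  "$CIRCLE_PROJECT_USERNAME" \
  "$CIRCLE_PROJECT_REPONAME" \
  "$CIRCLE_BUILD_URL" > version.json

cat version.json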

@@ -64,3 +120,10 @@ workflows:
      - javascript-tests
      - builds
      - python-tests
      - test-docker-build
      - deploy:
          filters:
            tags:
              only: /.*/
            branches:
              only: master
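
With these filters, `deploy` runs for pushes to master (publishing `${DOCKERHUB_REPO}:latest`) and for any tag (publishing `${DOCKERHUB_REPO}:<tag>`), assuming DOCKERHUB_REPO, DOCKER_USER and DOCKER_PASS are configured in the CircleCI project. A sketch of cutting a tagged release; the tag name is only an example:

# Any tag matches the /.*/ filter, so pushing one triggers the deploy job.
git tag v1.0
git push origin v1.0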

.dockerignore (removed)

@@ -1,6 +0,0 @@
# Exclude all files but those used by the `Dockerfile` to speed up
# the transferring of the build context during the Docker build.
# The rest of the source will be made available via bind mounts.

*
!requirements/

docker/Dockerfile (new file)

@@ -0,0 +1,42 @@
## Frontend stage
FROM node:12.14.1 AS frontend

WORKDIR /app

COPY ui/ /app/ui/
COPY .neutrinorc.js package.json webpack.config.js yarn.lock /app/

RUN npm install -g --force yarn@1.22.5
RUN yarn install
RUN yarn build


## Backend stage
FROM python:3.7.10-slim

# libmysqlclient-dev is required for the mysqlclient Python package.
RUN apt-get update && apt-get install -y --no-install-recommends \
    default-libmysqlclient-dev \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

COPY requirements/ /app/requirements/
RUN apt-get update && apt-get install -q --yes gcc && \
    pip install -r requirements/common.txt && \
    apt-get -q --yes remove gcc && \
    apt-get -q --yes autoremove && \
    apt-get clean && \
    rm -rf /root/.cache

COPY bin/ /app/bin/
COPY docker/entrypoint_prod.sh /app/docker/entrypoint_prod.sh
COPY treeherder/ /app/treeherder/
COPY manage.py newrelic.ini version.json /app/

COPY --from=frontend /app/.build/ /app/.build/

RUN python manage.py collectstatic --noinput

ENTRYPOINT ["/bin/bash", "/app/docker/entrypoint_prod.sh"]
CMD ["web"]
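
The same image can be built and smoke-tested locally, roughly mirroring the CircleCI jobs. A sketch, assuming the stub version.json in the repo stands in for the CI-generated one and that the required Django settings (database, broker, secret key, etc.) are supplied via a local env file named .env, used here purely for illustration:

# Build the production image the same way the test-docker-build job does.
docker build -f docker/Dockerfile -t app:build .

# Run the default process type ("web", per CMD); gunicorn listens on its default port 8000.
docker run --rm -p 8000:8000 --env-file .env app:build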

docker/entrypoint_prod.sh (new file)

@@ -0,0 +1,99 @@
#!/bin/bash

# The `release` process type specifies the command to run during deployment, and is where
# we run DB migrations and other tasks that are 'release' rather than 'build' specific:
# https://devcenter.heroku.com/articles/release-phase
# https://devcenter.heroku.com/articles/runtime-principles#build-release-run
if [ "$1" == "release" ]; then
    exec ../bin/pre_deploy

# The `web` process type is the only one that receives external traffic from Heroku's routers.
# We set the maximum request duration to 20 seconds, to ensure that poorly performing API
# queries do not consume a gunicorn worker for unbounded lengths of time. See:
# https://devcenter.heroku.com/articles/python-gunicorn
# The Heroku Python buildpack sets some sensible gunicorn defaults via environment variables:
# https://github.com/heroku/heroku-buildpack-python/blob/master/vendor/python.gunicorn.sh
# https://github.com/heroku/heroku-buildpack-python/blob/master/vendor/WEB_CONCURRENCY.sh
# TODO: Experiment with different dyno sizes and gunicorn concurrency/worker types (bug 1175472).
elif [ "$1" == "web" ]; then
    exec newrelic-admin run-program gunicorn treeherder.config.wsgi:application --timeout 20 --bind 0.0.0.0

# All other process types can have arbitrary names.
# The Celery options such as `--without-heartbeat` are from the recommendations here:
# https://www.cloudamqp.com/docs/celery.html
# The REMAP_SIGTERM is as recommended by:
# https://devcenter.heroku.com/articles/celery-heroku#using-remap_sigterm

# This schedules (but does not run itself) the cron-like tasks listed in `CELERY_BEAT_SCHEDULE`.
# However we're moving away from using this in favour of the Heroku scheduler addon.
# NB: This should not be scaled up to more than 1 dyno otherwise duplicate tasks will be scheduled.
# TODO: Move the remaining tasks to the addon and remove this process type (deps of bug 1176492).
elif [ "$1" == "celery_scheduler" ]; then
    export REMAP_SIGTERM=SIGQUIT
    exec newrelic-admin run-program celery beat -A treeherder

# Push/job data is consumed from exchanges on pulse.mozilla.org using these kombu-powered
# Django management commands. They do not ingest the data themselves, instead adding tasks
# to the `store_pulse_{pushes,jobs}` queues for `worker_store_pulse_data` to process.
# NB: These should not be scaled up to more than 1 of each.
# TODO: Merge these two listeners into one since they use so little CPU each (bug 1530965).
elif [ "$1" == "pulse_listener_pushes" ]; then
    exec newrelic-admin run-program ./manage.py pulse_listener_pushes
elif [ "$1" == "pulse_listener_tasks" ]; then
    exec newrelic-admin run-program ./manage.py pulse_listener_tasks

# Processes pushes/jobs from Pulse that were collected by `pulse_listener_{pushes,tasks}`.
elif [ "$1" == "worker_store_pulse_data" ]; then
    export REMAP_SIGTERM=SIGQUIT
    exec newrelic-admin run-program celery worker -A treeherder --without-gossip --without-mingle --without-heartbeat -Q store_pulse_pushes,store_pulse_tasks --concurrency=3

# Handles the log parsing tasks scheduled by `worker_store_pulse_data` as part of job ingestion.
elif [ "$1" == "worker_log_parser" ]; then
    export REMAP_SIGTERM=SIGQUIT
    exec newrelic-admin run-program celery worker -A treeherder --without-gossip --without-mingle --without-heartbeat -Q log_parser --concurrency=7
elif [ "$1" == "worker_log_parser_fail_raw_sheriffed" ]; then
    export REMAP_SIGTERM=SIGQUIT
    exec newrelic-admin run-program celery worker -A treeherder --without-gossip --without-mingle --without-heartbeat -Q log_parser_fail_raw_sheriffed --concurrency=1
elif [ "$1" == "worker_log_parser_fail_raw_unsheriffed" ]; then
    export REMAP_SIGTERM=SIGQUIT
    exec newrelic-admin run-program celery worker -A treeherder --without-gossip --without-mingle --without-heartbeat -Q log_parser_fail_raw_unsheriffed --concurrency=1
elif [ "$1" == "worker_log_parser_fail_json_sheriffed" ]; then
    export REMAP_SIGTERM=SIGQUIT
    exec newrelic-admin run-program celery worker -A treeherder --without-gossip --without-mingle --without-heartbeat -Q log_parser_fail_json_sheriffed --concurrency=7
elif [ "$1" == "worker_log_parser_fail_json_unsheriffed" ]; then
    export REMAP_SIGTERM=SIGQUIT
    newrelic-admin run-program celery worker -A treeherder --without-gossip --without-mingle --without-heartbeat -Q log_parser_fail_json_unsheriffed --concurrency=7

# Tasks that don't need a dedicated worker.
elif [ "$1" == "worker_misc" ]; then
    export REMAP_SIGTERM=SIGQUIT
    exec newrelic-admin run-program celery worker -A treeherder --without-gossip --without-mingle --without-heartbeat -Q default,generate_perf_alerts,pushlog,seta_analyze_failures --concurrency=3

# Cron jobs
elif [ "$1" == "run_intermittents_commenter" ]; then
    newrelic-admin run-program ./manage.py run_intermittents_commenter -m auto

elif [ "$1" == "update_bugscache" ]; then
    newrelic-admin run-program ./manage.py update_bugscache

elif [ "$1" == "cache_failure_history" ]; then
    newrelic-admin run-program ./manage.py cache_failure_history

elif [ "$1" == "cycle_data" ]; then
    CHUNK_SIZE="$2"
    DAYS="$3"
    # may be empty
    FROM="$4"
    ./manage.py cycle_data --chunk-size="${CHUNK_SIZE}" --days "${DAYS}" "${FROM}"

elif [ "$1" == "perf_sheriff" ]; then
    TIME_WINDOW="$2"
    newrelic-admin run-program ./manage.py perf_sheriff --time-window="${TIME_WINDOW}"

elif [ "$1" == "update_changelog" ]; then
    newrelic-admin run-program ./manage.py update_changelog --days 2

else
    echo "Unknown command: $1"
    exit 1
fi
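
Because the entrypoint dispatches on its first argument, each process type above maps directly to a container command. A sketch of how a deployment (or a local test) might invoke them; the env file name and the cycle_data argument values are placeholders:

# One-off release phase (runs the pre_deploy script: migrations and other deploy-time tasks).
docker run --rm --env-file .env app:build release

# Long-running process types, one container each.
docker run -d --env-file .env -p 8000:8000 app:build web
docker run -d --env-file .env app:build worker_log_parser
docker run -d --env-file .env app:build worker_misc

# Cron-style entrypoints take positional arguments, e.g. cycle_data <chunk-size> <days> [from].
docker run --rm --env-file .env app:build cycle_data 100 120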

requirements/common.in

@@ -35,3 +35,6 @@ django-cache-memoize  # Imported as cache_memoize
mozci

importlib-metadata

# Dockerflow/CloudOps APIs
dockerflow

requirements/common.txt

@@ -179,6 +179,10 @@ djangorestframework==3.12.2 \
    --hash=sha256:0209bafcb7b5010fdfec784034f059d512256424de2a0f084cb82b096d6dd6a7 \
    --hash=sha256:0898182b4737a7b584a2c73735d89816343369f259fea932d90dc78e35d8ac33
    # via -r requirements/common.in
dockerflow==2020.10.0 \
    --hash=sha256:226086becc436b5a1995348e26c4fb2ad1d4e5dbc10dffec0c675c9a43306c8b \
    --hash=sha256:36787fa016e5505d71d60c36cd4e0de7b2d1e50059160bd4e93ceb62bb40d3f8
    # via -r requirements/common.in
ecdsa==0.14.1 \
    --hash=sha256:64c613005f13efec6541bb0a33290d0d03c27abab5f15fbab20fb0ee162bdd8e \
    --hash=sha256:e108a5fe92c67639abae3260e43561af914e7fd0d27bae6d2ec1312ae7934dfe
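
The `--hash` pins and the `# via -r requirements/common.in` markers indicate that common.txt is generated from common.in with pip-tools rather than edited by hand. A sketch of the likely regeneration step after adding dockerflow (the exact flags the project uses may differ):

# Regenerate the hash-locked requirements after editing requirements/common.in.
pip install pip-tools
pip-compile --generate-hashes --output-file requirements/common.txt requirements/common.in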

treeherder/config/settings.py

@@ -43,7 +43,7 @@ SITE_URL = env("SITE_URL", default='http://localhost:8000')

SITE_HOSTNAME = furl(SITE_URL).host
# Including localhost allows using the backend locally
-ALLOWED_HOSTS = [SITE_HOSTNAME, 'localhost']
+ALLOWED_HOSTS = [SITE_HOSTNAME, 'localhost', '127.0.0.1']

# URL handling
APPEND_SLASH = False

@@ -70,6 +70,7 @@ INSTALLED_APPS = [
    'rest_framework',
    'corsheaders',
    'django_filters',
    'dockerflow.django',
    # treeherder apps
    'treeherder.model',
    'treeherder.webapp',

@@ -118,6 +119,7 @@ MIDDLEWARE = [
        'django.middleware.common.CommonMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'dockerflow.django.middleware.DockerflowMiddleware',
    ]
    if middleware
]
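
Adding `dockerflow.django` and its middleware means the app should now answer the standard Dockerflow operational endpoints. A quick smoke test against a locally running `web` container (127.0.0.1:8000 assumed, matching gunicorn's default bind):

# Load-balancer heartbeat: 200 whenever the process is up, no dependency checks.
curl -i http://127.0.0.1:8000/__lbheartbeat__

# Full heartbeat: runs the registered Django system checks (database connectivity, etc.).
curl -i http://127.0.0.1:8000/__heartbeat__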

@@ -226,9 +228,11 @@ LOGGING = {
        'standard': {
            'format': "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s",
        },
        'json': {'()': 'dockerflow.logging.JsonLogFormatter', 'logger_name': 'treeherder'},
    },
    'handlers': {
        'console': {'class': 'logging.StreamHandler', 'formatter': 'standard'},
        'json': {'class': 'logging.StreamHandler', 'formatter': 'json', 'level': 'DEBUG'},
    },
    'loggers': {
        'django': {

@@ -251,6 +255,10 @@ LOGGING = {
            'handlers': ['console'],
            'level': 'WARNING',
        },
        'request.summary': {
            'handlers': ['json'],
            'level': 'DEBUG',
        },
    },
}
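
The new `request.summary` logger is where DockerflowMiddleware writes per-request summaries, and the `json` handler formats them as mozlog-style JSON via dockerflow's JsonLogFormatter. A small, hypothetical way to see the record shape outside Django (the message and extra fields below are made up):

python -c '
import logging, sys
from dockerflow.logging import JsonLogFormatter

# Mirror the "json" handler from the LOGGING config above.
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(JsonLogFormatter(logger_name="treeherder"))

logger = logging.getLogger("request.summary")
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
logger.debug("request summary", extra={"path": "/__heartbeat__", "method": "GET"})
'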

@@ -473,3 +481,6 @@ NOTIFY_ACCESS_TOKEN = env('NOTIFY_ACCESS_TOKEN', default=None)
# This is only used for removing the rate limiting. You can create your own here:
# https://github.com/settings/tokens
GITHUB_TOKEN = env("GITHUB_TOKEN", default=None)

# For dockerflow
BASE_DIR = SRC_DIR

version.json (new file)

@@ -0,0 +1,6 @@
{
  "commit": "stub",
  "version": "stub",
  "source": "https://github.com/mozilla/treeherder",
  "build": "https://tools.taskcluster.net/task-inspector/#XXXXXXXXXXXXXXXXXX"
}
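
This stub exists so the Dockerfile's `COPY manage.py newrelic.ini version.json /app/` step works for builds outside CI; in CircleCI the file is overwritten by the one generated from the CIRCLE_* variables before `docker build` runs. Dockerflow serves whichever version.json sits under settings.BASE_DIR (SRC_DIR, i.e. /app inside the image), so against a running container one would expect:

# Returns the version object baked into the image at /app/version.json.
curl -s http://127.0.0.1:8000/__version__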