зеркало из https://github.com/mozilla/mozillians.git
Merge pull request #4211 from johngian/ecs-staging
Mozillians.org migration to ECS
This commit is contained in:
Коммит
c29699cd94
|
@ -14,6 +14,7 @@ env:
|
|||
DEBUG=True
|
||||
CELERY_TASK_ALWAYS_EAGER=True
|
||||
DEV=True
|
||||
ES_CONNECTION=local
|
||||
services:
|
||||
- elasticsearch
|
||||
- mysql
|
||||
|
|
|
@ -7,7 +7,7 @@ from django.conf import settings
|
|||
from django.contrib import messages
|
||||
from django.contrib.auth.decorators import login_required
|
||||
from django.core.urlresolvers import reverse as django_reverse
|
||||
from django.http import HttpResponsePermanentRedirect, HttpResponseRedirect
|
||||
from django.http import HttpResponsePermanentRedirect, HttpResponseRedirect, HttpResponse
|
||||
from django.utils.encoding import iri_to_uri, smart_str
|
||||
|
||||
from django.utils.translation import ugettext_lazy as _lazy, activate
|
||||
|
@ -195,3 +195,14 @@ class DinoParkLoginMiddleware(object):
|
|||
return HttpResponseRedirect('/beta')
|
||||
|
||||
return self.get_response(request)
|
||||
|
||||
|
||||
class HealthcheckMiddleware(object):
    """Middleware that short-circuits load-balancer healthcheck probes.

    Requests whose path is exactly ``/healthcheck`` are answered
    immediately with a plain ``HttpResponse('OK')``, without running the
    rest of the middleware chain or any view code.
    """

    # Path probed by the load balancer / container orchestrator.
    HEALTHCHECK_PATH = '/healthcheck'

    def __init__(self, get_response):
        # Next callable in the Django middleware chain.
        self.get_response = get_response

    def __call__(self, request):
        # Anything that is not a healthcheck probe continues down the chain.
        if request.path_info != self.HEALTHCHECK_PATH:
            return self.get_response(request)
        return HttpResponse('OK')
|
||||
|
|
|
@ -0,0 +1,42 @@
|
|||
import boto3
|
||||
import os
|
||||
|
||||
from botocore.auth import SigV4Auth
|
||||
from requests_aws4auth import AWS4Auth
|
||||
from elasticsearch import RequestsHttpConnection
|
||||
|
||||
|
||||
class AWS4AuthEncoded(AWS4Auth):
    """``AWS4Auth`` subclass that re-encodes signed headers as UTF-8 bytes.

    After the parent class signs the request, header names and values may
    be text strings. Some transports expect byte strings, so every text
    header name/value is replaced with its UTF-8 encoded equivalent.
    """

    def __call__(self, request):
        """Sign *request* with AWS SigV4, then byte-encode its headers."""
        request = super(AWS4AuthEncoded, self).__call__(request)

        # Iterate over a snapshot: _encode_header_to_utf8 may delete and
        # re-insert keys, which is unsafe while iterating the live mapping.
        for header_name in list(request.headers):
            self._encode_header_to_utf8(request, header_name)

        return request

    def _encode_header_to_utf8(self, request, header_name):
        """Replace one header with a UTF-8 byte-encoded name and value."""
        value = request.headers[header_name]

        # "not isinstance(x, bytes)" is equivalent to the Python-2-only
        # "isinstance(x, unicode)" check (str is bytes on Python 2) and,
        # unlike `unicode`, does not raise NameError on Python 3.
        if not isinstance(value, bytes):
            value = value.encode('utf-8')

        if not isinstance(header_name, bytes):
            # The key itself changes, so drop the old text key first.
            del request.headers[header_name]
            header_name = header_name.encode('utf-8')

        request.headers[header_name] = value
|
||||
|
||||
|
||||
class AWSRequestsHttpConnection(RequestsHttpConnection):
    """Elasticsearch HTTP connection that signs each request with AWS SigV4.

    Credentials are resolved from the default boto3 session on every
    request, so rotating credentials (e.g. ECS task roles) keep working
    without restarting the process.
    """

    def perform_request(self, *args, **kwargs):
        """Attach SigV4 auth to the underlying session, then delegate."""
        region = os.environ['AWS_ES_REGION']
        credentials = boto3.session.Session().get_credentials()
        # SigV4Auth wraps/resolves the credentials (including any session
        # token) for the 'es' service in the target region.
        signed_creds = SigV4Auth(credentials, 'es', region)

        self.session.auth = AWS4AuthEncoded(
            credentials.access_key,
            credentials.secret_key,
            region,
            'es',
            session_token=signed_creds.credentials.token,
        )
        return super(AWSRequestsHttpConnection, self).perform_request(*args, **kwargs)
|
|
@ -40,6 +40,7 @@ BASKET_API_KEY = 'basket_api_key'
|
|||
BASKET_MANAGERS = None
|
||||
AXES_BEHIND_REVERSE_PROXY = False
|
||||
ES_HOST=es
|
||||
ES_CONNECTION=local
|
||||
|
||||
# Captcha
|
||||
NORECAPTCHA_SITE_KEY = "6LeIxAcTAAAAAJcZVRqyHh71UMIEGNQ_MXjiZKhI"
|
||||
|
|
|
@ -81,6 +81,7 @@ INSTALLED_APPS = (
|
|||
)
|
||||
|
||||
MIDDLEWARE = [
|
||||
'mozillians.common.middleware.HealthcheckMiddleware',
|
||||
'whitenoise.middleware.WhiteNoiseMiddleware',
|
||||
|
||||
'django.middleware.common.CommonMiddleware',
|
||||
|
@ -421,6 +422,14 @@ ES_PROTOCOL = config('ES_PROTOCOL', default='http://')
|
|||
|
||||
def _lazy_haystack_setup():
|
||||
from django.conf import settings
|
||||
from elasticsearch import RequestsHttpConnection
|
||||
from mozillians.common.search import AWSRequestsHttpConnection
|
||||
|
||||
es_connection = config('ES_CONNECTION', default='aws')
|
||||
es_connection_class = {
|
||||
'aws': AWSRequestsHttpConnection,
|
||||
'local': RequestsHttpConnection
|
||||
}
|
||||
|
||||
es_url = '%s%s' % (settings.ES_PROTOCOL, settings.ES_HOST)
|
||||
es_index_name = config('ES_INDEX_NAME', default='mozillians_haystack')
|
||||
|
@ -428,17 +437,26 @@ def _lazy_haystack_setup():
|
|||
'default': {
|
||||
'ENGINE': 'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine',
|
||||
'URL': es_url,
|
||||
'INDEX_NAME': es_index_name
|
||||
'INDEX_NAME': es_index_name,
|
||||
'KWARGS': {
|
||||
'connection_class': es_connection_class[es_connection]
|
||||
}
|
||||
},
|
||||
'tmp': {
|
||||
'ENGINE': 'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine',
|
||||
'URL': es_url,
|
||||
'INDEX_NAME': 'tmp_{}'.format(es_index_name)
|
||||
'INDEX_NAME': 'tmp_{}'.format(es_index_name),
|
||||
'KWARGS': {
|
||||
'connection_class': es_connection_class[es_connection]
|
||||
}
|
||||
},
|
||||
'current': {
|
||||
'ENGINE': 'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine',
|
||||
'URL': es_url,
|
||||
'INDEX_NAME': 'current_{}'.format(es_index_name)
|
||||
'INDEX_NAME': 'current_{}'.format(es_index_name),
|
||||
'KWARGS': {
|
||||
'connection_class': es_connection_class[es_connection]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -595,3 +595,6 @@ backports.ssl_match_hostname==3.7.0.1 \
|
|||
--hash=sha256:bb82e60f9fbf4c080eabd957c39f0641f0fc247d9a16e31e26d594d8f42b9fd2
|
||||
mysqlclient==1.4.4 \
|
||||
--hash=sha256:9c737cc55a5dc8dd3583a942d5a9b21be58d16f00f5fefca4e575e7d9682e98c
|
||||
requests-aws4auth==0.9 \
|
||||
--hash=sha256:c9973af472d6d358ee301f077608361e078642aa019785139b588d526f50a23c \
|
||||
--hash=sha256:e20e4941ccd5706973068f9214d40cb2e669461536b3a57b9ac824ae87744c2c
|
||||
|
|
|
@ -0,0 +1,40 @@
|
|||
COMMIT_HASH=$(shell git rev-parse --short HEAD)
|
||||
DOCKER_REPO=$(ECS_DOCKER_REPO)
|
||||
ENV=$(ECS_ENV)
|
||||
CLUSTER=$(ECS_CLUSTER)
|
||||
|
||||
.PHONY: help
|
||||
|
||||
help:
|
||||
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
|
||||
|
||||
check-aws-dep:
|
||||
@which aws
|
||||
|
||||
check-ecs-deploy-dep:
|
||||
@which ecs-deploy
|
||||
|
||||
check-jq-dep:
|
||||
@which jq
|
||||
|
||||
check-deps: check-aws-dep check-ecs-deploy-dep check-jq-dep
|
||||
|
||||
build-web: ## Build mozillians web docker image
|
||||
docker build -t mozillians:$(COMMIT_HASH) -f ../../docker/prod ../../ --no-cache
|
||||
|
||||
tag-web: build-web ## Tag mozillians web docker image
|
||||
docker tag mozillians:$(COMMIT_HASH) $(DOCKER_REPO):$(COMMIT_HASH)
|
||||
|
||||
push-web: tag-web ## Push mozillians web docker image
|
||||
docker push $(DOCKER_REPO):$(COMMIT_HASH)
|
||||
|
||||
deploy-web: push-web check-deps ## Deploy new image to ECS web service
|
||||
ecs-deploy -c $(CLUSTER) -n mozillians-$(ENV)-web -i $(DOCKER_REPO):$(COMMIT_HASH) --skip-deployments-check --timeout 300
|
||||
|
||||
deploy-celery: push-web check-deps ## Deploy new image to ECS celery service
|
||||
ecs-deploy -c $(CLUSTER) -n mozillians-$(ENV)-celery -i $(DOCKER_REPO):$(COMMIT_HASH) --skip-deployments-check --timeout 300
|
||||
|
||||
deploy-celerybeat: push-web check-deps ## Deploy new image to ECS celerybeat service
|
||||
ecs-deploy -c $(CLUSTER) -n mozillians-$(ENV)-celery-beat -i $(DOCKER_REPO):$(COMMIT_HASH) --skip-deployments-check --timeout 300
|
||||
|
||||
deploy-all: deploy-web deploy-celery deploy-celerybeat ## Deploy new image to all ECS services
|
|
@ -0,0 +1,53 @@
|
|||
# Deploying to ECS
|
||||
## Current setup
|
||||
|
||||
* One `generic` ECS cluster for mozillians prod/staging
|
||||
* We are hosting images in amazon ECR
|
||||
* One repository for all images
|
||||
* Images are using tags with git hashes for versioning
|
||||
* 4 services per env
|
||||
* mozillians-{staging,prod}-media
|
||||
* Nginx to serve media files
|
||||
* Media files are stored in EFS and mounted in all cluster nodes
|
||||
* mozillians-{staging,prod}-web
|
||||
* Web workers for mozillians.org
|
||||
* State
|
||||
* MySQL
|
||||
* RDS
|
||||
* Redis for celery message broker
|
||||
* ElastiCache
|
||||
* Memcache for caching
|
||||
* ElastiCache
|
||||
* mozillians-{staging,prod}-celery
|
||||
* Celery workers for mozillians
|
||||
* mozillians-{staging,prod}-celerybeat
|
||||
* Celerybeat scheduler for celery
|
||||
* Only a single instance may be running at a time
|
||||
* Load balancing
|
||||
* SSL termination and load balancing is happening using ALB
|
||||
* One ALB instance for both envs
|
||||
|
||||
## Deploying code
|
||||
|
||||
Given that development cadence is very low there is no CI for releasing code.
|
||||
Tests are run in Travis using GitHub automation. There is a `Makefile` to make
|
||||
deploying code easier.
|
||||
|
||||
To deploy new code
|
||||
|
||||
* Make sure you have dependencies installed
|
||||
* `aws`
|
||||
* `jq`
|
||||
* `ecs-deploy`
|
||||
* We are building images locally so make sure your local setup is clean (eg. no `.env` files, no untracked files)
|
||||
* Assume the AWS IAM `AdminAccessRole` to be able to deploy changes
|
||||
* Authenticate docker to use ECR
|
||||
* Export required environment variables
|
||||
* `ECS_DOCKER_REPO`
|
||||
* `ECS_ENV`
|
||||
* `ECS_CLUSTER`
|
||||
* `git checkout` to the branch/tag you want to deploy
|
||||
* `make deploy-all` to deploy all services
|
||||
* Builds new docker images tagged as `mozillians:<git-rev>`
|
||||
* Pushes images in docker registry (ECR)
|
||||
* Deploys new code to ECS
|
Загрузка…
Ссылка в новой задаче