Kubernetes image is extended rather than customized (#10399)

The EMBEDDED dags were only really useful for testing,
but they required customising the built production image
(running the build with an extra --build-arg flag). This is
not needed, as it is better to extend the image instead
with FROM and add the dags afterwards. This way you do
not have to rebuild the image while iterating on it.
This commit is contained in:
Jarek Potiuk 2020-08-19 14:19:05 +02:00 коммит произвёл GitHub
Родитель 0b3ded7a55
Коммит e17985382c
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: 4AEE18F83AFDEB23
7 изменённых файлов: 26 добавлений и 25 удалений

Просмотреть файл

@ -361,11 +361,6 @@ COPY --chown=airflow:root --from=airflow-build-image /root/.local "${AIRFLOW_USE
COPY scripts/prod/entrypoint_prod.sh /entrypoint
COPY scripts/prod/clean-logs.sh /clean-logs
ARG EMBEDDED_DAGS="empty"
COPY --chown=airflow:root ${EMBEDDED_DAGS}/ ${AIRFLOW_HOME}/dags/
RUN chmod a+x /entrypoint /clean-logs
# Make /etc/passwd root-group-writeable so that user can be dynamically added by OpenShift

Просмотреть файл

@ -408,9 +408,6 @@ The following build arguments (``--build-arg`` in docker build command) can be u
| ``ADDITIONAL_RUNTIME_DEPS`` | | additional apt runtime dependencies to |
| | | install |
+------------------------------------------+------------------------------------------+------------------------------------------+
| ``EMBEDDED_DAGS`` | ``empty`` | Folder containing dags embedded into the |
| | | image in the ${AIRFLOW_HOME}/dags dir |
+------------------------------------------+------------------------------------------+------------------------------------------+
| ``AIRFLOW_HOME`` | ``/opt/airflow`` | Airflows HOME (thats where logs and |
| | | sqlite databases are stored) |
+------------------------------------------+------------------------------------------+------------------------------------------+

1
breeze
Просмотреть файл

@ -538,6 +538,7 @@ function prepare_command_files() {
# Base python image for the build
export PYTHON_BASE_IMAGE=python:${PYTHON_BASE_IMAGE_VERSION}-slim-buster
export AIRFLOW_CI_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci"
export AIRFLOW_PROD_IMAGE_KUBERNETES="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_PROD_BASE_TAG}-kubernetes"
export AIRFLOW_PROD_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}"
export BUILT_IMAGE_FLAG_FILE="${BUILD_CACHE_DIR}/${BRANCH_NAME}/.built_${PYTHON_MAJOR_MINOR_VERSION}"

Просмотреть файл

@ -32,7 +32,9 @@ trap "${HANDLERS}${HANDLERS:+;}dump_kind_logs" EXIT
get_environment_for_builds_on_ci
initialize_kind_variables
make_sure_kubernetes_tools_are_installed
build_prod_image_for_kubernetes_tests
prepare_prod_build
build_prod_image
build_image_for_kubernetes_tests
load_image_to_kind_cluster
deploy_airflow_with_helm
forward_port_to_kind_webserver

Просмотреть файл

@ -639,6 +639,7 @@ function prepare_prod_build() {
export AIRFLOW_PROD_BASE_TAG="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}"
export AIRFLOW_PROD_BUILD_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_PROD_BASE_TAG}-build"
export AIRFLOW_PROD_IMAGE_KUBERNETES="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_PROD_BASE_TAG}-kubernetes"
export AIRFLOW_PROD_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_PROD_BASE_TAG}"
export AIRFLOW_PROD_IMAGE_DEFAULT="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${BRANCH_NAME}"
export PYTHON_BASE_IMAGE="python:${PYTHON_BASE_IMAGE_VERSION}-slim-buster"
@ -664,6 +665,7 @@ function prepare_prod_build() {
"${CACHE_REGISTRY}"
fi
export CACHE_IMAGE_PREFIX=${CACHE_IMAGE_PREFX:=${GITHUB_REPOSITORY}}
export AIRFLOW_PROD_IMAGE_KUBERNETES="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_PROD_BASE_TAG}-kubernetes"
export CACHED_AIRFLOW_PROD_IMAGE="${CACHE_REGISTRY}/${CACHE_IMAGE_PREFIX}/${AIRFLOW_PROD_BASE_TAG}"
export CACHED_AIRFLOW_PROD_BUILD_IMAGE="${CACHE_REGISTRY}/${CACHE_IMAGE_PREFIX}/${AIRFLOW_PROD_BASE_TAG}-build"
export CACHED_PYTHON_BASE_IMAGE="${CACHE_REGISTRY}/${CACHE_IMAGE_PREFIX}/python:${PYTHON_BASE_IMAGE_VERSION}-slim-buster"
@ -757,7 +759,6 @@ function build_prod_image() {
--build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
--build-arg AIRFLOW_BRANCH="${AIRFLOW_BRANCH_FOR_PYPI_PRELOADING}" \
--build-arg AIRFLOW_EXTRAS="${AIRFLOW_EXTRAS}" \
--build-arg EMBEDDED_DAGS="${EMBEDDED_DAGS}" \
--build-arg BUILD_ID="${CI_BUILD_ID}" \
--build-arg COMMIT_SHA="${COMMIT_SHA}" \
"${DOCKER_CACHE_PROD_DIRECTIVE[@]}" \

Просмотреть файл

@ -214,9 +214,6 @@ function initialize_common_environment {
# Version of Kubernetes to run
export KUBERNETES_VERSION="${KUBERNETES_VERSION:=${DEFAULT_KUBERNETES_VERSION}}"
# folder with DAGs to embed into production image
export EMBEDDED_DAGS=${EMBEDDED_DAGS:="empty"}
# Namespace where airflow is installed via helm
export HELM_AIRFLOW_NAMESPACE="airflow"
@ -241,7 +238,7 @@ function get_environment_for_builds_on_ci() {
export CI_JOB_ID="default-job-id"
if [[ ${CI:=} != "true" ]]; then
print_info
print_info "This is not a CI environment!. Staying with the defaults"
print_info "This is not a CI environment! Staying with the defaults."
print_info
else
if [[ ${TRAVIS:=} == "true" ]]; then

Просмотреть файл

@ -182,7 +182,9 @@ function perform_kind_cluster_operation() {
get_environment_for_builds_on_ci
make_sure_kubernetes_tools_are_installed
initialize_kind_variables
build_prod_image_for_kubernetes_tests
prepare_prod_build
build_prod_image
build_image_for_kubernetes_tests
load_image_to_kind_cluster
deploy_airflow_with_helm
forward_port_to_kind_webserver
@ -248,20 +250,26 @@ function check_cluster_ready_for_airflow() {
}
function build_prod_image_for_kubernetes_tests() {
function build_image_for_kubernetes_tests() {
cd "${AIRFLOW_SOURCES}" || exit 1
export EMBEDDED_DAGS="airflow/example_dags"
export DOCKER_CACHE=${DOCKER_CACHE:="pulled"}
prepare_prod_build
build_prod_image
echo "The ${AIRFLOW_PROD_IMAGE} is prepared for test kubernetes deployment."
docker build --tag "${AIRFLOW_PROD_IMAGE_KUBERNETES}" . -f - <<EOF
FROM ${AIRFLOW_PROD_IMAGE}
USER root
COPY --chown=airflow:root airflow/example_dags/ \${AIRFLOW_HOME}/dags/
USER airflow
EOF
echo "The ${AIRFLOW_PROD_IMAGE_KUBERNETES} is prepared for test kubernetes deployment."
}
function load_image_to_kind_cluster() {
echo
echo "Loading ${AIRFLOW_PROD_IMAGE} to ${KIND_CLUSTER_NAME}"
echo "Loading ${AIRFLOW_PROD_IMAGE_KUBERNETES} to ${KIND_CLUSTER_NAME}"
echo
kind load docker-image --name "${KIND_CLUSTER_NAME}" "${AIRFLOW_PROD_IMAGE}"
kind load docker-image --name "${KIND_CLUSTER_NAME}" "${AIRFLOW_PROD_IMAGE_KUBERNETES}"
}
function forward_port_to_kind_webserver() {
@ -300,8 +308,8 @@ function deploy_airflow_with_helm() {
helm install airflow . --namespace "${HELM_AIRFLOW_NAMESPACE}" \
--set "defaultAirflowRepository=${DOCKERHUB_USER}/${DOCKERHUB_REPO}" \
--set "images.airflow.repository=${DOCKERHUB_USER}/${DOCKERHUB_REPO}" \
--set "images.airflow.tag=${AIRFLOW_PROD_BASE_TAG}" -v 1 \
--set "defaultAirflowTag=${AIRFLOW_PROD_BASE_TAG}" -v 1 \
--set "images.airflow.tag=${AIRFLOW_PROD_BASE_TAG}-kubernetes" -v 1 \
--set "defaultAirflowTag=${AIRFLOW_PROD_BASE_TAG}-kubernetes" -v 1 \
--set "config.api.auth_backend=airflow.api.auth.backend.default"
echo
popd || exit 1