#!/usr/bin/env bash
|
|
|
|
# Licensed to the Apache Software Foundation (ASF) under one
|
|
# or more contributor license agreements. See the NOTICE file
|
|
# distributed with this work for additional information
|
|
# regarding copyright ownership. The ASF licenses this file
|
|
# to you under the Apache License, Version 2.0 (the
|
|
# "License"); you may not use this file except in compliance
|
|
# with the License. You may obtain a copy of the License at
|
|
#
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
#
|
|
# Unless required by applicable law or agreed to in writing,
|
|
# software distributed under the License is distributed on an
|
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
# KIND, either express or implied. See the License for the
|
|
# specific language governing permissions and limitations
|
|
# under the License.
|
|
# Fail fast: exit on first error, treat unset variables as errors,
# and make a pipeline fail if any stage fails.
set -euo pipefail

# Absolute path to the directory containing this script (the Airflow sources root).
AIRFLOW_SOURCES="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Bash arrays need to be defined outside of functions unfortunately :(
# Array with extra options for Docker compose
declare -a EXTRA_DC_OPTIONS
# Array with selected integrations
declare -a INTEGRATIONS
# This is where remaining args are passed
declare -a REMAINING_ARGS
# This is where static check options are defined
declare -a EXTRA_STATIC_CHECK_OPTIONS
|
|
|
|
# Sets up all the default variables for Breeze
|
|
# They are needed by all other functions
|
|
# Sets up all the default variables for Breeze.
# They are needed by all other functions.
# Globals: writes COMMAND_TO_RUN, SECOND_COMMAND_TO_RUN, many exported env vars;
#          reads AIRFLOW_SOURCES.
function setup_default_breeze_variables() {
    # Default command to run - entering breeze environment
    COMMAND_TO_RUN="enter_breeze"
    # In some cases we also want to run two commands in a row (for example when we restart the environment)
    SECOND_COMMAND_TO_RUN=""

    # Indicates that we are inside Breeze environment
    export BREEZE=true

    # Maximum screen width to print the lines spanning the whole terminal width
    export MAX_SCREEN_WIDTH=100

    # Directory where all CI scripts are located
    export SCRIPTS_CI_DIR="${AIRFLOW_SOURCES}/scripts/ci"

    # Directory where all the build cache is stored - we keep there status of all the docker images
    # as well as hashes of the important files, but also we generate build scripts there that are
    # used to execute the commands for breeze
    export BUILD_CACHE_DIR="${AIRFLOW_SOURCES}/.build"

    # This folder is mounted to inside the container in /files folder. This is the way how
    # we can exchange DAGs, scripts, packages etc with the container environment
    export FILES_DIR="${AIRFLOW_SOURCES}/files"

    # Temporary dir used well ... temporarily
    export TMP_DIR="${AIRFLOW_SOURCES}/tmp"

    # Create those folders above in case they do not exist
    mkdir -pv "${BUILD_CACHE_DIR}"
    mkdir -pv "${TMP_DIR}"
    mkdir -pv "${FILES_DIR}"

    # Load all the common functions here - those are the functions that are shared between Breeze
    # and CI scripts (CI scripts do not use Breeze as execution environment)
    # shellcheck source=scripts/ci/libraries/_all_libs.sh
    . "${SCRIPTS_CI_DIR}/libraries/_all_libs.sh"

    # We have different versions of images depending on the python version used. We keep up with the
    # latest patch-level changes in Python (this is done automatically during CI builds) so we have
    # to only take into account MAJOR and MINOR version of python. This variable keeps the major/minor
    # version of python in X.Y format (3.6, 3.7, 3.8 etc).
    export PYTHON_MAJOR_MINOR_VERSION="${PYTHON_MAJOR_MINOR_VERSION:=$(read_from_file PYTHON_MAJOR_MINOR_VERSION)}"

    # When we generate documentation for README files, we want to force the width of terminal so that
    # no matter who is running the documentation generation gets the same output
    if [[ ${FORCE_SCREEN_WIDTH:="false"} != "true" ]]; then
        # Sets width of the screen from terminal, clamped to MAX_SCREEN_WIDTH
        SCREEN_WIDTH="$(tput cols)"
        if [[ -z ${SCREEN_WIDTH} ]]; then
            SCREEN_WIDTH=${MAX_SCREEN_WIDTH}
        fi
        if (( SCREEN_WIDTH > MAX_SCREEN_WIDTH )); then
            SCREEN_WIDTH=${MAX_SCREEN_WIDTH}
        fi
    else
        SCREEN_WIDTH=${MAX_SCREEN_WIDTH}
    fi

    # Name of the script is kept in this variable
    CMDNAME="$(basename -- "$0")"

    # Update short and long options in the breeze-complete script
    # This way autocomplete will work automatically with all options available
    # shellcheck source=breeze-complete
    . "${AIRFLOW_SOURCES}/breeze-complete"

    # By default we mount local Airflow sources
    MOUNT_LOCAL_SOURCES="true"

    # Holds last sub-command used - this is used by --help flag to print help for the command entered
    LAST_SUBCOMMAND=""

    # Determines if help should be run (set to true by --help flag)
    RUN_HELP="false"

    # Holds docker compose command if the `docker-compose` command is used.
    DOCKER_COMPOSE_COMMAND=""

    # If set to true, the docker images are rebuilt locally. By default we assume we do not need to
    # rebuild the image but if we find we do, this variable will be set to "true"
    export NEEDS_DOCKER_BUILD="false"

    # By default we only pull images if we do not have them locally.
    # This can be overridden by '--force-pull-images' flag
    export FORCE_PULL_IMAGES="false"

    # Do not enable Kind Kubernetes cluster by default
    export ENABLE_KIND_CLUSTER="false"

    # By default we do not push images. This can be overridden by -u flag.
    export PUSH_IMAGES=${PUSH_IMAGES:="false"}

    # Forward common host credentials to docker (gcloud, aws etc.).
    export FORWARD_CREDENTIALS="false"

    # If set to true, the database will be reset at entry. Works for Postgres and MySQL
    export DB_RESET="false"

    # If it is set to specified version, then the source version of Airflow
    # is removed and the specified version of Airflow is installed from PyPi
    export INSTALL_AIRFLOW_VERSION=${INSTALL_AIRFLOW_VERSION:=""}

    # If it is set to specified reference (tag/branch), then the source version
    # of Airflow is removed and the specified version of Airflow is installed from GitHub
    export INSTALL_AIRFLOW_REFERENCE=${INSTALL_AIRFLOW_REFERENCE:=""}

    # Determines whether to force build without checking if it is needed
    # Can be overridden by '--force-build-images' flag.
    export FORCE_BUILD_IMAGES=${FORCE_BUILD_IMAGES:="false"}

    # If those files are present, the ASCII-art/cheat-sheet are suppressed
    SUPPRESS_CHEATSHEET_FILE="${AIRFLOW_SOURCES}/.suppress_cheatsheet"
    SUPPRESS_ASCIIART_FILE="${AIRFLOW_SOURCES}/.suppress_asciiart"

    # Default values for the flags used
    _BREEZE_DEFAULT_BACKEND="sqlite"
    _BREEZE_DEFAULT_KUBERNETES_MODE="image"
    _BREEZE_DEFAULT_KUBERNETES_VERSION="v1.18.6"
    _BREEZE_DEFAULT_KIND_VERSION="v0.8.0"
    _BREEZE_DEFAULT_HELM_VERSION="v3.2.4"
    # FIX: the original assigned _BREEZE_DEFAULT_POSTGRES_VERSION="9.6" twice in a
    # row - the duplicate assignment was dead code and has been removed.
    _BREEZE_DEFAULT_POSTGRES_VERSION="9.6"
    _BREEZE_DEFAULT_MYSQL_VERSION="5.7"
    STATIC_CHECK_PYTHON_MAJOR_MINOR_VERSION=3.6
}
|
|
|
|
# Initializes development-friendly virtualenv if you are already in such env. It installs all the necessary
|
|
# packages from PyPI and it case of problems it provides useful hints on what prerequisites should be
|
|
# installed. It also removes and resets the existing AIRFLOW_HOME installation to make sure that you
|
|
# have it synchronized with the version of airflow installed. It resets the airflow's sqlite database to
|
|
# a clean state. You can use this function if your virtualenv is broken, to clean it up
|
|
# Initializes development-friendly virtualenv if you are already in such env. It installs all the
# necessary packages from PyPI and in case of problems it provides useful hints on what prerequisites
# should be installed. It also removes and resets the existing AIRFLOW_HOME installation to make sure
# that you have it synchronized with the version of airflow installed, and resets airflow's sqlite
# databases to a clean state. Exits the script in all paths.
function initialize_virtualenv() {
    # Check if we are inside virtualenv: sys.base_prefix only exists inside a venv
    # for the interpreter matching PYTHON_MAJOR_MINOR_VERSION.
    set +e
    echo -e "import sys\nif not hasattr(sys,'base_prefix'):\n sys.exit(1)" | "python${PYTHON_MAJOR_MINOR_VERSION}"
    RES=$?
    set -e
    if [[ ${RES} != "0" ]]; then
        echo >&2
        echo >&2 "ERROR: Initializing local virtualenv only works when you have virtualenv activated"
        echo >&2
        echo >&2 "Please enter your local virtualenv before (for example using 'pyenv activate' or 'workon') "
        echo >&2
        exit 1
    else
        # If no Airflow Home defined - fallback to ${HOME}/airflow
        AIRFLOW_HOME_DIR=${AIRFLOW_HOME:=${HOME}/airflow}
        # Skip Cython compilation of the cassandra driver - much faster install.
        export CASS_DRIVER_NO_CYTHON="1"
        echo
        echo "Initializing the virtualenv: $(command -v python)!"
        echo
        echo "This will wipe out ${AIRFLOW_HOME_DIR} and reset all the databases!"
        echo
        "${AIRFLOW_SOURCES}/confirm" "Proceeding with the initialization"
        echo
        pushd "${AIRFLOW_SOURCES}"
        # Capture pip's status ourselves so we can print installation hints on failure.
        set +e
        pip install -e ".[devel]" \
            --constraint "https://raw.githubusercontent.com/apache/airflow/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt"
        RES=$?
        set -e
        popd
        if [[ ${RES} != "0" ]]; then
            echo "#######################################################################"
            echo " You had some troubles installing the venv !!!!!"
            echo " Try running the command below and rerun virtualenv installation"
            echo
            SYSTEM=$(uname -s)
            if [[ ${SYSTEM} == "Darwin" ]]; then
                echo "    brew install sqlite mysql postgresql openssl"
            else
                # FIX: the Debian/Ubuntu package is 'build-essential' (singular);
                # the original suggested 'build-essentials' which does not exist.
                echo "    sudo apt install build-essential python3.6-dev python3.7-dev python3.8-dev python-dev openssl \\"
                echo "                     sqlite sqlite-dev default-libmysqlclient-dev libmysqld-dev postgresql"
            fi
            echo
            echo "#######################################################################"
            exit ${RES}
        fi
        echo
        echo "Wiping and recreating ${AIRFLOW_HOME_DIR}"
        echo
        rm -rvf "${AIRFLOW_HOME_DIR}"
        mkdir -p "${AIRFLOW_HOME_DIR}"
        echo
        echo "Resetting AIRFLOW sqlite database"
        echo
        # Reset the real DB first (unit-test mode off), then the unit-test DB.
        unset AIRFLOW__CORE__UNIT_TEST_MODE
        airflow db reset -y
        echo
        echo "Resetting AIRFLOW sqlite unit test database"
        echo
        AIRFLOW__CORE__UNIT_TEST_MODE=True airflow db reset -y
        exit 0
    fi
}
|
|
|
|
|
|
# Sets up autocomplete for Breeze for both - bash and zsh
|
|
# Sets up autocomplete for Breeze for both - bash and zsh.
# Links breeze-complete into ~/.bash_completion.d and appends marker-guarded
# snippets to ~/.bashrc, ~/.zshrc and (on MacOS) ~/.bash_profile.
# Always exits the script at the end.
function setup_autocomplete() {
    echo "Installing bash/zsh completion for local user"
    echo
    "${AIRFLOW_SOURCES}/confirm" "This will create ~/.bash_completion.d/ directory and modify ~/.*rc files"
    echo
    echo
    mkdir -pv ~/.bash_completion.d
    # Symlink (not copy) so the completion stays in sync with the checked-out sources.
    ln -sf "${AIRFLOW_SOURCES}/breeze-complete" "${HOME}/.bash_completion.d/"
    echo
    echo "Breeze Bash completion is now linked to: ${AIRFLOW_SOURCES}/breeze-complete"
    echo
    # Marker comment makes the rc-file edits idempotent - each file is modified once.
    local BREEZE_COMMENT="Added by Airflow Breeze autocomplete setup"
    if ! grep "${BREEZE_COMMENT}" "${HOME}/.bashrc" >/dev/null 2>&1; then
        touch ~/.bashrc
        # shellcheck disable=SC2129
        echo "# START: ${BREEZE_COMMENT}" >>~/.bashrc
        cat <<"EOF" >>~/.bashrc
for BCFILE in ~/.bash_completion.d/* ; do
. ${BCFILE}
done
EOF
        echo "# END: ${BREEZE_COMMENT}" >>~/.bashrc
        echo
        echo "The ${HOME}/.bashrc has been modified"
        echo
    else
        echo
        echo "The ${HOME}/.bashrc was already modified before. Not changing it."
        echo
    fi
    if ! grep "${BREEZE_COMMENT}" "${HOME}/.zshrc" >/dev/null 2>&1; then
        # shellcheck disable=SC2129
        echo "# START: ${BREEZE_COMMENT}" >>~/.zshrc
        cat <<"EOF" >>~/.zshrc
autoload compinit && compinit
autoload bashcompinit && bashcompinit
source ~/.bash_completion.d/breeze-complete
EOF
        echo "# END: ${BREEZE_COMMENT}" >>~/.zshrc
        echo
        echo "The ${HOME}/.zshrc has been modified"
        echo
    else
        echo
        echo "The ${HOME}/.zshrc was already modified before. Not changing it."
        echo
    fi
    if [[ "${OSTYPE}" == "darwin"* ]]; then
        # For MacOS we have to handle the special case where terminal app DOES NOT run .bashrc by default
        # but re-runs .bash_profile :(
        # See https://scriptingosx.com/2017/04/about-bash_profile-and-bashrc-on-macos/
        # FIX: redirect grep's stdout/stderr like the .bashrc/.zshrc checks above do -
        # the original leaked the matched line to stdout and printed a
        # "No such file or directory" error when ~/.bash_profile did not exist.
        if ! grep "${BREEZE_COMMENT}" "${HOME}/.bash_profile" >/dev/null 2>&1; then
            # shellcheck disable=SC2129
            echo "# START: ${BREEZE_COMMENT}" >>~/.bash_profile
            cat <<"EOF" >>~/.bash_profile
if [ -r ~/.bashrc ]; then
source ~/.bashrc
fi
EOF
            echo "# END: ${BREEZE_COMMENT}" >>~/.bash_profile
            echo
            echo "The ${HOME}/.bash_profile has been modified"
            echo
        else
            echo
            echo "The ${HOME}/.bash_profile was already modified before. Not changing it."
            echo
        fi
    fi
    echo
    echo
    echo "Breeze completion is installed to ~/.bash_completion.d/breeze-complete"
    echo
    echo "Please exit and re-enter your shell or run:"
    echo
    echo "    source ~/.bash_completion.d/breeze-complete"
    echo
    exit 0
}
|
|
|
|
# Prints information about the current configuration of Breeze - if you enter breeze interactively
|
|
# and you did not suppress cheatsheet or asciiart, it also prints those
|
|
# Prints information about the current configuration of Breeze - if you enter breeze
# interactively and you did not suppress cheatsheet or asciiart, it also prints those.
# Reads: BACKEND, POSTGRES_VERSION, MYSQL_VERSION, SUPPRESS_ASCIIART_FILE,
#        COMMAND_TO_RUN, PRODUCTION_IMAGE and the various image/version variables.
function print_badge {
    # Pick the backend version matching the selected backend (sqlite has no version).
    if [[ ${BACKEND} == "postgres" ]]; then
        BACKEND_VERSION="${POSTGRES_VERSION}"
    elif [[ ${BACKEND} == "mysql" ]]; then
        BACKEND_VERSION="${MYSQL_VERSION}"
    else
        BACKEND_VERSION=""
    fi
    # Full ASCII-art badge only when entering breeze interactively and not suppressed.
    if [[ ! -f "${SUPPRESS_ASCIIART_FILE}" && ${COMMAND_TO_RUN} == "enter_breeze" ]]; then
        cat <<EOF


                                  @&&&&&&@
                                 @&&&&&&&&&&&@
                                &&&&&&&&&&&&&&&&
                                        &&&&&&&&&&
                                            &&&&&&&
                                             &&&&&&&
                           @@@@@@@@@@@@@@@@   &&&&&&
                          @&&&&&&&&&&&&&&&&&&&&&&&&&&
                         &&&&&&&&&&&&&&&&&&&&&&&&&&&&
                                         &&&&&&&&&&&&
                                             &&&&&&&&&
                                         &&&&&&&&&&&&
                                   @@&&&&&&&&&&&&&&&@
                     @&&&&&&&&&&&&&&&&&&&&&&&&&&&&  &&&&&&
                    &&&&&&&&&&&&&&&&&&&&&&&&&&&&    &&&&&&
                   &&&&&&&&&&&&&&&&&&&&&&&&         &&&&&&
                                                     &&&&&&
                                                   &&&&&&&
                                                 @&&&&&&&&
             @&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
             &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
              &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&


     @&&&@       &&  @&&&&&&&&&&&   &&&&&&&&&&&&  &&            &&&&&&&&&&  &&&     &&&  &&&
    &&& &&&      &&  @&&       &&&  &&            &&          &&&       &&&&&&&   &&&&& &&&
   &&&   &&&     &&  @&&&&&&&&&&&&  &&&&&&&&&&&   &&          &&         &&& &&& &&& &&@ &&&
  &&&&&&&&&&&    &&  @&&&&&&&&&     &&            &&          &&@        &&&  &&@&&  &&@&&
 &&&       &&&   &&  @&&     &&&@   &&            &&&&&&&&&&&  &&&&&&&&&&&&&   &&&&   &&&&

&&&&&&&&&&&&   &&&&&&&&&&&&   &&&&&&&&&&&@ &&&&&&&&&&&& &&&&&&&&&&& &&&&&&&&&&&
&&&       &&&  &&        &&&  &&           &&      &&&& &&          &&
&&&&&&&&&&&&@  &&&&&&&&&&&&   &&&&&&&&&&&  &&&&&&&&&&&  &&&&        &&&&&&&&&&
&&&        &&  &&   &&&&      &&           &&      &&&& &&          &&
&&&&&&&&&&&&&  &&     &&&&@   &&&&&&&&&&&@ &&&&&&&&&&&& @&&&&&&&&&&& &&&&&&&&&&&

EOF
        if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
            cat <<EOF

Use production image.

Branch name: ${BRANCH_NAME}
Docker image: ${AIRFLOW_PROD_IMAGE}
Airflow source version: $(get_airflow_version_from_production_image)
EOF
        else
            cat <<EOF

Use CI image.

Branch name: ${BRANCH_NAME}
Docker image: ${AIRFLOW_CI_IMAGE}
Airflow source version: ${AIRFLOW_VERSION}
EOF
        fi
        cat <<EOF
Python version: ${PYTHON_MAJOR_MINOR_VERSION}
DockerHub user: ${DOCKERHUB_USER}
DockerHub repo: ${DOCKERHUB_REPO}
Backend: ${BACKEND} ${BACKEND_VERSION}
EOF
        # Only shown when airflow is installed from PyPI version or GitHub reference
        # (the two variables are mutually exclusive - at most one is non-empty).
        if [[ ${INSTALL_AIRFLOW_VERSION} != "" || ${INSTALL_AIRFLOW_REFERENCE} != "" ]]; then
            cat <<EOF

Airflow installed from: ${INSTALL_AIRFLOW_VERSION}${INSTALL_AIRFLOW_REFERENCE}

EOF
        fi
    else
        # Compact badge (no ASCII art) for non-interactive commands or when suppressed.
        if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
            cat <<EOF

Production image.

Branch name: ${BRANCH_NAME}
Docker image: ${AIRFLOW_PROD_IMAGE}
EOF
        else
            cat <<EOF

CI image.

Branch name: ${BRANCH_NAME}
Docker image: ${AIRFLOW_CI_IMAGE}
EOF
        fi
        cat <<EOF

Airflow source version: ${AIRFLOW_VERSION}
Python version: ${PYTHON_MAJOR_MINOR_VERSION}
DockerHub user: ${DOCKERHUB_USER}
DockerHub repo: ${DOCKERHUB_REPO}
Backend: ${BACKEND} ${BACKEND_VERSION}
EOF
        if [[ ${INSTALL_AIRFLOW_VERSION} != "" || ${INSTALL_AIRFLOW_REFERENCE} != "" ]]; then
            cat <<EOF

Airflow installed from: ${INSTALL_AIRFLOW_VERSION}${INSTALL_AIRFLOW_REFERENCE}

EOF
        fi
    fi
}
|
|
|
|
# Prepares command file that can be used to easily run the commands without the need of using Breeze
|
|
# The command file generated in cache ./build directory is a standalone script that contains
|
|
# All the environment variables and docker-compose configuration to run the command. This is because
|
|
# depending on configuration of Breeze we might have different compose files used and different variables
|
|
# set. Those are a convenience scripts that you might use to debug command execution although
|
|
# In most cases they are used internally by Breeze
|
|
# Prepares command file that can be used to easily run the commands without the need of using
# Breeze. The command file generated in the .build cache directory is a standalone script that
# contains all the environment variables and docker-compose configuration to run the command.
# This is because depending on the configuration of Breeze we might have different compose files
# used and different variables set. These are convenience scripts that you might use to debug
# command execution although in most cases they are used internally by Breeze.
# Arguments:
#   $1 - path of the script file to generate
#   $2 - (already escaped) command appended to the docker-compose invocation
#   $3 - colon-separated list of docker-compose files (COMPOSE_FILE format)
#   $4 - airflow image the generated script should use
function prepare_command_file() {
    local FILE="${1}"
    local CMD="${2}"
    local COMPOSE_FILE="${3}"
    local AIRFLOW_IMAGE="${4}"
    # NOTE: in the heredoc below, unescaped ${VAR} are expanded NOW (baked into the
    # generated file as the current Breeze configuration), while \${VAR} and \$( )
    # are expanded only when the generated script itself runs.
    cat <<EOF > "${FILE}"
#!/usr/bin/env bash
if [[ \${VERBOSE} == "true" ]]; then
echo
echo "Executing script:"
echo
echo "\${BASH_SOURCE[0]} \${@}"
echo
set -x
fi
cd "\$( dirname "\${BASH_SOURCE[0]}" )" || exit
export DOCKERHUB_USER=${DOCKERHUB_USER}
export DOCKERHUB_REPO=${DOCKERHUB_REPO}
HOST_USER_ID=\$(id -ur)
export HOST_USER_ID
HOST_GROUP_ID=\$(id -gr)
export HOST_GROUP_ID
export HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}"
export COMPOSE_FILE="${COMPOSE_FILE}"
export PYTHON_MAJOR_MINOR_VERSION="${PYTHON_MAJOR_MINOR_VERSION}"
export BACKEND="${BACKEND}"
export AIRFLOW_VERSION="${AIRFLOW_VERSION}"
export INSTALL_AIRFLOW_VERSION="${INSTALL_AIRFLOW_VERSION}"
export WEBSERVER_HOST_PORT="${WEBSERVER_HOST_PORT}"
export POSTGRES_HOST_PORT="${POSTGRES_HOST_PORT}"
export POSTGRES_VERSION="${POSTGRES_VERSION}"
export MYSQL_HOST_PORT="${MYSQL_HOST_PORT}"
export MYSQL_VERSION="${MYSQL_VERSION}"
export AIRFLOW_SOURCES="${AIRFLOW_SOURCES}"
export AIRFLOW_CI_IMAGE="${AIRFLOW_CI_IMAGE}"
export AIRFLOW_PROD_IMAGE="${AIRFLOW_PROD_IMAGE}"
export AIRFLOW_IMAGE="${AIRFLOW_IMAGE}"
export SQLITE_URL="${SQLITE_URL}"
docker-compose --log-level INFO ${CMD}
EOF
    # Make the generated script executable for the current user.
    chmod u+x "${FILE}"
}
|
|
|
|
|
|
# Prepare all command files that we are using. Depending on the command to execute we use different
|
|
# convenience scripts:
|
|
# cmd_run_ci - to run CI image command (for example entering Breeze, or running a script in CI image)
|
|
# cmd_run_prod - to run PROD image command (for example entering Prod image or running a script there)
|
|
# test_run_ci - to run test target in CI image
|
|
# dc_ci - to run docker compose command for CI image
|
|
# dc_prod - to run docker compose command for PROD image
|
|
#
|
|
# Prepare all command files that we are using. Depending on the command to execute we use different
# convenience scripts:
#   dc_ci   - to run docker compose command for the CI image
#   dc_prod - to run docker compose command for the PROD image
# Reads: SCRIPTS_CI_DIR, BACKEND, MOUNT_LOCAL_SOURCES, FORWARD_CREDENTIALS,
#        INSTALL_AIRFLOW_VERSION, INTEGRATIONS, DOCKERHUB_* and BRANCH_NAME.
# Exports: COMPOSE_CI_FILE, COMPOSE_PROD_FILE, PYTHON_BASE_IMAGE,
#          AIRFLOW_CI_IMAGE, AIRFLOW_PROD_IMAGE, BUILT_IMAGE_FLAG_FILE.
function prepare_command_files() {
    MAIN_CI_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/base.yml
    MAIN_PROD_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/base.yml
    BACKEND_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/backend-${BACKEND}.yml
    LOCAL_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/local.yml
    LOCAL_PROD_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/local-prod.yml
    REMOVE_SOURCES_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/remove-sources.yml
    FORWARD_CREDENTIALS_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/forward-credentials.yml

    # COMPOSE_FILE is the colon-separated file list understood by docker-compose;
    # later files override settings from earlier ones.
    COMPOSE_CI_FILE=${MAIN_CI_DOCKER_COMPOSE_FILE}:${BACKEND_DOCKER_COMPOSE_FILE}
    COMPOSE_PROD_FILE=${MAIN_PROD_DOCKER_COMPOSE_FILE}:${BACKEND_DOCKER_COMPOSE_FILE}

    # Mount local sources unless explicitly disabled via --skip-mounting-local-sources.
    if [[ "${MOUNT_LOCAL_SOURCES}" != "false" ]]; then
        COMPOSE_CI_FILE=${COMPOSE_CI_FILE}:${LOCAL_DOCKER_COMPOSE_FILE}
        COMPOSE_PROD_FILE=${COMPOSE_PROD_FILE}:${LOCAL_PROD_DOCKER_COMPOSE_FILE}
    fi

    # Forward host credentials (gcloud, aws, ...) when requested via -f flag.
    if [[ ${FORWARD_CREDENTIALS} == "true" ]]; then
        COMPOSE_CI_FILE=${COMPOSE_CI_FILE}:${FORWARD_CREDENTIALS_DOCKER_COMPOSE_FILE}
        COMPOSE_PROD_FILE=${COMPOSE_PROD_FILE}:${FORWARD_CREDENTIALS_DOCKER_COMPOSE_FILE}
    fi

    # When installing a released Airflow version, the mounted sources are removed.
    if [[ ${INSTALL_AIRFLOW_VERSION} != "" ]]; then
        COMPOSE_CI_FILE=${COMPOSE_CI_FILE}:${REMOVE_SOURCES_DOCKER_COMPOSE_FILE}
    fi

    # INTEGRATIONS may be empty - relax nounset while deduplicating it.
    set +u
    # shellcheck disable=SC2207
    UNIQUE_INTEGRATIONS=($(echo "${INTEGRATIONS[@]}" | tr ' ' '\n' | sort -u | tr '\n' ' '))

    # One extra compose file per selected integration (CI image only).
    for _INT in "${UNIQUE_INTEGRATIONS[@]}"
    do
        COMPOSE_CI_FILE=${COMPOSE_CI_FILE}:${SCRIPTS_CI_DIR}/docker-compose/integration-${_INT}.yml
    done
    set -u

    export COMPOSE_CI_FILE
    export COMPOSE_PROD_FILE

    get_base_image_version
    # Base python image for the build
    export PYTHON_BASE_IMAGE=python:${PYTHON_BASE_IMAGE_VERSION}-slim-buster
    export AIRFLOW_CI_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci"
    export AIRFLOW_PROD_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}"
    export BUILT_IMAGE_FLAG_FILE="${BUILD_CACHE_DIR}/${BRANCH_NAME}/.built_${PYTHON_MAJOR_MINOR_VERSION}"

    LAST_DC_CI_FILE="dc_ci"
    LAST_DC_PROD_FILE="dc_prod"

    # Prepare script for "run docker compose CI command"
    prepare_command_file "${BUILD_CACHE_DIR}/${LAST_DC_CI_FILE}" \
        "\"\${@}\"" "${COMPOSE_CI_FILE}" "${AIRFLOW_CI_IMAGE}"

    # Prepare script for "run docker compose PROD command"
    prepare_command_file "${BUILD_CACHE_DIR}/${LAST_DC_PROD_FILE}" \
        "\"\${@}\"" "${COMPOSE_PROD_FILE}" "${AIRFLOW_PROD_IMAGE}"
}
|
|
|
|
# Prints detailed help for all commands and flgas. Used to generate documentation added to BREEZE.rst
|
|
# automatically.
|
|
# Prints detailed help for all commands and flags. Used to generate the
# documentation that gets embedded into BREEZE.rst automatically.
# Relies on print_line / usage / detailed_usage / flags helpers and the
# ALL_BREEZE_COMMANDS list sourced from breeze-complete.
function do_help_all() {
    # General usage section, framed by separator lines.
    echo
    print_line
    usage
    print_line
    echo
    echo
    echo "Detailed usage"
    echo
    print_line
    echo
    # One detailed section per sub-command, each followed by a separator.
    # Intentionally unquoted: ALL_BREEZE_COMMANDS is a space-separated list.
    for BREEZE_SUBCOMMAND in ${ALL_BREEZE_COMMANDS}; do
        detailed_usage "${BREEZE_SUBCOMMAND}"
        print_line
        echo
    done
    echo
    # Finish with the full flag reference.
    flags
}
|
|
|
|
# Parses all arguments that can be passed to Breeze command - that includes command to run and flags.
|
|
# Parses all arguments that can be passed to Breeze command - that includes the command to run
# and flags. Flags come first (validated by getopt against the short/long option lists declared
# in breeze-complete), then at most one sub-command, then remaining args are collected into
# REMAINING_ARGS for the command to consume.
function parse_arguments() {
    set -u
    # getopt normalizes/validates the flags; on an unknown flag print all flags and bail out.
    if ! PARAMS=$(getopt \
        -o "${_BREEZE_GETOPT_SHORT_OPTIONS:=}" \
        -l "${_BREEZE_GETOPT_LONG_OPTIONS:=}" \
        --name "$CMDNAME" -- "$@")
    then
        flags
        exit 1
    fi

    # Replace positional parameters with the normalized option list produced by getopt.
    eval set -- "${PARAMS}"
    unset PARAMS

    # Parse Flags.
    # Please update short and long options in the breeze-complete script
    # This way autocomplete will work out-of-the-box
    while true
    do
      case "${1}" in
        -h|--help)
          RUN_HELP="true"
          shift ;;
        -p|--python)
          export PYTHON_MAJOR_MINOR_VERSION="${2}";
          echo "Python version: ${PYTHON_MAJOR_MINOR_VERSION}"
          echo
          shift 2 ;;
        -b|--backend)
          export BACKEND="${2}";
          echo "Backend: ${BACKEND}"
          echo
          shift 2 ;;
        -i|--integration)
          INTEGRATION=${2}
          check_and_save_allowed_param "INTEGRATION" "integration" "--integration"
          echo "Integration: ${INTEGRATION}"
          # "all" expands to every allowed integration except the "all" sentinel itself.
          if [[ ${INTEGRATION} == "all" ]]; then
              for _INT in ${_BREEZE_ALLOWED_INTEGRATIONS}
              do
                  if [[ ${_INT} != "all" ]]; then
                      echo "${_INT}"
                      INTEGRATIONS+=("${_INT}")
                  fi
              done
          else
              INTEGRATIONS+=("${INTEGRATION}");
          fi
          echo
          shift 2 ;;
        -K|--kubernetes-mode)
          export KUBERNETES_MODE="${2}";
          echo "Kubernetes mode: ${KUBERNETES_MODE}"
          echo
          shift 2 ;;
        -V|--kubernetes-version)
          export KUBERNETES_VERSION="${2}";
          echo "Kubernetes version: ${KUBERNETES_VERSION}"
          echo
          shift 2 ;;
        --kind-version)
          export KIND_VERSION="${2}";
          echo "Kind version: ${KIND_VERSION}"
          echo
          shift 2 ;;
        --helm-version)
          export HELM_VERSION="${2}";
          echo "Helm version: ${HELM_VERSION}"
          echo
          shift 2 ;;
        --postgres-version)
          export POSTGRES_VERSION="${2}";
          echo "Postgres version: ${POSTGRES_VERSION}"
          echo
          shift 2 ;;
        --mysql-version)
          export MYSQL_VERSION="${2}";
          echo "MySQL version: ${MYSQL_VERSION}"
          echo
          shift 2 ;;
        -l|--skip-mounting-local-sources)
          MOUNT_LOCAL_SOURCES="false"
          echo "Mount local sources: ${MOUNT_LOCAL_SOURCES}"
          echo
          shift ;;
        -a|--install-airflow-version)
          INSTALL_AIRFLOW_VERSION="${2}"
          # Reference is exclusive with version
          INSTALL_AIRFLOW_REFERENCE=""
          echo "Installs version of Airflow: ${INSTALL_AIRFLOW_VERSION}"
          echo
          shift 2 ;;
        -t|--install-airflow-reference)
          INSTALL_AIRFLOW_REFERENCE="${2}"
          # Reference is exclusive with version
          INSTALL_AIRFLOW_VERSION=""
          echo "Installs Airflow from reference: ${INSTALL_AIRFLOW_REFERENCE}"
          echo
          shift 2 ;;
        -d|--db-reset)
          echo "Resetting the DB!"
          echo
          export DB_RESET="true"
          shift 1 ;;
        -v|--verbose)
          export VERBOSE="true"
          echo "Verbose output"
          echo
          shift ;;
        -y|--assume-yes)
          export FORCE_ANSWER_TO_QUESTIONS="yes"
          echo "Assuming 'yes' answer to all questions."
          echo
          shift ;;
        -n|--assume-no)
          export FORCE_ANSWER_TO_QUESTIONS="no"
          echo "Assuming 'no' answer to all questions."
          echo
          shift ;;
        -q|--assume-quit)
          export FORCE_ANSWER_TO_QUESTIONS="quit"
          echo "Assuming 'quit' answer to all questions."
          echo
          shift ;;
        -F|--force-build-images)
          echo "Force build images"
          echo
          export FORCE_BUILD_IMAGES="true"
          # if you want to force build an image - assume you want to build it :)
          export FORCE_ANSWER_TO_QUESTIONS="yes"
          shift ;;
        -C|--force-clean-images)
          echo "Clean build of images without cache"
          echo
          export DOCKER_CACHE="no-cache"
          export FORCE_BUILD_IMAGES="true"
          shift ;;
        -L|--build-cache-local)
          echo "Use local cache to build images"
          echo
          export DOCKER_CACHE="local"
          shift ;;
        -U|--build-cache-pulled)
          echo "Use pulled cache to build images"
          echo
          export DOCKER_CACHE="pulled"
          shift ;;
        -X|--build-cache-disabled)
          echo "Use disabled cache to build images"
          echo
          export DOCKER_CACHE="disabled"
          shift ;;
        -P|--force-pull-images)
          echo "Force pulling images before build. Uses pulled images as cache."
          echo
          export FORCE_PULL_IMAGES="true"
          export FORCE_BUILD_IMAGES="true"
          # if you want to force build an image - assume you want to build it :)
          export FORCE_ANSWER_TO_QUESTIONS="yes"
          shift ;;
        -I|--production-image)
          export PRODUCTION_IMAGE="true"
          # Production image has no sqlite backend - clear its URL.
          export SQLITE_URL=
          echo
          echo "*************** PRODUCTION IMAGE *************************"
          echo
          shift ;;
        -E|--extras)
          export AIRFLOW_EXTRAS="${2}"
          echo "Extras : ${AIRFLOW_EXTRAS}"
          shift 2 ;;
        --additional-extras)
          export ADDITIONAL_AIRFLOW_EXTRAS="${2}"
          echo "Additional extras : ${ADDITIONAL_AIRFLOW_EXTRAS}"
          shift 2 ;;
        --additional-python-deps)
          export ADDITIONAL_PYTHON_DEPS="${2}"
          echo "Additional python dependencies: ${ADDITIONAL_PYTHON_DEPS}"
          shift 2 ;;
        --additional-dev-deps)
          export ADDITIONAL_DEV_DEPS="${2}"
          echo "Additional apt dev dependencies: ${ADDITIONAL_DEV_DEPS}"
          shift 2 ;;
        --additional-runtime-deps)
          export ADDITIONAL_RUNTIME_DEPS="${2}"
          echo "Additional apt runtime dependencies: ${ADDITIONAL_RUNTIME_DEPS}"
          shift 2 ;;
        -D|--dockerhub-user)
          export DOCKERHUB_USER="${2}"
          echo "Dockerhub user ${DOCKERHUB_USER}"
          echo
          shift 2 ;;
        -R|--dockerhub-repo)
          export DOCKERHUB_REPO="${2}"
          echo "Dockerhub repo ${DOCKERHUB_REPO}"
          echo
          shift 2 ;;
        -f|--forward-credentials)
          echo "Forwarding credentials. Be careful as your credentials ar available in the container!"
          echo
          export FORWARD_CREDENTIALS="true"
          shift 1 ;;
        -c|--github-registry)
          echo
          echo "Use github registry"
          echo
          export USE_GITHUB_REGISTRY="true"
          shift ;;
        -G|--github-organisation)
          echo
          echo "GitHub organisation"
          echo
          export GITHUB_ORGANISATION="${2}"
          shift 2;;
        -g|--github-repo)
          echo
          echo "GitHub repository"
          echo
          export GITHUB_REPOSITORY="${2}"
          shift 2;;
        -S|--version-suffix-for-pypi)
          # PyPI and SVN version suffixes are mutually exclusive.
          if [[ ${VERSION_SUFFIX_FOR_SVN} != "" ]]; then
              echo
              echo "You can only set one version suffix - either for PyPI or for SVN"
              echo
              exit 1
          fi
          export VERSION_SUFFIX_FOR_PYPI="${2}"
          echo "Version suffix for PyPI ${VERSION_SUFFIX_FOR_PYPI}"
          echo
          shift 2 ;;
        -N|--version-suffix-for-svn)
          # PyPI and SVN version suffixes are mutually exclusive.
          if [[ ${VERSION_SUFFIX_FOR_PYPI} != "" ]]; then
              echo
              echo "You can only set one version suffix - either for PyPI or for SVN"
              echo
              exit 1
          fi
          export VERSION_SUFFIX_FOR_SVN="${2}"
          echo "Version suffix for SVN ${VERSION_SUFFIX_FOR_SVN}"
          echo
          shift 2 ;;
        --)
          # End of flags marker inserted by getopt.
          shift ;
          break ;;
        *)
          flags
          echo >&2
          echo >&2 "ERROR: Unknown flag ${1}"
          echo >&2
          exit 1
          ;;
      esac
    done

    # Parse commands
    if [[ "$#" -ne 0 ]]; then
      case "${1}" in
        shell)
          LAST_SUBCOMMAND="${1}"
          shift ;;
        exec)
          LAST_SUBCOMMAND="${1}"
          COMMAND_TO_RUN="run_exec"
          shift ;;
        build-docs)
          LAST_SUBCOMMAND="${1}"
          COMMAND_TO_RUN="build_docs"
          shift 1 ;;
        build-image)
          LAST_SUBCOMMAND="${1}"
          COMMAND_TO_RUN="build_image"
          # if you want to build an image - assume you want to build it :)
          export FORCE_ANSWER_TO_QUESTIONS="yes"
          # and assume you want to build it no matter if it is needed
          export FORCE_BUILD_IMAGES="true"
          echo "Build image"
          echo
          shift ;;
        cleanup-image)
          LAST_SUBCOMMAND="${1}"
          echo "Cleanup the image"
          echo
          COMMAND_TO_RUN="cleanup_image"
          shift ;;
        docker-compose)
          LAST_SUBCOMMAND="${1}"
          # docker-compose requires a compose sub-command as the next argument.
          if [[ $# -lt 2 ]]; then
            echo "You should specify docker compose command to run"
            shift
            RUN_HELP="true"
          else
            DOCKER_COMPOSE_COMMAND="${2}"
            shift 2
          fi
          COMMAND_TO_RUN="run_docker_compose"
          ;;
        generate-constraints)
          LAST_SUBCOMMAND="${1}"
          COMMAND_TO_RUN="perform_generate_constraints"
          export FORCE_ANSWER_TO_QUESTIONS="yes"
          export FORCE_BUILD_IMAGES="true"
          export UPGRADE_TO_LATEST_CONSTRAINTS="true"
          shift ;;
        prepare-backport-packages)
          LAST_SUBCOMMAND="${1}"
          COMMAND_TO_RUN="perform_prepare_backport_packages"
          shift ;;
        prepare-backport-readme)
          LAST_SUBCOMMAND="${1}"
          COMMAND_TO_RUN="perform_prepare_backport_readme"
          shift ;;
        push-image)
          LAST_SUBCOMMAND="${1}"
          COMMAND_TO_RUN="perform_push_image"
          export SKIP_CHECK_REMOTE_IMAGE="true"
          shift ;;
        initialize-local-virtualenv)
          LAST_SUBCOMMAND="${1}"
          echo "Initializing local virtualenv"
          echo
          COMMAND_TO_RUN="perform_initialize_local_virtualenv"
          shift ;;
        kind-cluster)
          LAST_SUBCOMMAND="${1}"
          # Force local cache strategy for all kind-cluster operations
          # this helps to iterate with production images
          DOCKER_CACHE="local"
          COMMAND_TO_RUN="manage_kind_cluster"
          # Optional operation argument (start/stop/...); may be absent.
          export KIND_CLUSTER_OPERATION="${2:-}"
          if [[ ${KIND_CLUSTER_OPERATION} != "" ]]; then
            shift 2
          else
            shift
          fi
          ;;
        setup-autocomplete)
          LAST_SUBCOMMAND="${1}"
          echo "Setting up autocomplete"
          echo
          COMMAND_TO_RUN="perform_setup_autocomplete"
          shift ;;
        static-check )
          LAST_SUBCOMMAND="${1}"
          COMMAND_TO_RUN="perform_static_checks"
          # A static check name (or 'all') must follow unless --help was requested.
          if [[ "$#" -lt 2 ]]; then
            if [[ ${RUN_HELP} != "true" ]]; then
              echo "You should specify static check that you would like to run or 'all' to run all checks."
              echo
              echo "One of :"
              echo
              echo "${_BREEZE_ALLOWED_STATIC_CHECKS:=}"
              echo
              echo "For example:"
              echo
              echo "${CMDNAME} static-check mypy"
              echo
              exit 1
            else
              shift
            fi
          else
            # Static checks always run with the fixed python version.
            export PYTHON_MAJOR_MINOR_VERSION=${STATIC_CHECK_PYTHON_MAJOR_MINOR_VERSION}
            export STATIC_CHECK="${2:-}"
            export STATIC_CHECK_ALL_FILES="false"
            EXTRA_STATIC_CHECK_OPTIONS+=("--show-diff-on-failure")
            shift 2
          fi
          ;;
        stop)
          LAST_SUBCOMMAND="${1}"
          COMMAND_TO_RUN="run_docker_compose"
          DOCKER_COMPOSE_COMMAND="down"
          EXTRA_DC_OPTIONS+=("--remove-orphans")
          shift ;;
        restart)
          LAST_SUBCOMMAND="${1}"
          # restart = stop (down --remove-orphans) followed by entering breeze again.
          COMMAND_TO_RUN="run_docker_compose"
          DOCKER_COMPOSE_COMMAND="down"
          EXTRA_DC_OPTIONS+=("--remove-orphans")
          SECOND_COMMAND_TO_RUN="enter_breeze"
          echo "Restarts the environment. Includes emptying the databases."
          shift ;;
        tests)
          LAST_SUBCOMMAND="${1}"
          if [[ $# -lt 2 ]]; then
            RUN_HELP="true"
          else
            shift
          fi
          COMMAND_TO_RUN="run_tests" ;;
        toggle-suppress-cheatsheet)
          LAST_SUBCOMMAND="${1}"
          # Presence of the marker file suppresses the cheatsheet - toggle it.
          if [[ -f "${SUPPRESS_CHEATSHEET_FILE}" ]]; then
            rm -f "${SUPPRESS_CHEATSHEET_FILE}"
          else
            touch "${SUPPRESS_CHEATSHEET_FILE}"
          fi
          echo "Toggle suppress cheatsheet"
          echo
          shift ;;
        toggle-suppress-asciiart)
          LAST_SUBCOMMAND="${1}"
          # Presence of the marker file suppresses the ASCII art - toggle it.
          if [[ -f "${SUPPRESS_ASCIIART_FILE}" ]]; then
            rm -f "${SUPPRESS_ASCIIART_FILE}"
          else
            touch "${SUPPRESS_ASCIIART_FILE}"
          fi
          echo "Toggle suppress asciiart"
          echo
          shift ;;
        flags)
          flags
          exit 0 ;;
        help)
          usage
          exit 0 ;;
        help-all)
          do_help_all
          exit 0 ;;
        *)
          usage
          echo >&2
          echo >&2 "ERROR: Unknown command ${1}"
          echo >&2
          exit 1
          ;;
      esac
    else
      :
      # By default, start interactive terminal
    fi

    # Handle --help: generic usage with no sub-command, detailed usage otherwise.
    if [[ ${RUN_HELP} == "true" ]]; then
      if [[ ${LAST_SUBCOMMAND} == "" ]]; then
        usage
        flag_footer
      else
        detailed_usage "${LAST_SUBCOMMAND}"
      fi
      exit 0
    fi
    # Everything left over is passed through to the command being run.
    REMAINING_ARGS+=("$@")
}
|
|
|
|
# Prepares nicely formatted versions of list of allowed and default values defined in Breeze.
# It is used in help command to print the lists in a readable format and fold the lists
# so that they fit the screen width.
#
# Reads (globals):  SCREEN_WIDTH plus the _BREEZE_ALLOWED_* / DEFAULT_*_EXTRAS lists
# Writes (globals): INDENT, LIST_PREFIX, WIDTH and all FORMATTED_* variables
function prepare_formatted_versions() {
    INDENT=15
    # Prefix of INDENT spaces that is prepended to every folded line.
    LIST_PREFIX=$(printf "%-${INDENT}s" " ")
    # Width remaining for the folded text once the indent is applied.
    WIDTH=$((SCREEN_WIDTH - INDENT))
    # Each _BREEZE_ALLOWED_* list is newline-separated: join it into one space-separated
    # line, fold at word boundaries to WIDTH, and indent every resulting line.
    # The ${VAR=""} form assigns an empty default so 'set -u' does not trip on unset vars.
    FORMATTED_PYTHON_MAJOR_MINOR_VERSIONS=$(echo "${_BREEZE_ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS=""}" | tr '\n' ' ' | \
        fold -w "${WIDTH}" -s | sed "s/^/${LIST_PREFIX}/")
    FORMATTED_BACKENDS=$(echo "${_BREEZE_ALLOWED_BACKENDS=""}" | tr '\n' ' ' | \
        fold -w "${WIDTH}" -s | sed "s/^/${LIST_PREFIX}/")
    FORMATTED_STATIC_CHECKS=$(echo "${_BREEZE_ALLOWED_STATIC_CHECKS=""}" | tr '\n' ' ' | \
        fold -w "${WIDTH}" -s | sed "s/^/${LIST_PREFIX}/")
    FORMATTED_INTEGRATIONS=$(echo "${_BREEZE_ALLOWED_INTEGRATIONS=""}" | tr '\n' ' ' | \
        fold -w "${WIDTH}" -s | sed "s/^/${LIST_PREFIX}/")
    FORMATTED_KUBERNETES_MODES=$(echo "${_BREEZE_ALLOWED_KUBERNETES_MODES=""}" | tr '\n' ' ' | \
        fold -w "${WIDTH}" -s | sed "s/^/${LIST_PREFIX}/")
    FORMATTED_KUBERNETES_VERSIONS=$(echo "${_BREEZE_ALLOWED_KUBERNETES_VERSIONS=""}" | tr '\n' ' ' | \
        fold -w "${WIDTH}" -s | sed "s/^/${LIST_PREFIX}/")
    FORMATTED_KIND_VERSIONS=$(echo "${_BREEZE_ALLOWED_KIND_VERSIONS=""}" | tr '\n' ' ' | \
        fold -w "${WIDTH}" -s | sed "s/^/${LIST_PREFIX}/")
    FORMATTED_HELM_VERSIONS=$(echo "${_BREEZE_ALLOWED_HELM_VERSIONS=""}" | tr '\n' ' ' | \
        fold -w "${WIDTH}" -s | sed "s/^/${LIST_PREFIX}/")
    FORMATTED_KIND_OPERATIONS=$(echo "${_BREEZE_ALLOWED_KIND_OPERATIONS=""}" | tr '\n' ' ' | \
        fold -w "${WIDTH}" -s | sed "s/^/${LIST_PREFIX}/")
    FORMATTED_INSTALL_AIRFLOW_VERSIONS=$(echo "${_BREEZE_ALLOWED_INSTALL_AIRFLOW_VERSIONS=""}" | \
        tr '\n' ' ' | fold -w "${WIDTH}" -s | sed "s/^/${LIST_PREFIX}/")
    FORMATTED_POSTGRES_VERSIONS=$(echo "${_BREEZE_ALLOWED_POSTGRES_VERSIONS=""}" | \
        tr '\n' ' ' | fold -w "${WIDTH}" -s | sed "s/^/${LIST_PREFIX}/")
    FORMATTED_MYSQL_VERSIONS=$(echo "${_BREEZE_ALLOWED_MYSQL_VERSIONS=""}" | \
        tr '\n' ' ' | fold -w "${WIDTH}" -s | sed "s/^/${LIST_PREFIX}/")
    # The extras lists are comma-separated: split on commas for folding, then
    # restore the commas afterwards (the 's/ /,/g' part of the sed expression).
    FORMATTED_DEFAULT_CI_EXTRAS=$(echo "${DEFAULT_CI_EXTRAS=}" | \
        tr ',' ' ' | fold -w "${WIDTH}" -s | sed "s/ /,/g; s/^/${LIST_PREFIX}/")
    FORMATTED_DEFAULT_PROD_EXTRAS=$(echo "${DEFAULT_PROD_EXTRAS=}" | \
        tr ',' ' ' | fold -w "${WIDTH}" -s | sed "s/ /,/g; s/^/${LIST_PREFIX}/")
}
|
|
|
|
# Prepares usage information for all the commands in Breeze.
# Those usage strings are stored in appropriate environment variables:
#   USAGE_<COMMAND>          - short one-line description (used by 'usage')
#   DETAILED_USAGE_<COMMAND> - long help text (used by 'detailed_usage')
# They are looked up dynamically via get_usage/get_detailed_usage.
function prepare_usage() {
    # Note that MacOS uses Bash 3.* and we cannot use associative arrays
    export USAGE_SHELL="[Default] Enters interactive shell in the container"
    export USAGE_EXEC="Execs into running breeze container in new terminal"
    export USAGE_BUILD_DOCS="Builds documentation in the container"
    export USAGE_BUILD_IMAGE="Builds CI or Production docker image"
    export USAGE_CLEANUP_IMAGE="Cleans up the container image created"
    export USAGE_DOCKER_COMPOSE="Executes specified docker-compose command"
    export USAGE_FLAGS="Shows all breeze's flags"
    export USAGE_GENERATE_CONSTRAINTS="Generates pinned constraint files"
    export USAGE_INITIALIZE_LOCAL_VIRTUALENV="Initializes local virtualenv"
    export USAGE_PREPARE_BACKPORT_README="Prepares backport packages readme files"
    export USAGE_PREPARE_BACKPORT_PACKAGES="Prepares backport packages"
    export USAGE_PUSH_IMAGE="Pushes images to registry"
    export USAGE_KIND_CLUSTER="Manages KinD cluster on the host"
    export USAGE_SETUP_AUTOCOMPLETE="Sets up autocomplete for breeze"
    export USAGE_STOP="Stops the docker-compose environment"
    export USAGE_RESTART="Stops the docker-compose environment including DB cleanup"
    export USAGE_STATIC_CHECK="Performs selected static check for changed files"
    export USAGE_STATIC_CHECK_ALL_FILES="Performs selected static check for all files"
    export USAGE_TOGGLE_SUPPRESS_CHEATSHEET="Toggles on/off cheatsheet"
    export USAGE_TOGGLE_SUPPRESS_ASCIIART="Toggles on/off asciiart"
    export USAGE_TESTS="Runs selected tests in the container"
    export USAGE_HELP="Shows this help message"
    export USAGE_HELP_ALL="Shows detailed help for all commands and flags"

    # shellcheck disable=SC2089
    DETAILED_USAGE_SHELL="
${CMDNAME} shell [FLAGS] [-- <EXTRA_ARGS>]

This is default subcommand if no subcommand is used.

Enters interactive shell where you can run all tests, start Airflow webserver, scheduler,
workers, interact with the database, run DAGs etc. It is the default command if no command
is selected. The shell is executed in the container and in case integrations are chosen,
the integrations will be started as separated docker containers - under the docker-compose
supervision. Local sources are by default mounted to within the container so you can edit
them locally and run tests immediately in the container. Several folders ('files', 'dist')
are also mounted so that you can exchange files between the host and container.

The 'files/airflow-breeze-config/variables.env' file can contain additional variables
and setup. This file is automatically sourced when you enter the container. Database
and webserver ports are forwarded to appropriate database/webserver so that you can
connect to it from your host environment.

You can also pass <EXTRA_ARGS> after -- they will be passed as bash parameters, this is
especially useful to pass bash options, for example -c to execute command:

'${CMDNAME} shell -- -c \"ls -la\"'

Flags:
$(flag_footer)
"
    # shellcheck disable=SC2090
    export DETAILED_USAGE_SHELL
    # Fixed: "mus be started" -> "must be started"
    export DETAILED_USAGE_EXEC="
${CMDNAME} exec [-- <EXTRA_ARGS>]

Execs into interactive shell to an already running container. The container must be started
already by breeze shell command. If you are not familiar with tmux, this is the best
way to run multiple processes in the same container at the same time for example scheduler,
webserver, workers, database console and interactive terminal.
"
    # Fixed: "is build inside" -> "is built inside"
    export DETAILED_USAGE_BUILD_DOCS="
${CMDNAME} build-docs

Builds Airflow documentation. The documentation is built inside docker container - to
maintain the same build environment for everyone. Appropriate sources are mapped from
the host to the container so that latest sources are used. The folders where documentation
is generated ('docs/build') are also mounted to the container - this way results of
the documentation build is available in the host.
"
    # shellcheck disable=SC2089
    # Fixed: '-build-cache-pulled' (single dash) -> '--build-cache-pulled'
    DETAILED_USAGE_BUILD_IMAGE="
${CMDNAME} build-image [FLAGS]

Builds docker image (CI or production) without entering the container. You can pass
additional options to this command, such as '--force-build-image',
'--force-pull-image', '--python', '--build-cache-local' or '--build-cache-pulled'
in order to modify build behaviour.

You can also pass '--production-image' flag to build production image rather than CI image.

Flags:
$(flag_airflow_variants)
$(flag_choose_different_airflow_version)
$(flag_production_image)
$(flag_build_docker_images)
$(flag_pull_push_docker_images)
$(flag_verbosity)
"
    # shellcheck disable=SC2090
    export DETAILED_USAGE_BUILD_IMAGE
    # shellcheck disable=SC2089
    DETAILED_USAGE_CLEANUP_IMAGE="
${CMDNAME} cleanup-image [FLAGS]

Removes the breeze-related images created in your local docker image cache. This will
not reclaim space in docker cache. You need to 'docker system prune' (optionally
with --all) to reclaim that space.

Flags:
$(flag_airflow_variants)
$(flag_production_image)
$(flag_verbosity)
"
    # shellcheck disable=SC2090
    export DETAILED_USAGE_CLEANUP_IMAGE
    # shellcheck disable=SC2089
    DETAILED_USAGE_DOCKER_COMPOSE="
${CMDNAME} docker-compose [FLAGS] COMMAND [-- <EXTRA_ARGS>]

Run docker-compose command instead of entering the environment. Use 'help' as command
to see available commands. The <EXTRA_ARGS> passed after -- are treated
as additional options passed to docker-compose. For example

'${CMDNAME} docker-compose pull -- --ignore-pull-failures'

Flags:
$(flag_airflow_variants)
$(flag_backend_variants)
$(flag_verbosity)
"
    # shellcheck disable=SC2090
    export DETAILED_USAGE_DOCKER_COMPOSE
    export DETAILED_USAGE_FLAGS="
Explains in detail all the flags that can be used with breeze.
"
    # shellcheck disable=SC2089
    # Fixed: usage line previously showed the 'prepare-backport-packages' command here.
    DETAILED_USAGE_PREPARE_BACKPORT_README="
${CMDNAME} prepare-backport-readme [FLAGS] [YYYY.MM.DD] [PACKAGE_ID ...]

Prepares README.md files for backport packages. You can provide (after --) optional version
in the form of YYYY.MM.DD, optionally followed by the list of packages to generate readme for.
If the first parameter is not formatted as a date, then today is regenerated.
If no packages are specified, readme for all packages are generated.
If no date is specified, current date + 3 days is used (allowing for PMC votes to pass).

Examples:

'${CMDNAME} prepare-backport-readme' or
'${CMDNAME} prepare-backport-readme 2020.05.10' or
'${CMDNAME} prepare-backport-readme 2020.05.10 https google amazon'

General form:

'${CMDNAME} prepare-backport-readme YYYY.MM.DD <PACKAGE_ID> ...'

* YYYY.MM.DD - is the CALVER version of the package to prepare. Note that this date
cannot be earlier than the already released version (the script will fail if it
will be). It can be set in the future anticipating the future release date.

* <PACKAGE_ID> is usually directory in the airflow/providers folder (for example
'google' but in several cases, it might be one level deeper separated with
'.' for example 'apache.hive'

Flags:
$(flag_verbosity)
"
    # shellcheck disable=SC2090
    export DETAILED_USAGE_PREPARE_BACKPORT_README
    DETAILED_USAGE_GENERATE_CONSTRAINTS="
${CMDNAME} generate-constraints [FLAGS]

Generates pinned constraint files from setup.py. Those files are generated in files folder
- separate files for different python version. Those constraint files when pushed to orphan
constraint-master and constraint-1-10 branches are used to generate repeatable
CI builds as well as run repeatable production image builds. You can use those constraints
to predictably install released Airflow versions. This is mainly used to test the constraint
generation - constraints are pushed to the orphan branches by a successful scheduled
CRON job in CI automatically.

Flags:
$(flag_airflow_variants)
$(flag_verbosity)
"
    # shellcheck disable=SC2090
    export DETAILED_USAGE_GENERATE_CONSTRAINTS
    DETAILED_USAGE_INITIALIZE_LOCAL_VIRTUALENV="
${CMDNAME} initialize-local-virtualenv [FLAGS]

Initializes locally created virtualenv installing all dependencies of Airflow
taking into account the constraints for the version specified.
This local virtualenv can be used to aid auto-completion and IDE support as
well as run unit tests directly from the IDE. You need to have virtualenv
activated before running this command.

Flags:
$(flag_airflow_variants)
"
    # shellcheck disable=SC2090
    export DETAILED_USAGE_INITIALIZE_LOCAL_VIRTUALENV
    # shellcheck disable=SC2089
    DETAILED_USAGE_PREPARE_BACKPORT_PACKAGES="
${CMDNAME} prepare-backport-packages [FLAGS] [PACKAGE_ID ...]

Prepares backport packages. You can provide (after --) optional list of packages to prepare.
If no packages are specified, readme for all packages are generated. You can specify optional
--version-suffix-for-svn flag to generate rc candidate packages to upload to SVN or
--version-suffix-for-pypi flag to generate rc candidates for PyPI packages.

Examples:

'${CMDNAME} prepare-backport-packages' or
'${CMDNAME} prepare-backport-packages google' or
'${CMDNAME} prepare-backport-packages --version-suffix-for-svn rc1 http google amazon' or
'${CMDNAME} prepare-backport-packages --version-suffix-for-pypi rc1 http google amazon'

General form:

'${CMDNAME} prepare-backport-packages \\
[--version-suffix-for-svn|--version-suffix-for-pypi] <PACKAGE_ID> ...'

* <PACKAGE_ID> is usually directory in the airflow/providers folder (for example
'google'), but in several cases, it might be one level deeper separated with '.'
for example 'apache.hive'

Flags:
$(flag_version_suffix)
$(flag_verbosity)
"
    # shellcheck disable=SC2090
    export DETAILED_USAGE_PREPARE_BACKPORT_PACKAGES
    # shellcheck disable=SC2089
    # Fixed: 'push_image' -> 'push-image' (the actual command name uses a dash)
    DETAILED_USAGE_PUSH_IMAGE="
${CMDNAME} push-image [FLAGS]

Pushes images to docker registry. You can push the images to DockerHub registry (default)
or to the GitHub registry (if --github-registry flag is used).

For DockerHub pushes --dockerhub-user and --dockerhub-repo flags can be used to specify
the repository to push to. For GitHub repository --github-organisation and --github-repo
flags can be used for the same purpose.

You can also add --production-image flag to switch to production image (default is CI one)

Examples:

'${CMDNAME} push-image' or
'${CMDNAME} push-image --dockerhub-user user' to push to your private registry or
'${CMDNAME} push-image --production-image' - to push production image or
'${CMDNAME} push-image --github-registry' - to push to GitHub image registry or
'${CMDNAME} push-image --github-registry --github-organisation org' - for other organisation

Flags:
$(flag_pull_push_docker_images)
$(flag_verbosity)
"
    # shellcheck disable=SC2090
    export DETAILED_USAGE_PUSH_IMAGE
    DETAILED_USAGE_KIND_CLUSTER="
${CMDNAME} kind-cluster [FLAGS] OPERATION

Manages host-side Kind Kubernetes cluster that is used to run Kubernetes integration tests.
It allows to start/stop/restart/status the Kind Kubernetes cluster and deploy Airflow to it.
This enables you to run tests inside the breeze environment with latest airflow images loaded.
Note that in case of deploying airflow, the first step is to rebuild the image and loading it
to the cluster so you can also pass appropriate build image flags that will influence
rebuilding the production image. Operation is one of:

${FORMATTED_KIND_OPERATIONS}

Flags:
$(flag_airflow_variants)
$(flag_build_docker_images)
"
    export DETAILED_USAGE_KIND_CLUSTER
    export DETAILED_USAGE_SETUP_AUTOCOMPLETE="
${CMDNAME} setup-autocomplete

Sets up autocomplete for breeze commands. Once you do it you need to re-enter the bash
shell and when typing breeze command <TAB> will provide autocomplete for
parameters and values.
"
    export DETAILED_USAGE_STOP="
${CMDNAME} stop

Brings down running docker compose environment. When you start the environment, the docker
containers will continue running so that startup time is shorter. But they take quite a lot of
memory and CPU. This command stops all running containers from the environment.
"
    DETAILED_USAGE_RESTART="
${CMDNAME} restart [FLAGS]

Restarts running docker compose environment. When you restart the environment, the docker
containers will be restarted. That includes cleaning up the databases. This is
especially useful if you switch between different versions of Airflow.

Flags:
$(flag_footer)
"
    export DETAILED_USAGE_RESTART
    # Fixed: "options to to the pre-commit" -> "options to the pre-commit"
    export DETAILED_USAGE_STATIC_CHECK="
${CMDNAME} static-check [FLAGS] STATIC_CHECK [-- <EXTRA_ARGS>]

Run selected static checks for currently changed files. You should specify static check that
you would like to run or 'all' to run all checks. One of:

${FORMATTED_STATIC_CHECKS}

You can pass extra arguments including options to the pre-commit framework as
<EXTRA_ARGS> passed after --. For example:

'${CMDNAME} static-check mypy' or
'${CMDNAME} static-check mypy -- --files tests/core.py'
'${CMDNAME} static-check mypy -- --all-files'

You can see all the options by adding --help EXTRA_ARG:

'${CMDNAME} static-check mypy -- --help'
"
    # shellcheck disable=SC2089
    # Fixed: "comas" -> "commas"; added missing closing quote on the second example.
    DETAILED_USAGE_TESTS="
${CMDNAME} tests [FLAGS] [TEST_TARGET ..] [-- <EXTRA_ARGS>]

Run the specified unit test target. There might be multiple
targets specified separated with commas. The <EXTRA_ARGS> passed after -- are treated
as additional options passed to pytest. You can pass 'tests' as target to
run all tests. For example:

'${CMDNAME} tests tests/test_core.py -- --logging-level=DEBUG'
'${CMDNAME} tests tests'

Flags:
$(flag_footer)
"
    # shellcheck disable=SC2090
    export DETAILED_USAGE_TESTS
    export DETAILED_USAGE_TOGGLE_SUPPRESS_CHEATSHEET="
${CMDNAME} toggle-suppress-cheatsheet

Toggles on/off cheatsheet displayed before starting bash shell.
"
    export DETAILED_USAGE_TOGGLE_SUPPRESS_ASCIIART="
${CMDNAME} toggle-suppress-asciiart

Toggles on/off asciiart displayed before starting bash shell.
"
    export DETAILED_USAGE_HELP="
${CMDNAME} help

Shows general help message for all commands.
"
    export DETAILED_USAGE_HELP_ALL="
${CMDNAME} help-all

Shows detailed help for all commands and flags.
"
}
|
|
|
|
# Gets environment variable value converting the lowercase name of command into variable name.
# For example 'get_variable_from_lowercase_name USAGE build-image' prints ${USAGE_BUILD_IMAGE}.
# Arguments:
#   $1 - variable prefix (for example USAGE or DETAILED_USAGE)
#   $2 - lowercase, dash-separated command name
# Outputs: the value of the derived variable on stdout
function get_variable_from_lowercase_name() {
    # Locals so the helper variables do not leak into the global namespace.
    local prefix="${1}"
    local name="${2}"
    local suffix
    # Uppercase the name and convert dashes to underscores (build-image -> BUILD_IMAGE).
    suffix="$(echo "${name}" | tr "[:lower:]-" "[:upper:]_")"
    local variable_name="${prefix}_${suffix}"
    # printf rather than echo: safe even if the value starts with a dash (e.g. "-n").
    printf '%s\n' "${!variable_name}"
}
|
|
|
|
# Looks up the short usage line for the given lowercase command name.
# $1 - lowercase, dash-separated command name
function get_usage() {
    local subcommand="${1}"
    get_variable_from_lowercase_name USAGE "${subcommand}"
}
|
|
|
|
|
|
# Looks up the detailed help text for the given lowercase command name.
# $1 - lowercase, dash-separated command name
function get_detailed_usage() {
    local subcommand="${1}"
    get_variable_from_lowercase_name DETAILED_USAGE "${subcommand}"
}
|
|
|
|
# Prints general usage information: the list of all commands with one-line descriptions.
# Reads globals: CMDNAME, BREEZE_COMMANDS, BREEZE_EXTRA_ARG_COMMANDS, BREEZE_HELP_COMMANDS
function usage() {
    # Local so the loop variable does not leak into the global namespace.
    local SUBCOMMAND
    echo "

Usage: ${CMDNAME} [FLAGS] [COMMAND] -- <EXTRA_ARGS>

By default the script enters IT environment and drops you to bash shell, but you can choose one
of the commands to run specific actions instead. Add --help after each command to see details:

Commands without arguments:
"
    # The command lists are deliberately unquoted - they are space-separated word lists.
    for SUBCOMMAND in ${BREEZE_COMMANDS}
    do
        printf " %-40s %s\n" "${SUBCOMMAND}" "$(get_usage "${SUBCOMMAND}")"
    done
    echo "
Commands with arguments:
"
    for SUBCOMMAND in ${BREEZE_EXTRA_ARG_COMMANDS}
    do
        printf " %-30s%-10s %s\n" "${SUBCOMMAND}" "<ARG>" "$(get_usage "${SUBCOMMAND}")"
    done
    echo "
Help commands:
"
    for SUBCOMMAND in ${BREEZE_HELP_COMMANDS}
    do
        printf " %-40s %s\n" "${SUBCOMMAND}" "$(get_usage "${SUBCOMMAND}")"
    done
    echo
}
|
|
|
|
# Prints detailed usage for the command specified as the first argument.
# $1 - lowercase, dash-separated command name
function detailed_usage() {
    # Local so the variable does not leak into the global namespace.
    local SUBCOMMAND=${1}
    echo "

Detailed usage for command: ${SUBCOMMAND}

$(get_detailed_usage "${SUBCOMMAND}")

"
}
|
|
|
|
# Prints the footer pointing users at the 'flags' command.
function flag_footer() {
    printf '%s\n' "
Run '${CMDNAME} flags' to see all applicable flags.
"
}
|
|
|
|
# Prints the flags used for choosing different variants of airflow (Python version).
function flag_airflow_variants() {
    printf '%s\n' "
-p, --python <PYTHON_MAJOR_MINOR_VERSION>
Python version used for the image. This is always major/minor version.

Note that versions 2.7 and 3.5 are only valid when installing Airflow 1.10 with
--install-airflow-version or --install-airflow-reference flags.

One of:

${FORMATTED_PYTHON_MAJOR_MINOR_VERSIONS}
"
}
|
|
|
|
# Prints the flags used for selecting the test backend and its database version.
function flag_backend_variants() {
    printf '%s\n' "
-b, --backend <BACKEND>
Backend to use for tests - it determines which database is used.
One of:

${FORMATTED_BACKENDS}

Default: ${_BREEZE_DEFAULT_BACKEND:=}

--postgres-version <POSTGRES_VERSION>
Postgres version used. One of:

${FORMATTED_POSTGRES_VERSIONS}

--mysql-version <MYSQL_VERSION>
Mysql version used. One of:

${FORMATTED_MYSQL_VERSIONS}
"
}
|
|
|
|
# Prints production image flags
function flag_production_image() {
    printf '%s\n' "
-I, --production-image
Use production image for entering the environment and builds (not for tests).
"
}
|
|
|
|
# Prints flags for additional actions executed while entering breeze.
function flag_breeze_actions() {
    printf '%s\n' "
-d, --db-reset
Resets the database at entry to the environment. It will drop all the tables
and data and recreate the DB from scratch even if 'restart' command was not used.
Combined with 'restart' command it enters the environment in the state that is
ready to start Airflow webserver/scheduler/worker. Without the switch, the database
does not have any tables and you need to run reset db manually.

-i, --integration <INTEGRATION>
Integration to start during tests - it determines which integrations are started
for integration tests. There can be more than one integration started, or all to
start all integrations. Selected integrations are not saved for future execution.
One of:

${FORMATTED_INTEGRATIONS}
"
}
|
|
|
|
# Prints the flags that configure the KinD Kubernetes cluster used for Kubernetes tests:
# Kubernetes mode/version, Kind version and Helm version.
# Note: the ${VAR:=} expansions below assign empty defaults as a side effect so that
# 'set -u' does not trip on unset _BREEZE_DEFAULT_* variables.
function flag_kubernetes_configuration() {
    echo "
Configuration for the KinD Kubernetes cluster and tests:

-K, --kubernetes-mode <KUBERNETES_MODE>
Kubernetes mode - only used in case one of --kind-cluster-* commands is used.
One of:

${FORMATTED_KUBERNETES_MODES}

Default: ${_BREEZE_DEFAULT_KUBERNETES_MODE:=}

-V, --kubernetes-version <KUBERNETES_VERSION>
Kubernetes version - only used in case one of --kind-cluster-* commands is used.
One of:

${FORMATTED_KUBERNETES_VERSIONS}

Default: ${_BREEZE_DEFAULT_KUBERNETES_VERSION:=}

--kind-version <KIND_VERSION>
Kind version - only used in case one of --kind-cluster-* commands is used.
One of:

${FORMATTED_KIND_VERSIONS}

Default: ${_BREEZE_DEFAULT_KIND_VERSION:=}

--helm-version <HELM_VERSION>
Helm version - only used in case one of --kind-cluster-* commands is used.
One of:

${FORMATTED_HELM_VERSIONS}

Default: ${_BREEZE_DEFAULT_HELM_VERSION:=}

"
}
|
|
|
|
# Prints the flag that controls whether local sources are mounted into the container.
function flag_local_file_mounting() {
    printf '%s\n' "
-l, --skip-mounting-local-sources
Skips mounting local volume with sources - you get exactly what is in the
docker image rather than your current local sources of Airflow.
"
}
|
|
|
|
# Prints the flags that allow installing a different Airflow version (from PIP or GitHub).
function flag_choose_different_airflow_version() {
    printf '%s\n' "
-a, --install-airflow-version <INSTALL_AIRFLOW_VERSION>
If specified, installs Airflow directly from PIP released version. This happens at
image building time in production image and at container entering time for CI image. One of:

${FORMATTED_INSTALL_AIRFLOW_VERSIONS}

-t, --install-airflow-reference <INSTALL_AIRFLOW_REFERENCE>
If specified, installs Airflow directly from reference in GitHub. This happens at
image building time in production image and at container entering time for CI image.
"
}
|
|
|
|
# Prints the flags that preset answers (yes/no/quit) to all interactive questions.
function flag_assume_answers_to_questions() {
    printf '%s\n' "
-y, --assume-yes
Assume 'yes' answer to all questions.

-n, --assume-no
Assume 'no' answer to all questions.

-q, --assume-quit
Assume 'quit' answer to all questions.
"
}
|
|
|
|
# Prints the flag that enables forwarding host credentials into the container.
function flag_credentials() {
    printf '%s\n' "
-f, --forward-credentials
Forwards host credentials to docker container. Use with care as it will make
your credentials available to everything you install in Docker.
"
}
|
|
|
|
# Prints the flag that controls verbosity of executed commands.
function flag_verbosity() {
    printf '%s\n' "
-v, --verbose
Show verbose information about executed commands (enabled by default for running test).
Note that you can further increase verbosity and see all the commands executed by breeze
by running 'export VERBOSE_COMMANDS=\"true\"' before running breeze.
"
}
|
|
|
|
# Prints the help flag description.
function flag_help() {
    printf '%s\n' "
-h, --help
Shows detailed help message for the command specified.
"
}
|
|
|
|
# Prints the flags controlling the docker image build process (both CI and production):
# force build/pull, extras, additional dependencies and the cache strategy selection.
function flag_build_docker_images() {
    echo "
-F, --force-build-images
Forces building of the local docker images. The images are rebuilt
automatically for the first time or when changes are detected in
package-related files, but you can force it using this flag.

-P, --force-pull-images
Forces pulling of images from DockerHub before building to populate cache. The
images are pulled by default only for the first time you run the
environment, later the locally build images are used as cache.

-E, --extras
Extras to pass to build images The default are different for CI and production images:

CI image:
${FORMATTED_DEFAULT_CI_EXTRAS}

Production image:
${FORMATTED_DEFAULT_PROD_EXTRAS}

--additional-extras
Additional extras to pass to build images The default is no additional extras.

--additional-python-deps
Additional python dependencies to use when building the images.

--additional-dev-deps
Additional apt dev dependencies to use when building the images.

--additional-runtime-deps
Additional apt runtime dependencies to use when building the images.

-C, --force-clean-images
Force build images with cache disabled. This will remove the pulled or build images
and start building images from scratch. This might take a long time.

-L, --build-cache-local
Uses local cache to build images. No pulled images will be used, but results of local
builds in the Docker cache are used instead. This will take longer than when the pulled
cache is used for the first time, but subsequent '--build-cache-local' builds will be
faster as they will use mostly the locally build cache.

This is default strategy used by the Production image builds.

-U, --build-cache-pulled
Uses images pulled from registry (either DockerHub or GitHub depending on
--github-registry flag) to build images. The pulled images will be used as cache.
Those builds are usually faster than when ''--build-cache-local'' with the exception if
the registry images are not yet updated. The DockerHub images are updated nightly and the
GitHub images are updated after merges to master so it might be that the images are still
outdated vs. the latest version of the Dockerfiles you are using. In this case, the
''--build-cache-local'' might be faster, especially if you iterate and change the
Dockerfiles yourself.

This is default strategy used by the CI image builds.

-X, --build-cache-disabled
Disables cache during docker builds. This is useful if you want to make sure you want to
rebuild everything from scratch.

This strategy is used by default for both Production and CI images for the scheduled
(nightly) builds in CI.

"
}
|
|
|
|
# Prints the flags controlling where docker images are pulled from and pushed to
# (DockerHub by default, or the GitHub registry when --github-registry is used).
# Note: the ${VAR:=} expansions assign empty defaults so 'set -u' does not trip.
function flag_pull_push_docker_images() {
    echo "
-D, --dockerhub-user
DockerHub user used to pull, push and build images. Default: ${_BREEZE_DEFAULT_DOCKERHUB_USER:=}.

-H, --dockerhub-repo
DockerHub repository used to pull, push, build images. Default: ${_BREEZE_DEFAULT_DOCKERHUB_REPO:=}.

-c, --github-registry
If GitHub registry is enabled, pulls and pushes are done from the GitHub registry not
DockerHub. You need to be logged in to the registry in order to be able to pull/push from it
and you need to be committer to push to Apache Airflow' GitHub registry.

-G, --github-organisation
GitHub organisation used to pull, push images when cache is used. Default: ${_BREEZE_DEFAULT_GITHUB_ORGANISATION:=}.

-g, --github-repo
GitHub repository used to pull, push images when cache is used. Default: ${_BREEZE_DEFAULT_GITHUB_REPO:=}.

"
}
|
|
|
|
# Prints the flags that control the version suffixes of generated backport packages.
function flag_version_suffix() {
    printf '%s\n' "
-S, --version-suffix-for-pypi
Adds optional suffix to the version in the generated backport package. It can be used
to generate rc1/rc2 ... versions of the packages to be uploaded to PyPI.

-N, --version-suffix-for-svn
Adds optional suffix to the generated names of package. It can be used to generate
rc1/rc2 ... versions of the packages to be uploaded to SVN.
"
}
|
|
|
|
|
|
|
|
# Prints a summary of all flags supported by Breeze, grouped into sections.
# Each section is rendered by the corresponding flag_* helper function and separated
# by star lines; the whole summary is preceded by a full-width '#' line.
function flags() {
    echo "
$(print_line)

Summary of all flags supported by Breeze:

$(print_star_line)
Choose Airflow variant
$(flag_airflow_variants)

$(print_star_line)
Choose backend to run for Airflow
$(flag_backend_variants)

$(print_star_line)
Enable production image
$(flag_production_image)

$(print_star_line)
Additional actions executed while entering breeze
$(flag_breeze_actions)

$(print_star_line)
Kind kubernetes and Kubernetes tests configuration(optional)
$(flag_kubernetes_configuration)

$(print_star_line)
Manage mounting local files
$(flag_local_file_mounting)

$(print_star_line)
Assume answers to questions
$(flag_assume_answers_to_questions)

$(print_star_line)
Choose different Airflow version to install or run
$(flag_choose_different_airflow_version)

$(print_star_line)
Credentials
$(flag_credentials)

$(print_star_line)
Flags for building Docker images (both CI and production)
$(flag_build_docker_images)

$(print_star_line)
Flags for pulling/pushing Docker images (both CI and production)
$(flag_pull_push_docker_images)

$(print_star_line)
Flags for generation of the backport packages
$(flag_version_suffix)

$(print_star_line)
Increase verbosity of the scripts
$(flag_verbosity)

$(print_star_line)
Print detailed help message
$(flag_help)
"
}
|
|
|
|
# Prints a header line of '=' characters filling the screen width, surrounded by
# blank lines - only when VERBOSE is set to "true".
# Side effect: defaults VERBOSE to "false" if it was unset (the := expansion).
function print_header_line() {
    # [[ ]] instead of [ ]: the '==' operator is a bashism inside '[' and the
    # unquoted expansion would break if VERBOSE ever contained whitespace.
    if [[ ${VERBOSE:="false"} == "true" ]]; then
        echo
        printf '=%.0s' $(seq "${SCREEN_WIDTH}")
        echo
    fi
}
|
|
|
|
|
|
# Prints separation line filling screen width
#
# Emits SCREEN_WIDTH '#' characters to stdout without a trailing newline.
function print_line {
    local hash_line
    # Build a SCREEN_WIDTH-wide space padding in memory, turn every space
    # into '#', and emit it with a single printf (no subshell needed).
    printf -v hash_line '%*s' "${SCREEN_WIDTH}" ''
    printf '%s' "${hash_line// /#}"
}
|
|
|
|
# Prints star line filling screen width
#
# Emits SCREEN_WIDTH '*' characters to stdout without a trailing newline.
function print_star_line {
    local idx
    local star_line=""
    # Accumulate the ruler in a local and write it out in one go.
    for (( idx = 0; idx < SCREEN_WIDTH; idx++ )); do
        star_line+='*'
    done
    printf '%s' "${star_line}"
}
|
|
|
|
# Reads save environment variables. Some of the variables are stored across session so that once
|
|
# you use them you do not have to use it next time. That makes those flags persistent
|
|
# An example of it is `--backend` or '--kubernetes-mode' flags. Note that PYTHON_MAJOR_MINOR_VERSION is
|
|
# not read here - it is read at the `setup_default_breeze_variables` method because it is needed
|
|
# to determine the right images to use and set several variables that depend on the Python version
|
|
function read_saved_environment_variables {
    # Resolution order for every variable below:
    #   1. value already exported in the current environment,
    #   2. value persisted in the .build directory (read_from_file),
    #   3. hard-coded _BREEZE_DEFAULT_* fallback.
    # The original mixed ':-' (BACKEND) with ':=' and quoted with unquoted
    # forms for the same pattern; all entries now use the quoted ':=' form
    # uniformly (behavior is identical - 'export VAR=' assigns either way).
    export BACKEND="${BACKEND:=$(read_from_file BACKEND)}"
    export BACKEND="${BACKEND:=${_BREEZE_DEFAULT_BACKEND}}"

    export KUBERNETES_MODE="${KUBERNETES_MODE:=$(read_from_file KUBERNETES_MODE)}"
    export KUBERNETES_MODE="${KUBERNETES_MODE:=${_BREEZE_DEFAULT_KUBERNETES_MODE}}"

    export KUBERNETES_VERSION="${KUBERNETES_VERSION:=$(read_from_file KUBERNETES_VERSION)}"
    export KUBERNETES_VERSION="${KUBERNETES_VERSION:=${_BREEZE_DEFAULT_KUBERNETES_VERSION}}"

    export KIND_VERSION="${KIND_VERSION:=$(read_from_file KIND_VERSION)}"
    export KIND_VERSION="${KIND_VERSION:=${_BREEZE_DEFAULT_KIND_VERSION}}"

    export HELM_VERSION="${HELM_VERSION:=$(read_from_file HELM_VERSION)}"
    export HELM_VERSION="${HELM_VERSION:=${_BREEZE_DEFAULT_HELM_VERSION}}"

    export POSTGRES_VERSION="${POSTGRES_VERSION:=$(read_from_file POSTGRES_VERSION)}"
    export POSTGRES_VERSION="${POSTGRES_VERSION:=${_BREEZE_DEFAULT_POSTGRES_VERSION}}"

    export MYSQL_VERSION="${MYSQL_VERSION:=$(read_from_file MYSQL_VERSION)}"
    export MYSQL_VERSION="${MYSQL_VERSION:=${_BREEZE_DEFAULT_MYSQL_VERSION}}"

    # Here you read DockerHub user/account that you use
    # You can populate your own images in DockerHub this way and work with them
    # You can override it with "--dockerhub-user" option and it will be stored in .build directory
    export DOCKERHUB_USER="${DOCKERHUB_USER:=$(read_from_file DOCKERHUB_USER)}"
    export DOCKERHUB_USER="${DOCKERHUB_USER:=${_BREEZE_DEFAULT_DOCKERHUB_USER}}"

    # Here you read DockerHub repo that you use
    # You can populate your own images in DockerHub this way and work with them
    # You can override it with "--dockerhub-repo" option and it will be stored in .build directory
    export DOCKERHUB_REPO="${DOCKERHUB_REPO:=$(read_from_file DOCKERHUB_REPO)}"
    export DOCKERHUB_REPO="${DOCKERHUB_REPO:=${_BREEZE_DEFAULT_DOCKERHUB_REPO}}"
}
|
|
|
|
# Checks if variables are correctly set and if they are - saves them so that they can be used across
# sessions.
function check_and_save_all_params() {
    check_and_save_allowed_param "PYTHON_MAJOR_MINOR_VERSION" "Python version" "--python"

    # Installing a 1.10 reference/version implies working against the
    # v1-10-test branch. A non-empty reference takes precedence: the version
    # is only consulted when no reference was given at all.
    if [[ -n "${INSTALL_AIRFLOW_REFERENCE:=}" ]]; then
        if [[ "${INSTALL_AIRFLOW_REFERENCE}" == *1_10* ]]; then
            export BRANCH_NAME="v1-10-test"
        fi
    elif [[ -n "${INSTALL_AIRFLOW_VERSION:=}" ]]; then
        if [[ "${INSTALL_AIRFLOW_VERSION}" == *1.10* ]]; then
            export BRANCH_NAME="v1-10-test"
        fi
    fi

    # Python 2.7/3.5 are only supported by the 1.10 line of Airflow.
    case "${PYTHON_MAJOR_MINOR_VERSION}" in
        2.7|3.5)
            if [[ "${BRANCH_NAME}" == "master" ]]; then
                echo >&2
                echo >&2 "The ${PYTHON_MAJOR_MINOR_VERSION} can only be used when installing Airflow 1.10.*"
                echo >&2
                echo >&2 "You can use it only when you specify 1.10 Airflow via --install-airflow-version"
                echo >&2 "or --install-airflow-reference and they point to 1.10 version of Airflow"
                echo >&2
                exit 1
            fi
            ;;
    esac

    check_and_save_allowed_param "BACKEND" "backend" "--backend"
    check_and_save_allowed_param "KUBERNETES_MODE" "Kubernetes mode" "--kubernetes-mode"
    check_and_save_allowed_param "KUBERNETES_VERSION" "Kubernetes version" "--kubernetes-version"
    check_and_save_allowed_param "KIND_VERSION" "KinD version" "--kind-version"
    check_and_save_allowed_param "HELM_VERSION" "Helm version" "--helm-version"
    check_and_save_allowed_param "POSTGRES_VERSION" "Postgres version" "--postgres-version"
    check_and_save_allowed_param "MYSQL_VERSION" "Mysql version" "--mysql-version"

    # Can't verify those - they can be anything, so let's just save them
    save_to_file DOCKERHUB_USER
    save_to_file DOCKERHUB_REPO
}
|
|
|
|
# Prints cheatsheet if it is not suppressed
#
# Shown only when entering the interactive Breeze shell and the user has not
# created the suppress-cheatsheet marker file (toggled with the
# 'toggle-suppress-cheatsheet' command). Otherwise prints a single blank line.
# Globals read: SUPPRESS_CHEATSHEET_FILE, COMMAND_TO_RUN, AIRFLOW_SOURCES,
#               WEBSERVER_HOST_PORT, POSTGRES_HOST_PORT, MYSQL_HOST_PORT
function print_cheatsheet() {
    if [[ ! -f ${SUPPRESS_CHEATSHEET_FILE} && ${COMMAND_TO_RUN} == "enter_breeze" ]]; then
        echo
        echo
        print_line
        echo
        echo " Airflow Breeze CHEATSHEET"
        echo
        # 'command -v' returns non-zero when breeze is not on PATH and the
        # script runs under 'set -e', so errexit is suspended for this probe.
        set +e
        # NOTE(review): when breeze IS on PATH, 'command -v breeze' prints the
        # resolved path to stdout - presumably intentional, confirm.
        if ! command -v breeze; then
            print_line
            echo
            echo " Adding breeze to your path:"
            echo " When you exit the environment, you can add sources of Airflow to the path - you can"
            echo " run breeze or the scripts above from any directory by calling 'breeze' commands directly"
            echo
            # \${PATH} stays escaped so the user's shell expands it, while
            # ${AIRFLOW_SOURCES} is expanded here to the concrete directory.
            echo " export PATH=\${PATH}:\"${AIRFLOW_SOURCES}\""
            echo
        fi
        set -e
        print_line

        echo
        echo " Port forwarding:"
        echo
        echo " Ports are forwarded to the running docker containers for webserver and database"
        echo " * ${WEBSERVER_HOST_PORT} -> forwarded to Airflow webserver -> airflow:8080"
        echo " * ${POSTGRES_HOST_PORT} -> forwarded to Postgres database -> postgres:5432"
        echo " * ${MYSQL_HOST_PORT} -> forwarded to MySQL database -> mysql:3306"
        echo
        echo " Here are links to those services that you can use on host:"
        # NOTE(review): the URLs below hard-code ports 28080/25433/23306 -
        # verify they stay in sync with the *_HOST_PORT values printed above.
        echo " * Webserver: http://127.0.0.1:28080"
        echo " * Postgres: jdbc:postgresql://127.0.0.1:25433/airflow?user=postgres&password=airflow"
        echo " * Mysql: jdbc:mysql://localhost:23306/airflow?user=root"
        echo
    else
        echo
    fi
}
|
|
|
|
# Prints setup instruction in case we find that autocomplete is not set
# also prints how to toggle asciiart/cheatsheet
#
# Only shown when entering the interactive Breeze shell.
# Globals read: COMMAND_TO_RUN, CMDNAME
function print_setup_instructions {
    if [[ ${COMMAND_TO_RUN} == "enter_breeze" ]] ; then
        # shellcheck disable=SC2034 # Unused variables left for comp_breeze usage
        # _comp_breeze is the shell-completion function installed by
        # 'setup-autocomplete'; its absence means autocomplete was never set up.
        if ! typeset -f "_comp_breeze" > /dev/null; then
            print_line
            echo
            echo " You can setup autocomplete by running '${CMDNAME} setup-autocomplete'"
            echo
            echo
        fi
        print_line
        echo
        echo " You can toggle ascii/cheatsheet by running:"
        echo " * ${CMDNAME} toggle-suppress-cheatsheet"
        echo " * ${CMDNAME} toggle-suppress-asciiart"
        echo
        print_line
        echo
        echo
        echo
        echo
    fi
}
|
|
|
|
# Checks that pre-commit is installed and upgrades it if needed
# this is used in case static check command is used
#
# Globals written: PIP_BIN (pip executable chosen), PATH (extended with
# ~/.local/bin in case pip installed pre-commit outside a virtualenv).
# Exits with status 1 when neither pip nor pip3 is available.
function make_sure_precommit_is_installed {
    echo
    echo "Making sure pre-commit is installed"
    echo
    # Prefer pip3; fall back to pip; abort when neither is on PATH.
    if command -v pip3 >/dev/null; then
        PIP_BIN=pip3
    elif command -v pip >/dev/null; then
        PIP_BIN=pip
    else
        echo >&2
        echo >&2 "ERROR: You need to have pip or pip3 in your PATH"
        echo >&2
        # Fixed: a stray 'S' line here used to run a nonexistent command
        # ("S: command not found") right before exiting.
        exit 1
    fi
    # Output silenced on purpose - a later pre-commit run surfaces real issues.
    "${PIP_BIN}" install --upgrade pre-commit >/dev/null 2>&1
    # Add ~/.local/bin to the path in case pip is run outside of virtualenv
    export PATH="${PATH}":~/.local/bin
}
|
|
|
|
# Removes CI and PROD images and cleans up the flag that indicates that the image was already built
#
# Globals read: PYTHON_BASE_IMAGE, AIRFLOW_CI_IMAGE, AIRFLOW_PROD_IMAGE,
#               BUILT_IMAGE_FLAG_FILE
# Removal failures (e.g. image not present) are tolerated on purpose.
function remove_images {
    local image_to_remove
    for image_to_remove in "${PYTHON_BASE_IMAGE}" "${AIRFLOW_CI_IMAGE}" "${AIRFLOW_PROD_IMAGE}"; do
        docker rmi "${image_to_remove}" || true
    done
    rm -f "${BUILT_IMAGE_FLAG_FILE}"
}
|
|
|
|
# Runs chosen static checks
#
# Globals read: STATIC_CHECK ("all", "all-but-pylint", or a single hook id),
#               EXTRA_STATIC_CHECK_OPTIONS (extra pre-commit arguments)
# Remaining arguments are forwarded verbatim to pre-commit.
function run_static_checks {
    case "${STATIC_CHECK}" in
        all)
            echo
            echo "Running: pre-commit run" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
            echo
            pre-commit run "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
            ;;
        all-but-pylint)
            echo
            echo "Setting SKIP=pylint. Running: pre-commit run" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
            echo
            echo
            # SKIP is honored by pre-commit itself and excludes the pylint hook.
            SKIP=pylint pre-commit run "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
            ;;
        *)
            echo
            echo "Running: pre-commit run" "${STATIC_CHECK}" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
            echo
            pre-commit run "${STATIC_CHECK}" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
            ;;
    esac
}
|
|
|
|
# Runs Build before a command - it will check and if needed rebuild necessary image, depending on the
# command chosen
#
# Dispatches on COMMAND_TO_RUN (set during argument parsing) and prepares the
# docker image(s) the chosen command needs: CI image, production image, or
# none at all. For build_image it performs the actual build here as well.
# Exits 1 for an unknown command or an unknown KinD cluster operation.
function run_build_command {
    case "${COMMAND_TO_RUN}" in
        run_tests|run_docker_compose)
            prepare_ci_build
            rebuild_ci_image_if_needed
            ;;
        enter_breeze)
            # The production image is only prepared, never auto-rebuilt.
            if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
                prepare_prod_build
            else
                prepare_ci_build
                rebuild_ci_image_if_needed
            fi
            ;;
        build_docs|perform_static_checks|perform_generate_constraints|perform_prepare_backport_readme|perform_prepare_backport_packages)
            prepare_ci_build
            rebuild_ci_image_if_needed
            ;;
        perform_push_image)
            if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
                prepare_prod_build
            else
                prepare_ci_build
                rebuild_ci_image_if_needed
            fi
            ;;
        build_image)
            if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
                prepare_prod_build
                build_prod_image
            else
                # The CI build also refreshes the md5 cache used to decide
                # whether later rebuilds are necessary.
                prepare_ci_build
                calculate_md5sum_for_all_files
                build_ci_image
                update_all_md5_files
                build_ci_image_manifest
            fi
            ;;
        cleanup_image|run_exec)
            # Only variable preparation is needed - no image rebuild.
            if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
                prepare_prod_build
            else
                prepare_ci_build
            fi
            ;;
        perform_initialize_local_virtualenv|perform_setup_autocomplete)
            # Host-only commands - no docker image involved.
            ;;
        manage_kind_cluster)
            # Validates and announces the operation only; the actual work
            # happens later in run_breeze_command.
            if [[ ${KIND_CLUSTER_OPERATION} == "start" ]] ; then
                echo "Starts KinD cluster"
            elif [[ ${KIND_CLUSTER_OPERATION} == "stop" ]] ; then
                echo "Stops KinD cluster"
            elif [[ ${KIND_CLUSTER_OPERATION} == "restart" ]] ; then
                echo "Restarts KinD cluster"
            elif [[ ${KIND_CLUSTER_OPERATION} == "recreate" ]] ; then
                echo "Recreates KinD cluster"
            elif [[ ${KIND_CLUSTER_OPERATION} == "status" ]] ; then
                echo "Checks status of KinD cluster"
            elif [[ ${KIND_CLUSTER_OPERATION} == "deploy" ]] ; then
                echo "Deploys Airflow to KinD cluster"
            elif [[ ${KIND_CLUSTER_OPERATION} == "test" ]] ; then
                echo "Run Kubernetes tests with the KinD cluster "
            elif [[ ${KIND_CLUSTER_OPERATION} == "shell" ]] ; then
                echo "Enter an interactive shell for kubernetes testing"
            else
                echo "ERROR: Unknown Kind Kubernetes cluster operation: '${KIND_CLUSTER_OPERATION}'"
                echo
                echo "Should be one of:"
                echo "${FORMATTED_KIND_OPERATIONS}"
                echo
                exit 1
            fi
            ;;
        *)
            echo >&2
            echo >&2 "ERROR: Unknown command to run ${COMMAND_TO_RUN}"
            echo >&2
            exit 1
            ;;
    esac
}
|
|
|
|
# Runs the actual command - depending on the command chosen it will use the right
# Convenient script and run the right command with it
#
# Dispatches on COMMAND_TO_RUN using the docker-compose wrapper scripts stored
# in BUILD_CACHE_DIR. 'set +u' is in force for the whole case because "${@}"
# and some globals may legitimately be empty; it is restored at the end.
function run_breeze_command {
    set +u
    case "${COMMAND_TO_RUN}" in
        enter_breeze)
            if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
                "${BUILD_CACHE_DIR}/${LAST_DC_PROD_FILE}" run --service-ports --rm airflow "${@}"
            else
                "${BUILD_CACHE_DIR}/${LAST_DC_CI_FILE}" run --service-ports --rm airflow "${@}"
                # Clean shared tmp once the interactive session ends.
                "${SCRIPTS_CI_DIR}/tools/ci_clear_tmp.sh"
            fi
            ;;
        run_exec)
            # Unfortunately docker-compose exec does not support exec'ing into containers started with run :(
            # Tolerate failures while probing for the running container; the
            # ':?' check below produces the actual error message.
            set +e
            if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
                DC_FILE="${BUILD_CACHE_DIR}/${LAST_DC_PROD_FILE}"
            else
                DC_FILE="${BUILD_CACHE_DIR}/${LAST_DC_CI_FILE}"
            fi
            # Locate the running airflow container via docker-compose ps.
            AIRFLOW_TESTING_CONTAINER=$("${DC_FILE}" ps | \
                grep airflow | awk '{print $1}' 2>/dev/null)
            : "${AIRFLOW_TESTING_CONTAINER:?"ERROR! Breeze must be running in order to exec into running container"}"
            set -e
            docker exec -it "${AIRFLOW_TESTING_CONTAINER}" \
                "/opt/airflow/scripts/ci/in_container/entrypoint_exec.sh" "${@}"
            ;;
        run_tests)
            # RUN_TESTS is consumed by the container entrypoint.
            export RUN_TESTS="true"
            "${BUILD_CACHE_DIR}/${LAST_DC_CI_FILE}" run --service-ports --rm airflow "$@"
            ;;
        run_docker_compose)
            set +u
            if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
                DC_FILE="${BUILD_CACHE_DIR}/${LAST_DC_PROD_FILE}"
            else
                DC_FILE="${BUILD_CACHE_DIR}/${LAST_DC_CI_FILE}"
            fi
            "${DC_FILE}" "${DOCKER_COMPOSE_COMMAND}" "${EXTRA_DC_OPTIONS[@]}" "$@"
            set -u
            ;;
        perform_static_checks)
            make_sure_precommit_is_installed
            run_static_checks "${@}"
            ;;
        build_image)
            # The image was already built in run_build_command - nothing to do.
            ;;
        cleanup_image)
            remove_images
            ;;
        perform_generate_constraints)
            run_generate_constraints
            ;;
        perform_prepare_backport_packages)
            run_prepare_backport_packages "${@}"
            ;;
        perform_prepare_backport_readme)
            run_prepare_backport_readme "${@}"
            ;;
        perform_push_image)
            if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
                push_prod_images
            else
                push_ci_image
            fi
            ;;
        perform_initialize_local_virtualenv)
            initialize_virtualenv
            ;;
        perform_setup_autocomplete)
            setup_autocomplete
            ;;
        manage_kind_cluster)
            make_sure_kubernetes_tools_are_installed
            initialize_kind_variables
            perform_kind_cluster_operation "${KIND_CLUSTER_OPERATION}"
            ;;
        build_docs)
            run_docs
            ;;
        *)
            # NOTE(review): unlike run_build_command, this fallback does not
            # 'exit 1' - the script continues after printing the error.
            echo >&2
            echo >&2 "ERROR: Unknown command to run ${COMMAND_TO_RUN}"
            echo >&2
            ;;
    esac
    set -u
}
|
|
|
|
# --- Main flow: initialize, parse arguments, prepare images, run command(s) ---

setup_default_breeze_variables

initialize_common_environment

basic_sanity_checks

# Start timing; script_end (via the EXIT trap) runs on every exit path.
script_start

trap script_end EXIT

prepare_formatted_versions

prepare_usage

# Arguments may legitimately be empty - relax unset-variable errors for parsing.
set +u
parse_arguments "${@}"

print_header_line

forget_last_answer

# Restore flags persisted from previous sessions, then validate and re-save them.
read_saved_environment_variables

check_and_save_all_params

sanitize_mounted_files

prepare_command_files

# Prepare/rebuild whatever image the chosen command requires.
run_build_command

print_header_line

print_badge

print_cheatsheet

print_setup_instructions

set +u # Account for an empty array
run_breeze_command "${REMAINING_ARGS[@]}"

# Some commands queue a follow-up command (SECOND_COMMAND_TO_RUN) to execute
# with the same remaining arguments - e.g. environment restarts.
set +u # Account for an empty array
if [[ -n ${SECOND_COMMAND_TO_RUN} ]]; then
    COMMAND_TO_RUN=${SECOND_COMMAND_TO_RUN}
    run_breeze_command "${REMAINING_ARGS[@]}"
fi