Improve production image iteration speed (#9162)
For a long time, the way the entrypoint worked in CI scripts was wrong. It was convoluted and little short of black magic: it did not allow passing multiple test targets and required separate execute-command scripts in Breeze. This is now straightened out; both the production and the CI image always use the right entrypoint by default, and parameters can be passed to the image as usual, without escaping strings. This also made it possible to remove some Breeze commands and rename several Breeze flags to make them more meaningful. Both the CI and PROD images now embed scripts for log cleaning. A history of image releases is added for the 1.10.10-* alpha-quality images.
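For illustration, the simplified calling convention now works like this (a sketch assembled from the help text below; the test targets and flags are examples only):

    # Multiple test targets plus extra pytest options after -- , no string escaping needed
    ./breeze tests tests/test_core.py tests/test_cli.py -- --logging-level=DEBUG

    # Arbitrary bash options can be passed to the shell the same way
    ./breeze shell -- -c "ls -la"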
This commit is contained in:
Parent: 639972d995
Commit: 7c12a9d4e0
.dockerignore
@@ -50,7 +50,7 @@
 !NOTICE
 !.github
 !requirements
-!entrypoint.sh
+!empty

 # Avoid triggering context change on README change (new companies using Airflow)
 # So please do not uncomment this line ;)
@@ -35,7 +35,7 @@ env:
   VERBOSE: "true"
   UPGRADE_TO_LATEST_REQUIREMENTS: "false"
   PYTHON_MAJOR_MINOR_VERSION: 3.6
-  ENABLE_REGISTRY_CACHE: "true"
+  USE_GITHUB_REGISTRY: "true"
   CACHE_IMAGE_PREFIX: ${{ github.repository }}
   CACHE_REGISTRY_USERNAME: ${{ github.actor }}
   CACHE_REGISTRY_PASSWORD: ${{ secrets.GITHUB_TOKEN }}
BREEZE.rst (435 changed lines)
@@ -293,7 +293,7 @@ Manage environments - CI (default) or Production - if ``--production-image`` fla

 Interact with CI environment:

-* Run test target specified with ``breeze test-target`` command
+* Run test target specified with ``breeze tests`` command
 * Execute arbitrary command in the test environment with ``breeze execute-command`` command
 * Execute arbitrary docker-compose command with ``breeze docker-compose`` command
@@ -707,11 +707,8 @@ This is the current syntax for `./breeze <./breeze>`_:
cleanup-image                            Cleans up the container image created
exec                                     Execs into running breeze container in new terminal
generate-requirements                    Generates pinned requirements for pip dependencies
prepare-backport-readme                  Prepares backport packages readme files
prepare-backport-packages                Prepares backport packages
push-image                               Pushes images to registry
initialize-local-virtualenv              Initializes local virtualenv
kind-cluster                             Manages KinD cluster on the host
setup-autocomplete                       Sets up autocomplete for breeze
stop                                     Stops the docker-compose environment
restart                                  Stops the docker-compose environment including DB cleanup
@@ -721,11 +718,11 @@ This is the current syntax for `./breeze <./breeze>`_:
Commands with arguments:

docker-compose               <ARG>       Executes specified docker-compose command
execute-command              <ARG>       Executes specified command in the container
kind-cluster                 <ARG>       Manages KinD cluster on the host
prepare-backport-readme      <ARG>       Prepares backport packages readme files
prepare-backport-packages    <ARG>       Prepares backport packages
static-check                 <ARG>       Performs selected static check for changed files
static-check-all-files       <ARG>       Performs selected static check for all files
test-target                  <ARG>       Runs selected test target in the container
tests                        <ARG>       Runs selected tests in the container

Help commands:
@@ -743,7 +740,7 @@ This is the current syntax for `./breeze <./breeze>`_:
 Detailed usage for command: shell


-breeze shell [FLAGS] -- <EXTRA_ARGS>
+breeze shell [FLAGS] [-- <EXTRA_ARGS>]

 This is the default subcommand if no subcommand is used.
@@ -760,6 +757,11 @@ This is the current syntax for `./breeze <./breeze>`_:
 and webserver ports are forwarded to appropriate database/webserver so that you can
 connect to it from your host environment.

+You can also pass <EXTRA_ARGS> after --; they will be passed as bash parameters. This is
+especially useful to pass bash options, for example -c to execute a command:
+
+'breeze shell -- -c "ls -la"'
+
 Flags:

 Run 'breeze flags' to see all applicable flags.
@@ -797,9 +799,13 @@ This is the current syntax for `./breeze <./breeze>`_:

 -p, --python <PYTHON_MAJOR_MINOR_VERSION>
         Python version used for the image. This is always major/minor version.

+        Note that versions 2.7 and 3.5 are only valid when installing Airflow 1.10 with
+        --install-airflow-version or --install-airflow-reference flags.
+
         One of:

-           3.6 3.7 3.8
+           2.7 3.5 3.6 3.7 3.8

 -a, --install-airflow-version <INSTALL_AIRFLOW_VERSION>
         If specified, installs Airflow directly from PIP released version. This happens at
@@ -860,10 +866,10 @@ This is the current syntax for `./breeze <./breeze>`_:
 -H, --dockerhub-repo
         DockerHub repository used to pull, push, build images. Default: airflow.

--c, --registry-cache
-        If registry cache is enabled, pulls and pushes are done from the registry cache in github.
-        You need to be logged in to the registry in order to be able to pull/push from it and you
-        need to be committer to push to airflow registry.
+-c, --github-registry
+        If GitHub registry is enabled, pulls and pushes are done from the GitHub registry, not
+        DockerHub. You need to be logged in to the registry in order to be able to pull/push from it
+        and you need to be a committer to push to Apache Airflow's GitHub registry.

 -G, --github-organisation
         GitHub organisation used to pull, push images when cache is used. Default: apache.
@@ -893,9 +899,13 @@ This is the current syntax for `./breeze <./breeze>`_:

 -p, --python <PYTHON_MAJOR_MINOR_VERSION>
         Python version used for the image. This is always major/minor version.

+        Note that versions 2.7 and 3.5 are only valid when installing Airflow 1.10 with
+        --install-airflow-version or --install-airflow-reference flags.
+
         One of:

-           3.6 3.7 3.8
+           2.7 3.5 3.6 3.7 3.8

 -I, --production-image
         Use production image for entering the environment and builds (not for tests).
@@ -912,7 +922,7 @@ This is the current syntax for `./breeze <./breeze>`_:
 Detailed usage for command: exec


-breeze exec
+breeze exec [-- <EXTRA_ARGS>]

 Execs into interactive shell to an already running container. The container must be started
 already by breeze shell command. If you are not familiar with tmux, this is the best
@@ -938,94 +948,13 @@ This is the current syntax for `./breeze <./breeze>`_:

-p, --python <PYTHON_MAJOR_MINOR_VERSION>
        Python version used for the image. This is always major/minor version.

        Note that versions 2.7 and 3.5 are only valid when installing Airflow 1.10 with
        --install-airflow-version or --install-airflow-reference flags.

        One of:

           3.6 3.7 3.8

-v, --verbose
        Show verbose information about executed commands (enabled by default for running test).
        Note that you can further increase verbosity and see all the commands executed by breeze
        by running 'export VERBOSE_COMMANDS="true"' before running breeze.


####################################################################################################


Detailed usage for command: prepare-backport-readme


breeze prepare-backport-packages [FLAGS] -- [YYYY.MM.DD] [PACKAGE_ID ...]

Prepares README.md files for backport packages. You can provide (after --) optional version
in the form of YYYY.MM.DD, optionally followed by the list of packages to generate readme for.
If the first parameter is not formatted as a date, then today is regenerated.
If no packages are specified, readme for all packages are generated.
If no date is specified, current date + 3 days is used (allowing for PMC votes to pass).

Examples:

'breeze prepare-backport-readme' or
'breeze prepare-backport-readme -- 2020.05.10' or
'breeze prepare-backport-readme -- 2020.05.10 https google amazon'

General form:

'breeze prepare-backport-readme -- YYYY.MM.DD <PACKAGE_ID> ...'

* YYYY.MM.DD - is the CALVER version of the package to prepare. Note that this date
  cannot be earlier than the already released version (the script will fail if it
  will be). It can be set in the future anticipating the future release date.

* <PACKAGE_ID> is usually directory in the airflow/providers folder (for example
  'google') but in several cases, it might be one level deeper separated with
  '.' for example 'apache.hive'

Flags:

-v, --verbose
        Show verbose information about executed commands (enabled by default for running test).
        Note that you can further increase verbosity and see all the commands executed by breeze
        by running 'export VERBOSE_COMMANDS="true"' before running breeze.


####################################################################################################


Detailed usage for command: prepare-backport-packages


breeze prepare-backport-packages [FLAGS] -- [PACKAGE_ID ...]

Prepares backport packages. You can provide (after --) optional list of packages to prepare.
If no packages are specified, readme for all packages are generated. You can specify optional
--version-suffix-for-svn flag to generate rc candidate packages to upload to SVN or
--version-suffix-for-pypi flag to generate rc candidates for PyPI packages.

Examples:

'breeze prepare-backport-packages' or
'breeze prepare-backport-packages -- google' or
'breeze prepare-backport-packages --version-suffix-for-svn rc1 -- http google amazon' or
'breeze prepare-backport-packages --version-suffix-for-pypi rc1 -- http google amazon'

General form:

'breeze prepare-backport-packages \
    [--version-suffix-for-svn|--version-suffix-for-pypi] -- <PACKAGE_ID> ...'

* <PACKAGE_ID> is usually directory in the airflow/providers folder (for example
  'google'), but in several cases, it might be one level deeper separated with '.'
  for example 'apache.hive'

Flags:

-S, --version-suffix-for-pypi
        Adds optional suffix to the version in the generated backport package. It can be used
        to generate rc1/rc2 ... versions of the packages to be uploaded to PyPI.

-N, --version-suffix-for-svn
        Adds optional suffix to the generated names of package. It can be used to generate
        rc1/rc2 ... versions of the packages to be uploaded to SVN.

           2.7 3.5 3.6 3.7 3.8

-v, --verbose
        Show verbose information about executed commands (enabled by default for running test).
@@ -1042,7 +971,7 @@ This is the current syntax for `./breeze <./breeze>`_:
 breeze push-image [FLAGS]

 Pushes images to docker registry. You can push the images to DockerHub registry (default)
-or to the GitHub cache registry (if --registry-cache flag is used).
+or to the GitHub registry (if --github-registry flag is used).

 For DockerHub pushes --dockerhub-user and --dockerhub-repo flags can be used to specify
 the repository to push to. For GitHub repository --github-organisation and --github-repo
@@ -1055,8 +984,8 @@ This is the current syntax for `./breeze <./breeze>`_:
 'breeze push-image' or
 'breeze push-image --dockerhub-user user' to push to your private registry or
 'breeze push-image --production-image' - to push production image or
-'breeze push-image --registry-cache' - to push to GitHub cache or
-'breeze push-image --registry-cache --github-organisation org' - for other organisation
+'breeze push-image --github-registry' - to push to GitHub image registry or
+'breeze push-image --github-registry --github-organisation org' - for other organisation

 Flags:
@@ -1066,10 +995,10 @@ This is the current syntax for `./breeze <./breeze>`_:
 -H, --dockerhub-repo
         DockerHub repository used to pull, push, build images. Default: airflow.

--c, --registry-cache
-        If registry cache is enabled, pulls and pushes are done from the registry cache in github.
-        You need to be logged in to the registry in order to be able to pull/push from it and you
-        need to be committer to push to airflow registry.
+-c, --github-registry
+        If GitHub registry is enabled, pulls and pushes are done from the GitHub registry, not
+        DockerHub. You need to be logged in to the registry in order to be able to pull/push from it
+        and you need to be a committer to push to Apache Airflow's GitHub registry.

 -G, --github-organisation
         GitHub organisation used to pull, push images when cache is used. Default: apache.
@@ -1101,75 +1030,13 @@ This is the current syntax for `./breeze <./breeze>`_:

-p, --python <PYTHON_MAJOR_MINOR_VERSION>
        Python version used for the image. This is always major/minor version.

        Note that versions 2.7 and 3.5 are only valid when installing Airflow 1.10 with
        --install-airflow-version or --install-airflow-reference flags.

        One of:

           3.6 3.7 3.8


####################################################################################################


Detailed usage for command: kind-cluster


breeze kind-cluster [FLAGS] OPERATION

Manages host-side Kind Kubernetes cluster that is used to run Kubernetes integration tests.
It allows you to start/stop/restart/status the Kind Kubernetes cluster and deploy Airflow to it.
This enables you to run tests inside the breeze environment with latest airflow images loaded.
Note that in case of deploying airflow, the first step is to rebuild the image and load it
to the cluster, so you can also pass appropriate build image flags that will influence
rebuilding the production image. Operation is one of:

   start stop restart status deploy test

Flags:

-p, --python <PYTHON_MAJOR_MINOR_VERSION>
        Python version used for the image. This is always major/minor version.
        One of:

           3.6 3.7 3.8

-F, --force-build-images
        Forces building of the local docker images. The images are rebuilt
        automatically for the first time or when changes are detected in
        package-related files, but you can force it using this flag.

-P, --force-pull-images
        Forces pulling of images from DockerHub before building to populate cache. The
        images are pulled by default only for the first time you run the
        environment, later the locally built images are used as cache.

-E, --extras
        Extras to pass to build images. The defaults are different for CI and production images:

        CI image:
               devel_ci

        Production image:
               async,aws,azure,celery,dask,elasticsearch,gcp,kubernetes,mysql,postgres,redis,slack,
               ssh,statsd,virtualenv

--additional-extras
        Additional extras to pass to build images. The default is no additional extras.

--additional-python-deps
        Additional python dependencies to use when building the images.

--additional-dev-deps
        Additional apt dev dependencies to use when building the images.

--additional-runtime-deps
        Additional apt runtime dependencies to use when building the images.

-C, --force-clean-images
        Force build images with cache disabled. This will remove the pulled or built images
        and start building images from scratch. This might take a long time.

-L, --use-local-cache
        Uses local cache to build images. No pulled images will be used, but results of local
        builds in the Docker cache are used instead.

           2.7 3.5 3.6 3.7 3.8


####################################################################################################
@@ -1243,7 +1110,7 @@ This is the current syntax for `./breeze <./breeze>`_:
 Detailed usage for command: docker-compose


-breeze docker-compose [FLAGS] COMMAND -- <EXTRA_ARGS>
+breeze docker-compose [FLAGS] COMMAND [-- <EXTRA_ARGS>]

 Run docker-compose command instead of entering the environment. Use 'help' as command
 to see available commands. The <EXTRA_ARGS> passed after -- are treated
@@ -1255,57 +1122,13 @@ This is the current syntax for `./breeze <./breeze>`_:

-p, --python <PYTHON_MAJOR_MINOR_VERSION>
        Python version used for the image. This is always major/minor version.

        Note that versions 2.7 and 3.5 are only valid when installing Airflow 1.10 with
        --install-airflow-version or --install-airflow-reference flags.

        One of:

           3.6 3.7 3.8

-b, --backend <BACKEND>
        Backend to use for tests - it determines which database is used.
        One of:

           sqlite mysql postgres

        Default: sqlite

--postgres-version <POSTGRES_VERSION>
        Postgres version used. One of:

           9.6 10

--mysql-version <MYSQL_VERSION>
        Mysql version used. One of:

           5.7 8

-v, --verbose
        Show verbose information about executed commands (enabled by default for running test).
        Note that you can further increase verbosity and see all the commands executed by breeze
        by running 'export VERBOSE_COMMANDS="true"' before running breeze.


####################################################################################################


Detailed usage for command: execute-command


breeze execute-command [FLAGS] COMMAND -- <EXTRA_ARGS>

Run chosen command instead of entering the environment. The command is run using
'bash -c "<command with args>"'. If you need to pass arguments to your command, you need
to pass them together with the command surrounded with " or '. Alternatively you can
pass arguments as <EXTRA_ARGS> passed after --. For example:

'breeze execute-command "ls -la"' or
'breeze execute-command ls -- --la'

Flags:

-p, --python <PYTHON_MAJOR_MINOR_VERSION>
        Python version used for the image. This is always major/minor version.
        One of:

           3.6 3.7 3.8
           2.7 3.5 3.6 3.7 3.8

-b, --backend <BACKEND>
        Backend to use for tests - it determines which database is used.
@@ -1352,9 +1175,13 @@ This is the current syntax for `./breeze <./breeze>`_:

 -p, --python <PYTHON_MAJOR_MINOR_VERSION>
         Python version used for the image. This is always major/minor version.

+        Note that versions 2.7 and 3.5 are only valid when installing Airflow 1.10 with
+        --install-airflow-version or --install-airflow-reference flags.
+
         One of:

-           3.6 3.7 3.8
+           2.7 3.5 3.6 3.7 3.8

 -F, --force-build-images
         Forces building of the local docker images. The images are rebuilt
@@ -1400,10 +1227,95 @@ This is the current syntax for `./breeze <./breeze>`_:
####################################################################################################


Detailed usage for command: prepare-backport-readme


breeze prepare-backport-readme [FLAGS] [YYYY.MM.DD] [PACKAGE_ID ...]

Prepares README.md files for backport packages. You can provide (after --) optional version
in the form of YYYY.MM.DD, optionally followed by the list of packages to generate readme for.
If the first parameter is not formatted as a date, then today is regenerated.
If no packages are specified, readme for all packages are generated.
If no date is specified, current date + 3 days is used (allowing for PMC votes to pass).

Examples:

'breeze prepare-backport-readme' or
'breeze prepare-backport-readme 2020.05.10' or
'breeze prepare-backport-readme 2020.05.10 https google amazon'

General form:

'breeze prepare-backport-readme YYYY.MM.DD <PACKAGE_ID> ...'

* YYYY.MM.DD - is the CALVER version of the package to prepare. Note that this date
  cannot be earlier than the already released version (the script will fail if it
  will be). It can be set in the future anticipating the future release date.

* <PACKAGE_ID> is usually directory in the airflow/providers folder (for example
  'google') but in several cases, it might be one level deeper separated with
  '.' for example 'apache.hive'

Flags:

-v, --verbose
        Show verbose information about executed commands (enabled by default for running test).
        Note that you can further increase verbosity and see all the commands executed by breeze
        by running 'export VERBOSE_COMMANDS="true"' before running breeze.


####################################################################################################


Detailed usage for command: prepare-backport-packages


breeze prepare-backport-packages [FLAGS] [PACKAGE_ID ...]

Prepares backport packages. You can provide (after --) optional list of packages to prepare.
If no packages are specified, readme for all packages are generated. You can specify optional
--version-suffix-for-svn flag to generate rc candidate packages to upload to SVN or
--version-suffix-for-pypi flag to generate rc candidates for PyPI packages.

Examples:

'breeze prepare-backport-packages' or
'breeze prepare-backport-packages google' or
'breeze prepare-backport-packages --version-suffix-for-svn rc1 http google amazon' or
'breeze prepare-backport-packages --version-suffix-for-pypi rc1 http google amazon'

General form:

'breeze prepare-backport-packages \
    [--version-suffix-for-svn|--version-suffix-for-pypi] <PACKAGE_ID> ...'

* <PACKAGE_ID> is usually directory in the airflow/providers folder (for example
  'google'), but in several cases, it might be one level deeper separated with '.'
  for example 'apache.hive'

Flags:

-S, --version-suffix-for-pypi
        Adds optional suffix to the version in the generated backport package. It can be used
        to generate rc1/rc2 ... versions of the packages to be uploaded to PyPI.

-N, --version-suffix-for-svn
        Adds optional suffix to the generated names of package. It can be used to generate
        rc1/rc2 ... versions of the packages to be uploaded to SVN.

-v, --verbose
        Show verbose information about executed commands (enabled by default for running test).
        Note that you can further increase verbosity and see all the commands executed by breeze
        by running 'export VERBOSE_COMMANDS="true"' before running breeze.


####################################################################################################


Detailed usage for command: static-check


-breeze static-check [FLAGS] STATIC_CHECK
+breeze static-check [FLAGS] STATIC_CHECK [-- <EXTRA_ARGS>]

Run selected static checks for currently changed files. You should specify static check that
you would like to run or 'all' to run all checks. One of:
@@ -1424,6 +1336,7 @@ This is the current syntax for `./breeze <./breeze>`_:

 'breeze static-check mypy' or
 'breeze static-check mypy -- --files tests/core.py'
+'breeze static-check mypy -- --all-files'

 You can see all the options by adding --help EXTRA_ARG:
@@ -1433,49 +1346,18 @@ This is the current syntax for `./breeze <./breeze>`_:
 ####################################################################################################


-Detailed usage for command: static-check-all-files
+Detailed usage for command: tests


-breeze static-check-all [FLAGS] STATIC_CHECK
-
-Run selected static checks for all applicable files. You should specify static check that
-you would like to run or 'all' to run all checks. One of:
-
-                 all all-but-pylint airflow-config-yaml base-operator bat-tests build
-                 build-providers-dependencies check-apache-license check-executables-have-shebangs
-                 check-hooks-apply check-integrations check-merge-conflict check-xml
-                 consistent-pylint daysago-import-check debug-statements detect-private-key doctoc
-                 end-of-file-fixer fix-encoding-pragma flake8 forbid-tabs
-                 incorrect-use-of-LoggingMixin insert-license isort language-matters lint-dockerfile
-                 mixed-line-ending mypy provide-create-sessions pydevd pylint pylint-tests
-                 python-no-log-warn rst-backticks setup-order shellcheck stylelint
-                 trailing-whitespace update-breeze-file update-extras update-local-yml-file
-                 update-setup-cfg-file yamllint
-
-You can pass extra arguments including options to the pre-commit framework as
-<EXTRA_ARGS> passed after --. For example:
-
-'breeze static-check-all-files mypy' or
-'breeze static-check-all-files mypy -- --verbose'
-
-You can see all the options by adding --help EXTRA_ARG:
-
-'breeze static-check-all-files mypy -- --help'
-
-
-####################################################################################################
-
-
-Detailed usage for command: test-target
-
-
-breeze test-target [FLAGS] TEST_TARGET -- <EXTRA_ARGS>
+breeze tests [FLAGS] [TEST_TARGET ..] [-- <EXTRA_ARGS>]

 Run the specified unit test target. There might be multiple
 targets specified separated with commas. The <EXTRA_ARGS> passed after -- are treated
-as additional options passed to pytest. For example:
+as additional options passed to pytest. You can pass 'tests' as target to
+run all tests. For example:

-'breeze test-target tests/test_core.py -- --logging-level=DEBUG'
+'breeze tests tests/test_core.py -- --logging-level=DEBUG'
+'breeze tests tests'

 Flags:
@@ -1525,9 +1407,13 @@ This is the current syntax for `./breeze <./breeze>`_:

 -p, --python <PYTHON_MAJOR_MINOR_VERSION>
         Python version used for the image. This is always major/minor version.

+        Note that versions 2.7 and 3.5 are only valid when installing Airflow 1.10 with
+        --install-airflow-version or --install-airflow-reference flags.
+
         One of:

-           3.6 3.7 3.8
+           2.7 3.5 3.6 3.7 3.8

 ****************************************************************************************************
 Choose backend to run for Airflow
@@ -1687,10 +1573,10 @@ This is the current syntax for `./breeze <./breeze>`_:
 -H, --dockerhub-repo
         DockerHub repository used to pull, push, build images. Default: airflow.

--c, --registry-cache
-        If registry cache is enabled, pulls and pushes are done from the registry cache in github.
-        You need to be logged in to the registry in order to be able to pull/push from it and you
-        need to be committer to push to airflow registry.
+-c, --github-registry
+        If GitHub registry is enabled, pulls and pushes are done from the GitHub registry, not
+        DockerHub. You need to be logged in to the registry in order to be able to pull/push from it
+        and you need to be a committer to push to Apache Airflow's GitHub registry.

 -G, --github-organisation
         GitHub organisation used to pull, push images when cache is used. Default: apache.
@@ -1725,17 +1611,6 @@ This is the current syntax for `./breeze <./breeze>`_:

 .. END BREEZE HELP MARKER

-Convenience Scripts
--------------------
-
-Once you run ``./breeze`` you can also execute various actions via generated convenience scripts:
-
-.. code-block::
-
-  Enter the environment          : ./.build/cmd_run
-  Run command in the environment : ./.build/cmd_run "[command with args]" [bash options]
-  Run tests in the environment   : ./.build/test_run [test-target] [pytest options]
-  Run Docker compose command     : ./.build/dc [help/pull/...] [docker-compose options]
-
 Troubleshooting
 ===============
Dockerfile (35 changed lines)
@@ -157,6 +157,23 @@ ENV PIP_VERSION=${PIP_VERSION}

 RUN pip install --upgrade pip==${PIP_VERSION}

+ARG AIRFLOW_REPO=apache/airflow
+ENV AIRFLOW_REPO=${AIRFLOW_REPO}
+
+ARG AIRFLOW_BRANCH=master
+ENV AIRFLOW_BRANCH=${AIRFLOW_BRANCH}
+
+ARG AIRFLOW_EXTRAS
+ARG ADDITIONAL_AIRFLOW_EXTRAS=""
+ENV AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS}${ADDITIONAL_AIRFLOW_EXTRAS:+,}${ADDITIONAL_AIRFLOW_EXTRAS}
+
+# In the production build image segment we want to pre-install the master version of airflow
+# dependencies from github, so that we do not have to always reinstall them from scratch.
+RUN pip install --user \
+    "https://github.com/${AIRFLOW_REPO}/archive/${AIRFLOW_BRANCH}.tar.gz#egg=apache-airflow[${AIRFLOW_EXTRAS}]" \
+    --constraint "https://raw.githubusercontent.com/${AIRFLOW_REPO}/${AIRFLOW_BRANCH}/requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt" \
+    && pip uninstall --yes apache-airflow;
+
 ARG AIRFLOW_SOURCES_FROM="."
 ENV AIRFLOW_SOURCES_FROM=${AIRFLOW_SOURCES_FROM}
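The ``:+,`` parameter expansion in the AIRFLOW_EXTRAS line above inserts the comma separator only when additional extras are actually set. A quick shell check of the mechanism (illustrative values, not part of the commit):

    AIRFLOW_EXTRAS="all" ADDITIONAL_AIRFLOW_EXTRAS=""
    echo "${AIRFLOW_EXTRAS}${ADDITIONAL_AIRFLOW_EXTRAS:+,}${ADDITIONAL_AIRFLOW_EXTRAS}"  # -> all
    ADDITIONAL_AIRFLOW_EXTRAS="mssql,hdfs"
    echo "${AIRFLOW_EXTRAS}${ADDITIONAL_AIRFLOW_EXTRAS:+,}${ADDITIONAL_AIRFLOW_EXTRAS}"  # -> all,mssql,hdfs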
@@ -171,10 +188,6 @@ ENV CASS_DRIVER_BUILD_CONCURRENCY=${CASS_DRIVER_BUILD_CONCURRENCY}
 ARG AIRFLOW_VERSION
 ENV AIRFLOW_VERSION=${AIRFLOW_VERSION}

-ARG AIRFLOW_EXTRAS
-ARG ADDITIONAL_AIRFLOW_EXTRAS=""
-ENV AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS}${ADDITIONAL_AIRFLOW_EXTRAS:+,}${ADDITIONAL_AIRFLOW_EXTRAS}
-
 ARG ADDITIONAL_PYTHON_DEPS=""
 ENV ADDITIONAL_PYTHON_DEPS=${ADDITIONAL_PYTHON_DEPS}
@@ -214,13 +227,6 @@ RUN \
         rm -rf "${WWW_DIR}/node_modules"; \
     fi

-ARG ENTRYPOINT_FILE="entrypoint.sh"
-ENV ENTRYPOINT_FILE="${ENTRYPOINT_FILE}"
-
-# hadolint ignore=DL3020
-ADD ${ENTRYPOINT_FILE} /entrypoint
-RUN chmod a+x /entrypoint
-
 ##############################################################################################
 # This is the actual Airflow image - much smaller than the build one. We copy
 # installed Airflow and all its dependencies from the build image to make it smaller.
@@ -333,7 +339,11 @@ RUN mkdir -pv "${AIRFLOW_HOME}"; \
     chown -R "airflow" "${AIRFLOW_HOME}"

 COPY --chown=airflow:airflow --from=airflow-build-image /root/.local "/home/airflow/.local"
-COPY --chown=airflow:airflow --from=airflow-build-image /entrypoint /entrypoint
+
+COPY scripts/prod/entrypoint_prod.sh /entrypoint
+COPY scripts/prod/clean-logs.sh /clean-logs
+
+RUN chmod a+x /entrypoint /clean-logs

 USER airflow
@@ -345,6 +355,5 @@ ENV AIRFLOW__CORE__LOAD_EXAMPLES="false"

 EXPOSE 8080

-COPY scripts/include/clean-logs.sh /usr/local/bin/clean-airflow-logs
 ENTRYPOINT ["/usr/bin/dumb-init", "--", "/entrypoint"]
 CMD ["--help"]

Dockerfile.ci
@@ -224,10 +224,11 @@ ENV AIRFLOW_CI_BUILD_EPOCH=${AIRFLOW_CI_BUILD_EPOCH}
 # In case of CI builds we want to pre-install the master version of airflow dependencies so that
 # we do not have to always reinstall them from scratch.
 # This can be reinstalled from latest master by increasing PIP_DEPENDENCIES_EPOCH_NUMBER.
-# And is automatically reinstalled from scratch with every python patch level release
-RUN pip install "https://github.com/${AIRFLOW_REPO}/archive/${AIRFLOW_BRANCH}.tar.gz#egg=apache-airflow[${AIRFLOW_EXTRAS}]" \
+# And is automatically reinstalled from scratch every time a patch release of python gets released
+RUN pip install \
+    "https://github.com/${AIRFLOW_REPO}/archive/${AIRFLOW_BRANCH}.tar.gz#egg=apache-airflow[${AIRFLOW_EXTRAS}]" \
     --constraint "https://raw.githubusercontent.com/${AIRFLOW_REPO}/${AIRFLOW_BRANCH}/requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt" \
-    && pip uninstall --yes apache-airflow
+    && pip uninstall --yes apache-airflow;

 # Link dumb-init for backwards compatibility (so that older images also work)
 RUN ln -sf /usr/bin/dumb-init /usr/local/bin/dumb-init
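To pull in newer master dependencies, the comment above says to increase PIP_DEPENDENCIES_EPOCH_NUMBER; assuming it is exposed as a regular build argument, a rebuild would look like this sketch:

    docker build . -f Dockerfile.ci --build-arg PIP_DEPENDENCIES_EPOCH_NUMBER=2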
@@ -275,7 +276,8 @@ COPY airflow/www/static ${AIRFLOW_SOURCES}/airflow/www/static/
 # Package JS/css for production
 RUN yarn --cwd airflow/www run prod

-COPY entrypoint.sh /entrypoint.sh
+COPY scripts/ci/in_container/entrypoint_ci.sh /entrypoint
+RUN chmod a+x /entrypoint

 # Copy selected subdirectories only
 COPY .github/ ${AIRFLOW_SOURCES}/.github/
@@ -291,9 +293,6 @@ COPY .coveragerc .rat-excludes .flake8 pylintrc LICENSE MANIFEST.in NOTICE CHANG
            setup.cfg setup.py \
            ${AIRFLOW_SOURCES}/

-# Needed for building images via docker-in-docker inside the docker
-COPY Dockerfile.ci ${AIRFLOW_SOURCES}/Dockerfile.ci
-
 # Install autocomplete for airflow
 RUN register-python-argcomplete airflow >> ~/.bashrc
@@ -317,6 +316,4 @@ ENV PATH="${HOME}:${PATH}"

 EXPOSE 8080

-ENTRYPOINT ["/usr/bin/dumb-init", "--", "/entrypoint.sh"]
-
-CMD ["--help"]
+ENTRYPOINT ["/usr/bin/dumb-init", "--", "/entrypoint"]
IMAGES.rst (344 changed lines)
@@ -53,13 +53,13 @@ also change the repository itself by adding ``--dockerhub-user`` and ``--dockerh

 You can build the CI image using this command:

-.. code-block::
+.. code-block:: bash

   ./breeze build-image

 You can build the production image using this command:

-.. code-block::
+.. code-block:: bash

   ./breeze build-image --production-image
@@ -73,7 +73,7 @@ can change the extras via the ``--extras`` parameters. You can see default extra
 For example, if you want to build the python 3.7 version of the production image with
 "all" extras installed you should run this command:

-.. code-block::
+.. code-block:: bash

   ./breeze build-image --python 3.7 --extras "all" --production-image
@@ -90,42 +90,132 @@ In Breeze by default, the airflow is installed using local sources of Apache Air
 You can also build production images from PIP packages via providing ``--install-airflow-version``
 parameter to Breeze:

-.. code-block::
+.. code-block:: bash

   ./breeze build-image --python 3.7 --extras=gcp --production-image --install-airflow-version=1.10.9

 This will build the image using a command similar to:

-.. code-block::
+.. code-block:: bash

   pip install apache-airflow[sendgrid]==1.10.9 \
     --constraint https://raw.githubusercontent.com/apache/airflow/v1-10-test/requirements/requirements-python3.7.txt

-This will also download entrypoint script from https://raw.githubusercontent.com/apache/airflow/v1-10-test/entrypoint.sh
-url. It is important so that we have matching version of the requirements.
-
-The requirement files and entrypoint only appeared in version 1.10.10 of airflow so if you install
+The requirement files only appeared in version 1.10.10 of airflow, so if you install
 an earlier version - both constraint and requirements should point to the 1.10.10 version.
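For instance, following that rule, a manual install of 1.10.9 would still use the 1.10.10 constraint file (an illustrative sketch; the extras are arbitrary):

.. code-block:: bash

  pip install apache-airflow[gcp]==1.10.9 \
    --constraint https://raw.githubusercontent.com/apache/airflow/1.10.10/requirements/requirements-python3.7.txt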
 You can also build production images from a specific Git version via providing ``--install-airflow-reference``
 parameter to Breeze:

-.. code-block::
+.. code-block:: bash

   pip install https://github.com/apache/airflow/archive/<tag>.tar.gz#egg=apache-airflow \
     --constraint https://raw.githubusercontent.com/apache/airflow/<tag>/requirements/requirements-python3.7.txt

-This will also download entrypoint script from ``https://raw.githubusercontent.com/apache/airflow/<tag>/entrypoint.sh``
-url.
Using cache during builds
=========================

The default mechanism used in Breeze for building images uses - as base - images pulled from DockerHub or
GitHub Image Registry. This is in order to speed up local builds and CI builds - instead of 15 minutes
for a rebuild of CI images, it usually takes less than 3 minutes when cache is used. For CI builds this is
usually the best strategy - to use the default "pull" cache. The same applies to the Production Image - it's better
to rely on the "pull" mechanism rather than rebuild the image from scratch.

However when you are iterating on the images and want to rebuild them quickly and often, you can provide the
``--use-local-cache`` flag to build commands - this way the standard docker mechanism based on local cache
will be used. The first time you run it, it will take considerably longer than if you use the
default pull mechanism, but then when you do small, incremental changes to local sources, Dockerfile image
and scripts, further rebuilds with --use-local-cache will be considerably faster.

.. code-block:: bash

  ./breeze build-image --python 3.7 --production-image --use-local-cache

You can also turn on local docker caching by setting the DOCKER_CACHE variable to "local" instead of the default
"pulled" and exporting it to Breeze.

.. code-block:: bash

  export DOCKER_CACHE="local"

You can also - if you really want - disable caching altogether by setting this variable to "no-cache".
This is how "scheduled" builds in our CI are run - those builds take a long time because they
always rebuild everything from scratch.

.. code-block:: bash

  export DOCKER_CACHE="no-cache"

Choosing image registry
=======================

By default images are pulled and pushed from and to the DockerHub registry when you use Breeze's push-image
or build commands.

Our images are named as follows:

.. code-block:: bash

  apache/airflow:<BRANCH_OR_TAG>[-<PATCH>]-pythonX.Y        - for production images
  apache/airflow:<BRANCH_OR_TAG>[-<PATCH>]-pythonX.Y-ci     - for CI images
  apache/airflow:<BRANCH_OR_TAG>[-<PATCH>]-pythonX.Y-build  - for production build stage

For example:

.. code-block:: bash

  apache/airflow:master-python3.6         - production "latest" image from current master
  apache/airflow:master-python3.6-ci      - CI "latest" image from current master
  apache/airflow:v1-10-test-python2.7-ci  - CI "latest" image from current v1-10-test branch
  apache/airflow:1.10.10-python3.6        - production image for 1.10.10 release
  apache/airflow:1.10.10-1-python3.6      - production image for 1.10.10 with some patches applied


You can see DockerHub images at `<https://hub.docker.com/repository/docker/apache/airflow>`_
By default the DockerHub registry is used when you push or pull such images.
However for CI builds we keep the images in the GitHub registry as well - this way we can easily push
the images automatically after merge requests and use such images for Pull Requests
as cache - which makes it much faster for CI builds (images are available in cache
right after a merge request to master finishes its build). The difference is visible especially if
significant changes are done in the Dockerfile.CI.

The images are named differently (in the Docker definition of image names the registry URL is part of the
image name if DockerHub is not used as registry). Also, GitHub has its own structure for registries:
each project has its own registry naming convention that should be followed. The names of
images for the GitHub registry are:

.. code-block:: bash

  docker.pkg.github.com/apache/airflow/<BRANCH>-pythonX.Y        - for production images
  docker.pkg.github.com/apache/airflow/<BRANCH>-pythonX.Y-ci     - for CI images
  docker.pkg.github.com/apache/airflow/<BRANCH>-pythonX.Y-build  - for production build stage

Note that we never push or pull TAG images to the GitHub registry. It is only used for CI builds.

You can see all the current GitHub images at `<https://github.com/apache/airflow/packages>`_

In order to interact with the GitHub images you need to add the ``--github-registry`` flag to the pull/push
commands in Breeze. This way the images will be pulled/pushed from/to GitHub rather than from/to
DockerHub. Images are built locally as ``apache/airflow`` images but then they are tagged with the right
GitHub tags for you.
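A sketch of such invocations (the flags are those documented in the Breeze help above; the organisation name is illustrative):

.. code-block:: bash

  ./breeze push-image --github-registry
  ./breeze push-image --github-registry --github-organisation myorg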
 You can read more about the CI configuration and how CI builds are using DockerHub/GitHub images
 in `<CI.rst>`_.

 Note that you need to be a committer and have the right to push to DockerHub and GitHub, and you need to
 be logged in. Only committers can push images directly.


 Technical details of Airflow images
 ===================================

 The CI image is used by Breeze as a shell image, but it is also used during CI builds.
 The image is a single-segment image that contains the Airflow installation with "all" dependencies installed.
-It is optimised for rebuild speed It installs PIP dependencies from the current branch first -
-so that any changes in setup.py do not trigger
-reinstalling of all dependencies. There is a second step of installation that re-installs the dependencies
+It is optimised for rebuild speed. It installs PIP dependencies from the current branch first -
+so that any changes in setup.py do not trigger reinstalling of all dependencies.
+There is a second step of installation that re-installs the dependencies
 from the latest sources so that we are sure that latest dependencies are installed.

 The production image is a multi-segment image. The first segment "airflow-build-image" contains all the
@@ -135,6 +225,11 @@ build it from local sources. This is particularly useful in CI environment where
 to run Kubernetes tests. See below for the list of arguments that should be provided to build
 production image from the local sources.

+The image is primarily optimised for the size of the final image, but also for the speed of rebuilds - the
+'airflow-build-image' segment uses the same technique as the CI builds for pre-installing PIP dependencies.
+It first pre-installs them from the right github branch and only after that the final airflow installation is
+done from either local sources or a remote location (PIP or github repository).
+
 Manually building the images
 ----------------------------
@@ -180,11 +275,10 @@ The following build arguments (``--build-arg`` in docker build command) can be u
 |                                          |                                          | done for cassandra driver (much faster)  |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_REPO``                         | ``apache/airflow``                       | the repository from which PIP            |
-|                                          |                                          | dependencies are installed (CI           |
-|                                          |                                          | optimised)                               |
+|                                          |                                          | dependencies are pre-installed           |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_BRANCH``                       | ``master``                               | the branch from which PIP dependencies   |
-|                                          |                                          | are installed (CI optimised)             |
+|                                          |                                          | are pre-installed                        |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_CI_BUILD_EPOCH``               | ``1``                                    | increasing this value will reinstall PIP |
 |                                          |                                          | dependencies from the repository from    |
|
@ -192,9 +286,9 @@ The following build arguments (``--build-arg`` in docker build command) can be u
|
|||
+------------------------------------------+------------------------------------------+------------------------------------------+
|
||||
| ``AIRFLOW_EXTRAS`` | ``all`` | extras to install |
|
||||
+------------------------------------------+------------------------------------------+------------------------------------------+
|
||||
| ``ADDITIONAL_AIRFLOW_EXTRAS`` | ```` | additional extras to install |
|
||||
| ``ADDITIONAL_AIRFLOW_EXTRAS`` | | additional extras to install |
|
||||
+------------------------------------------+------------------------------------------+------------------------------------------+
|
||||
| ``ADDITIONAL_PYTHON_DEPS`` | \```\` | additional python dependencies to |
|
||||
| ``ADDITIONAL_PYTHON_DEPS`` | | additional python dependencies to |
|
||||
| | | install |
|
||||
+------------------------------------------+------------------------------------------+------------------------------------------+
|
||||
| ``ADDITIONAL_DEV_DEPS`` | ```` | additional apt dev dependencies to |
|
||||
|
@ -208,7 +302,7 @@ Here are some examples of how CI images can built manually. CI is always built f
|
|||
|
||||
This builds the CI image in version 3.7 with default extras ("all").
|
||||
|
||||
.. code-block::
|
||||
.. code-block:: bash
|
||||
|
||||
docker build . -f Dockerfile.ci --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
|
||||
--build-arg PYTHON_MAJOR_MINOR_VERSION=3.7
|
||||
|
@@ -216,7 +310,7 @@ This builds the CI image in version 3.7 with default extras ("all").

 This builds the CI image in version 3.6 with "gcp" extra only.

-.. code-block::
+.. code-block:: bash

   docker build . -f Dockerfile.ci --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.6 --build-arg AIRFLOW_EXTRAS=gcp
@@ -224,14 +318,14 @@ This builds the CI image in version 3.6 with "gcp" extra only.

 This builds the CI image in version 3.6 with "apache-beam" extra added.

-.. code-block::
+.. code-block:: bash

   docker build . -f Dockerfile.ci --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.6 --build-arg ADDITIONAL_AIRFLOW_EXTRAS="apache-beam"

 This builds the CI image in version 3.6 with "mssql" additional package added.

-.. code-block::
+.. code-block:: bash

   docker build . -f Dockerfile.ci --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.6 --build-arg ADDITIONAL_PYTHON_DEPS="mssql"
@@ -270,8 +364,11 @@ The following build arguments (``--build-arg`` in docker build command) can be u
 | ``AIRFLOW_ORG``                          | ``apache``                               | Github organisation from which Airflow   |
 |                                          |                                          | is installed (when installed from repo)  |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_REPO``                         | ``airflow``                              | Github repository from which Airflow is  |
-|                                          |                                          | installed (when installed from repo)     |
+| ``AIRFLOW_REPO``                         | ``apache/airflow``                       | the repository from which PIP            |
+|                                          |                                          | dependencies are pre-installed           |
+------------------------------------------+------------------------------------------+------------------------------------------+
+| ``AIRFLOW_BRANCH``                       | ``master``                               | the branch from which PIP dependencies   |
+|                                          |                                          | are pre-installed                        |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_GIT_REFERENCE``                | ``master``                               | reference (branch or tag) from Github    |
 |                                          |                                          | repository from which Airflow is         |
@@ -285,10 +382,10 @@ The following build arguments (``--build-arg`` in docker build command) can be u
 | ``AIRFLOW_EXTRAS``                       | (see Dockerfile)                         | Default extras with which airflow is     |
 |                                          |                                          | installed                                |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``ADDITIONAL_AIRFLOW_EXTRAS``            | ````                                     | Optional additional extras with which    |
+| ``ADDITIONAL_AIRFLOW_EXTRAS``            |                                          | Optional additional extras with which    |
 |                                          |                                          | airflow is installed                     |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``ADDITIONAL_PYTHON_DEPS``               | ````                                     | Optional python packages to extend       |
+| ``ADDITIONAL_PYTHON_DEPS``               |                                          | Optional python packages to extend       |
 |                                          |                                          | the image with some extra dependencies   |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_DEV_DEPS``                  | ````                                     | additional apt dev dependencies to       |
@@ -342,25 +439,20 @@ production image. There are three types of build:
 |                                   | the package or from GitHub URL.   |
 |                                   | See examples below                |
 +-----------------------------------+-----------------------------------+
-| ``ENTRYPOINT_FILE``               | Should point to entrypoint.sh     |
-|                                   | file in case of installation from |
-|                                   | the package or from GitHub URL.   |
-|                                   | See examples below                |
-+-----------------------------------+-----------------------------------+
 | ``AIRFLOW_WWW``                   | In case of Airflow 2.0 it should  |
 |                                   | be "www", in case of Airflow 1.10 |
 |                                   | series it should be "www_rbac".   |
 |                                   | See examples below                |
 +-----------------------------------+-----------------------------------+
 | ``AIRFLOW_SOURCES_FROM``          | Sources of Airflow. Set it to     |
-|                                   | "entrypoint.sh" to avoid costly   |
+|                                   | "empty" to avoid costly           |
 |                                   | Docker context copying            |
 |                                   | in case of installation from      |
 |                                   | the package or from GitHub URL.   |
 |                                   | See examples below                |
 +-----------------------------------+-----------------------------------+
 | ``AIRFLOW_SOURCES_TO``            | Target for Airflow sources. Set   |
-|                                   | to "/entrypoint" to avoid costly  |
+|                                   | to "/empty" to avoid costly       |
 |                                   | Docker context copying            |
 |                                   | in case of installation from      |
 |                                   | the package or from GitHub URL.   |
@@ -368,9 +460,10 @@ production image. There are three types of build:
 +-----------------------------------+-----------------------------------+


-This builds production image in version 3.6 with default extras from the local sources:
+This builds the production image in version 3.6 with default extras from the local sources (master version
+of 2.0 currently):

-.. code-block::
+.. code-block:: bash

   docker build .
@@ -379,46 +472,48 @@ requirements taken from v1-10-test branch in Github.
 Note that versions 1.10.9 and below have no requirements, so requirements should be taken from head of
 the 1.10.10 tag.

-.. code-block::
+.. code-block:: bash

   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
     --build-arg AIRFLOW_INSTALL_SOURCES="https://github.com/apache/airflow/archive/1.10.10.tar.gz#egg=apache-airflow" \
     --build-arg CONSTRAINT_REQUIREMENTS="https://raw.githubusercontent.com/apache/airflow/1.10.10/requirements/requirements-python3.7.txt" \
-    --build-arg ENTRYPOINT_FILE="https://raw.githubusercontent.com/apache/airflow/1.10.10/entrypoint.sh" \
-    --build-arg AIRFLOW_SOURCES_FROM="entrypoint.sh" \
-    --build-arg AIRFLOW_SOURCES_TO="/entrypoint"
+    --build-arg AIRFLOW_BRANCH="v1-10-test" \
+    --build-arg AIRFLOW_SOURCES_FROM="empty" \
+    --build-arg AIRFLOW_SOURCES_TO="/empty"

 This builds the production image in version 3.7 with default extras from 1.10.10 Pypi package and
-requirements taken from v1-10-test branch in Github.
+requirements taken from 1.10.10 tag in Github and pre-installed pip dependencies from the top
+of v1-10-test branch.

-.. code-block::
+.. code-block:: bash

   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
     --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.10" \
+    --build-arg AIRFLOW_BRANCH="v1-10-test" \
     --build-arg CONSTRAINT_REQUIREMENTS="https://raw.githubusercontent.com/apache/airflow/1.10.10/requirements/requirements-python3.7.txt" \
-    --build-arg ENTRYPOINT_FILE="https://raw.githubusercontent.com/apache/airflow/1.10.10/entrypoint.sh" \
-    --build-arg AIRFLOW_SOURCES_FROM="entrypoint.sh" \
-    --build-arg AIRFLOW_SOURCES_TO="/entrypoint"
+    --build-arg AIRFLOW_SOURCES_FROM="empty" \
+    --build-arg AIRFLOW_SOURCES_TO="/empty"

 This builds the production image in version 3.7 with additional airflow extras from 1.10.10 Pypi package and
-additional python dependencies.
+additional python dependencies and pre-installed pip dependencies from the top
+of v1-10-test branch.

-.. code-block::
+.. code-block:: bash

   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
     --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.10" \
+    --build-arg AIRFLOW_BRANCH="v1-10-test" \
     --build-arg CONSTRAINT_REQUIREMENTS="https://raw.githubusercontent.com/apache/airflow/1.10.10/requirements/requirements-python3.7.txt" \
-    --build-arg ENTRYPOINT_FILE="https://raw.githubusercontent.com/apache/airflow/1.10.10/entrypoint.sh" \
-    --build-arg AIRFLOW_SOURCES_FROM="entrypoint.sh" \
-    --build-arg AIRFLOW_SOURCES_TO="/entrypoint" \
+    --build-arg AIRFLOW_SOURCES_FROM="empty" \
+    --build-arg AIRFLOW_SOURCES_TO="/empty" \
     --build-arg ADDITIONAL_AIRFLOW_EXTRAS="mssql,hdfs" \
     --build-arg ADDITIONAL_PYTHON_DEPS="sshtunnel oauth2client"
@ -446,7 +541,7 @@ Image manifests

Together with the main CI images we also build and push image manifests. Those manifests are very small images
that contain only the results of the docker inspect for the image. This is in order to be able to
determine very quickly if the image in the docker registry has changed a lot since the last time.
Unfortunately docker registry (specifically dockerhub registry) has no anonymous way of querying image
Unfortunately docker registry (specifically DockerHub registry) has no anonymous way of querying image
details via API, you need to download the image to inspect it. We overcame this by building a very
small image manifest whenever we build the image, and pushing it to the registry together
with the main CI image. The tag for the manifest image is the same as for the image it refers
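
A minimal sketch of this idea follows. The image and file names here are illustrative only, not
the exact names Breeze uses internally - it shows how a tiny "-manifest" companion image can be
used to compare layer SHAs without pulling the full multi-gigabyte CI image:

.. code-block:: bash

    # Pull only the tiny manifest image, not the full CI image
    docker pull myorg/airflow:master-python3.6-ci-manifest

    # Extract the stored "docker inspect" output from the manifest image
    docker create --name manifest myorg/airflow:master-python3.6-ci-manifest
    docker cp manifest:master-python3.6-ci.json remote-manifest.json
    docker rm --force manifest

    # Compare layer SHAs of the locally built image with the remote manifest
    docker inspect --format '{{json .RootFS.Layers}}' myorg/airflow:master-python3.6-ci \
        | grep -o 'sha256:[a-f0-9]*' > local-layers.txt
    grep -o 'sha256:[a-f0-9]*' remote-manifest.json > remote-layers.txt
    diff local-layers.txt remote-layers.txt && echo "Image layers unchanged" || echo "Image layers differ"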
@ -465,29 +560,158 @@ You can do it via the ``--force-pull-images`` flag to force pulling the latest i

For production image:

.. code-block::
.. code-block:: bash

    ./breeze build-image --force-pull-images --production-image

For the CI image, Breeze automatically uses force pulling in case it determines that your image is very outdated,
however you can also force it with the same flag.

.. code-block::
.. code-block:: bash

    ./breeze build-image --force-pull-images

Using the images
================

Both images have entrypoint set as dumb-init with entrypoint.sh script executed (in order to forward
signals). This entrypoint works as follows:
Embedded image scripts
======================

Both images have a set of scripts that can be used in the image. Those are:

* /entrypoint - entrypoint script used when entering the image
* /clean-logs - script for periodic log cleaning (see the sketch below)
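
As a hedged illustration only (the image tag here is just an example), the embedded log-cleaning
script can also be invoked manually by overriding the image entrypoint:

.. code-block:: bash

    # Run the embedded /clean-logs script instead of the default entrypoint
    docker run -it --entrypoint /clean-logs apache/airflow:master-python3.6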
Running the CI image
====================

The entrypoint in the CI image contains all the initialisation needed for tests to be immediately executed.
It is copied from ``scripts/ci/in_container/entrypoint_ci.sh``.

The default behaviour is that you are dropped into a bash shell. However, if the RUN_TESTS variable is
set to "true", then the tests passed as arguments are executed (a sketch follows the list below).

The entrypoint performs those operations:

* checks if the environment is ready to test (including database and all integrations). It waits
  until all the components are ready to work

* installs an older version of Airflow (if an older version of Airflow is requested to be installed
  via the ``INSTALL_AIRFLOW_VERSION`` variable)

* Sets up Kerberos if Kerberos integration is enabled (generates and configures Kerberos token)

* Sets up ssh keys for ssh tests and restarts the SSH server

* Sets all variables and configurations needed for unit tests to run

* Reads additional variables set in ``files/airflow-breeze-config/variables.env`` by sourcing that file

* In case of CI run, sets parallelism to 2 to avoid an excessive number of processes running

* In case of CI run, sets default parameters for pytest

* In case of running integration/long_running/quarantined tests - it sets the right pytest flags

* Sets the default "tests" target in case the target is not explicitly set as an additional argument

* Runs system tests if the RUN_SYSTEM_TESTS flag is specified, otherwise runs regular unit and integration tests
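
A minimal sketch of the RUN_TESTS switch (the image tag is illustrative, and the backend services
the tests expect are normally started by Breeze via docker-compose, so running the bare image like
this is only an approximation):

.. code-block:: bash

    # Default behaviour: drop into a bash shell inside the CI image
    docker run -it apache/airflow:master-python3.6-ci

    # Run a specific test target instead, via the RUN_TESTS switch
    docker run -it -e RUN_TESTS="true" apache/airflow:master-python3.6-ci tests/test_core.py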
Using the PROD image
====================

The PROD image entrypoint works as follows:

* If the ``AIRFLOW__CORE__SQL_ALCHEMY_CONN`` variable is passed to the container and it is either a mysql or postgres
  SQL alchemy connection, then the connection is checked and the script waits until the database is reachable.

* If no ``AIRFLOW__CORE__SQL_ALCHEMY_CONN`` variable is set or if it is set to a sqlite SQL alchemy connection
  then db reset is executed.

* If the ``AIRFLOW__CELERY__BROKER_URL`` variable is passed and the scheduler, worker or flower command is used then
  the connection is checked and the script waits until the Celery broker database is reachable
  (see the sketch below).
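
For example, a hedged sketch of the database-wait behaviour described above (the connection string
and image tag are illustrative only):

.. code-block:: bash

    # The entrypoint waits for Postgres to become reachable before starting the webserver
    docker run -it \
        -e AIRFLOW__CORE__SQL_ALCHEMY_CONN="postgresql+psycopg2://airflow:airflow@postgres/airflow" \
        apache/airflow:master-python3.6 webserver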
* If first argument is equal to "bash" - you are dropped in bash shell.
* If there are any arguments they are passed to "airflow" command
* If the first argument equals "bash" - you are dropped into a bash shell, or the bash command is executed if you
  specify extra arguments. For example:

.. code-block:: bash

    docker run -it apache/airflow:master-python3.6 bash -c "ls -la"
    total 16
    drwxr-xr-x 4 airflow root 4096 Jun 5 18:12 .
    drwxr-xr-x 1 root root 4096 Jun 5 18:12 ..
    drwxr-xr-x 2 airflow root 4096 Jun 5 18:12 dags
    drwxr-xr-x 2 airflow root 4096 Jun 5 18:12 logs

* If the first argument is equal to "python" - you are dropped into a python shell, or python commands are executed if
  you pass extra parameters. For example:

.. code-block:: bash

    > docker run -it apache/airflow:master-python3.6 python -c "print('test')"
    test

* If there are any other arguments - they are passed to the "airflow" command

.. code-block:: bash

    > docker run -it apache/airflow:master-python3.6 --help

    usage: airflow [-h]
           {celery,config,connections,dags,db,info,kerberos,plugins,pools,roles,rotate_fernet_key,scheduler,sync_perm,tasks,users,variables,version,webserver}
           ...

    positional arguments:

      Groups:
        celery             Start celery components
        connections        List/Add/Delete connections
        dags               List and manage DAGs
        db                 Database operations
        pools              CRUD operations on pools
        roles              Create/List roles
        tasks              List and manage tasks
        users              CRUD operations on users
        variables          CRUD operations on variables

      Commands:
        config             Show current application configuration
        info               Show information about current Airflow and environment
        kerberos           Start a kerberos ticket renewer
        plugins            Dump information about loaded plugins
        rotate_fernet_key  Rotate encrypted connection credentials and variables
        scheduler          Start a scheduler instance
        sync_perm          Update permissions for existing roles and DAGs
        version            Show the version
        webserver          Start a Airflow webserver instance

    optional arguments:
      -h, --help           show this help message and exit
Alpha versions of 1.10.10 production-ready images
=================================================

The production images have been released for the first time in the 1.10.10 release of Airflow as "Alpha" quality
ones. Between the 1.10.10 and 1.10.11 releases the images are being improved and the 1.10.10 images should be
patched and published several times separately in order to test them with the upcoming Helm Chart.

Those images are for development and testing only and should not be used outside of the
development community.

The images were pushed with tags following the pattern: ``apache/airflow:1.10.10.1-alphaN-pythonX.Y``.
Patch level is an increasing number (starting from 1).

Those are alpha-quality releases, however they contain the officially released Airflow ``1.10.10`` code.
The main changes in the images are the scripts embedded in the images.

The following versions were pushed:

+-------+--------------------------------+----------------------------------------------------------+
| Patch | Tag pattern                    | Description                                              |
+=======+================================+==========================================================+
| 1     | ``1.10.10.1-alpha1-pythonX.Y`` | Support for parameters added to bash and python commands |
+-------+--------------------------------+----------------------------------------------------------+
| 2     | ``1.10.10.1-alpha2-pythonX.Y`` | Added "/clean-logs" script                               |
+-------+--------------------------------+----------------------------------------------------------+

The commits used to generate those images are tagged with ``prod-image-1.10.10.1-alphaN`` tags.
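
For example, to fetch the second alpha patch for Python 3.6 (assuming the tag is still available
on DockerHub):

.. code-block:: bash

    docker pull apache/airflow:1.10.10.1-alpha2-python3.6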
10  TESTING.rst
@ -131,23 +131,23 @@ Running Tests for a Specified Target Using Breeze from the Host
---------------------------------------------------------------

If you wish to only run tests and not to drop into shell, apply the
``-t``, ``--test-target`` flag. You can add extra pytest flags after ``--`` in the command line.
``tests`` command. You can add extra targets and pytest flags after the ``tests`` command.

.. code-block:: bash

    ./breeze test-target tests/hooks/test_druid_hook.py -- --logging-level=DEBUG
    ./breeze tests tests/hooks/test_druid_hook.py tests/test_core.py --logging-level=DEBUG

You can run the whole test suite with a special '.' test target:
You can run the whole test suite with a 'tests' test target:

.. code-block:: bash

    ./breeze test-target .
    ./breeze tests tests

You can also specify individual tests or a group of tests:

.. code-block:: bash

    ./breeze test-target tests/test_core.py::TestCore
    ./breeze tests tests/test_core.py::TestCore

Airflow Integration Tests
260  breeze
@ -103,12 +103,6 @@ function setup_default_breeze_variables() {

    # Determines if help should be run (set to true by --help flag)
    RUN_HELP="false"

    # Holds chosen command to run in case 'execute-command' command is used.
    RUN_COMMAND=""

    # Holds the test target if the 'test-target' command is used.
    TEST_TARGET=""

    # Holds docker compose command if the `docker-compose` command is used.
    DOCKER_COMPOSE_COMMAND=""
@ -426,12 +420,24 @@ function prepare_command_file() {
    local TESTS="${3}"
    local COMPOSE_FILE="${4}"
    local AIRFLOW_IMAGE="${5}"
    local EXPANSION="${6-@}"
    cat <<EOF > "${FILE}"
#!/usr/bin/env bash
cd "\$(pwd)" || exit
if [[ \${VERBOSE} == "true" ]]; then
  echo
  echo "Executing script:"
  echo
  echo "\${BASH_SOURCE[0]} \${@}"
  echo
  set -x
fi
cd "\$( dirname "\${BASH_SOURCE[0]}" )" || exit
export DOCKERHUB_USER=${DOCKERHUB_USER}
export DOCKERHUB_REPO=${DOCKERHUB_REPO}
HOST_USER_ID=\$(id -ur)
export HOST_USER_ID
HOST_GROUP_ID=\$(id -gr)
export HOST_GROUP_ID
export HOST_AIRFLOW_SOURCES="${AIRFLOW_SOURCES}"
export COMPOSE_FILE="${COMPOSE_FILE}"
export PYTHON_MAJOR_MINOR_VERSION="${PYTHON_MAJOR_MINOR_VERSION}"
export BACKEND="${BACKEND}"

@ -445,10 +451,10 @@ export MYSQL_HOST_PORT="${MYSQL_HOST_PORT}"
export MYSQL_VERSION="${MYSQL_VERSION}"
export AIRFLOW_SOURCES="${AIRFLOW_SOURCES}"
export AIRFLOW_CI_IMAGE="${AIRFLOW_CI_IMAGE}"
export AIRFOW_PROD_IMAGE="${AIRFLOW_PROD_IMAGE}"
export AIRFLOW_PROD_IMAGE="${AIRFLOW_PROD_IMAGE}"
export AIRFLOW_IMAGE="${AIRFLOW_IMAGE}"
export SQLITE_URL="${SQLITE_URL}"
docker-compose --log-level INFO ${CMD}\$${EXPANSION}"
docker-compose --log-level INFO ${CMD}
EOF
    chmod u+x "${FILE}"
}
@ -501,43 +507,22 @@ function prepare_command_files() {
    export COMPOSE_CI_FILE
    export COMPOSE_PROD_FILE

    CI_ENTRYPOINT_FILE="/opt/airflow/scripts/ci/in_container/entrypoint_ci.sh"
    PROD_ENTRYPOINT_FILE="/entrypoint"

    # Base python image for the build
    export PYTHON_BASE_IMAGE=python:${PYTHON_MAJOR_MINOR_VERSION}-slim-buster
    export AIRFLOW_CI_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci"
    export AIRFLOW_PROD_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}"
    export BUILT_IMAGE_FLAG_FILE="${BUILD_CACHE_DIR}/${BRANCH_NAME}/.built_${PYTHON_MAJOR_MINOR_VERSION}"

    DC_RUN_CI_COMMAND="run --service-ports --rm airflow \"${CI_ENTRYPOINT_FILE} "
    DC_RUN_PROD_COMMAND="run --service-ports --rm airflow \"${PROD_ENTRYPOINT_FILE} "

    LAST_DC_RUN_CI_FILE="cmd_run_ci"
    LAST_DC_RUN_PROD_FILE="cmd_run_prod"
    LAST_DC_TEST_CI_FILE="test_run_ci"
    LAST_DC_CI_FILE="dc_ci"
    LAST_DC_PROD_FILE="dc_prod"

    # Prepare script for "run ci command"
    prepare_command_file "${BUILD_CACHE_DIR}/${LAST_DC_RUN_CI_FILE}" \
        "${DC_RUN_CI_COMMAND}" "false" "${COMPOSE_CI_FILE}" "${AIRFLOW_CI_IMAGE}" '*'

    # Prepare script for "run prod command"
    prepare_command_file "${BUILD_CACHE_DIR}/${LAST_DC_RUN_PROD_FILE}" \
        "${DC_RUN_PROD_COMMAND}" "false" "${COMPOSE_PROD_FILE}" "${AIRFLOW_PROD_IMAGE}" '*'

    # Prepare script for "run test"
    prepare_command_file "${BUILD_CACHE_DIR}/${LAST_DC_TEST_CI_FILE}" \
        "${DC_RUN_CI_COMMAND}" "true" "${COMPOSE_CI_FILE}" "${AIRFLOW_CI_IMAGE}" '*'

    # Prepare script for "run docker compose command"
    # Prepare script for "run docker compose CI command"
    prepare_command_file "${BUILD_CACHE_DIR}/${LAST_DC_CI_FILE}" \
        '"' "false" "${COMPOSE_CI_FILE}" "${AIRFLOW_CI_IMAGE}"
        "\"\${@}\"" "false" "${COMPOSE_CI_FILE}" "${AIRFLOW_CI_IMAGE}"

    # Prepare script for "run docker compose prod command"
    # Prepare script for "run docker compose PROD command"
    prepare_command_file "${BUILD_CACHE_DIR}/${LAST_DC_PROD_FILE}" \
        '"' "false" "${COMPOSE_PROD_FILE}" "${AIRFLOW_PROD_IMAGE}"
        "\"\${@}\"" "false" "${COMPOSE_PROD_FILE}" "${AIRFLOW_PROD_IMAGE}"
}

# Prints detailed help for all commands and flags. Used to generate documentation added to BREEZE.rst
@ -746,11 +731,11 @@ function parse_arguments() {
            echo
            export FORWARD_CREDENTIALS="true"
            shift 1 ;;
        -c|--registry-cache)
        -c|--github-registry)
            echo
            echo "Use cache for the container registry"
            echo "Use github registry"
            echo
            export ENABLE_REGISTRY_CACHE="true"
            export USE_GITHUB_REGISTRY="true"
            shift ;;
        -G|--github-organisation)
            echo
@ -840,10 +825,6 @@ function parse_arguments() {
            fi
            COMMAND_TO_RUN="run_docker_compose"
            ;;
        execute-command)
            LAST_SUBCOMMAND="${1}"
            COMMAND_TO_RUN="run_in_bash"
            shift ;;
        generate-requirements)
            LAST_SUBCOMMAND="${1}"
            COMMAND_TO_RUN="perform_generate_requirements"
@ -863,7 +844,7 @@ function parse_arguments() {
        push-image)
            LAST_SUBCOMMAND="${1}"
            COMMAND_TO_RUN="perform_push_image"
            SKIP_CHECK_REMOTE_IMAGE="true"
            export SKIP_CHECK_REMOTE_IMAGE="true"
            shift ;;
        initialize-local-virtualenv)
            LAST_SUBCOMMAND="${1}"
@ -914,33 +895,6 @@ function parse_arguments() {
                shift 2
            fi
            ;;
        static-check-all-files)
            LAST_SUBCOMMAND="${1}"
            COMMAND_TO_RUN="perform_static_checks"
            if [[ "$#" -lt 2 ]]; then
                if [[ ${RUN_HELP} != "true" ]]; then
                    echo "You should specify static check that you would like to run or 'all' to run all checks."
                    echo
                    echo "One of :"
                    echo
                    echo "${_BREEZE_ALLOWED_STATIC_CHECKS:=}"
                    echo
                    echo "For example:"
                    echo
                    echo "${CMDNAME} static-check-all-files mypy"
                    echo
                    exit 1
                else
                    shift
                fi
            else
                export PYTHON_MAJOR_MINOR_VERSION=${STATIC_CHECK_PYTHON_MAJOR_MINOR_VERSION}
                export STATIC_CHECK="${2:-}"
                export STATIC_CHECK_ALL_FILES="true"
                EXTRA_STATIC_CHECK_OPTIONS+=("--all-files" "--show-diff-on-failure")
                shift 2
            fi
            ;;
        stop)
            LAST_SUBCOMMAND="${1}"
            COMMAND_TO_RUN="run_docker_compose"

@ -955,14 +909,11 @@ function parse_arguments() {
            SECOND_COMMAND_TO_RUN="enter_breeze"
            echo "Restarts the environment. Includes emptying the databases."
            shift ;;
        test-target)
        tests)
            LAST_SUBCOMMAND="${1}"
            if [[ $# -lt 2 ]]; then
                RUN_HELP="true"
                shift
            else
                export TEST_TARGET="${2}"
                shift 2
            fi
            COMMAND_TO_RUN="run_tests" ;;
        toggle-suppress-cheatsheet)
@ -1062,7 +1013,6 @@ function prepare_usage() {
    export USAGE_BUILD_IMAGE="Builds CI or Production docker image"
    export USAGE_CLEANUP_IMAGE="Cleans up the container image created"
    export USAGE_DOCKER_COMPOSE="Executes specified docker-compose command"
    export USAGE_EXECUTE_COMMAND="Executes specified command in the container"
    export USAGE_FLAGS="Shows all breeze's flags"
    export USAGE_GENERATE_REQUIREMENTS="Generates pinned requirements for pip dependencies"
    export USAGE_INITIALIZE_LOCAL_VIRTUALENV="Initializes local virtualenv"
@ -1077,14 +1027,14 @@ function prepare_usage() {
    export USAGE_STATIC_CHECK_ALL_FILES="Performs selected static check for all files"
    export USAGE_TOGGLE_SUPPRESS_CHEATSHEET="Toggles on/off cheatsheet"
    export USAGE_TOGGLE_SUPPRESS_ASCIIART="Toggles on/off asciiart"
    export USAGE_TEST_TARGET="Runs selected test target in the container"
    export USAGE_TESTS="Runs selected tests in the container"
    export USAGE_HELP="Shows this help message"
    export USAGE_HELP_ALL="Shows detailed help for all commands and flags"

    # shellcheck disable=SC2089
    DETAILED_USAGE_SHELL="
${CMDNAME} shell [FLAGS] -- <EXTRA_ARGS>
${CMDNAME} shell [FLAGS] [-- <EXTRA_ARGS>]

        This is the default subcommand if no subcommand is used.

@ -1101,13 +1051,18 @@ ${CMDNAME} shell [FLAGS] -- <EXTRA_ARGS>
        and webserver ports are forwarded to appropriate database/webserver so that you can
        connect to it from your host environment.

        You can also pass <EXTRA_ARGS> after -- they will be passed as bash parameters, this is
        especially useful to pass bash options, for example -c to execute a command:

        '${CMDNAME} shell -- -c \"ls -la\"'

Flags:
$(flag_footer)
"
    # shellcheck disable=SC2090
    export DETAILED_USAGE_SHELL
    export DETAILED_USAGE_EXEC="
${CMDNAME} exec
${CMDNAME} exec [-- <EXTRA_ARGS>]

        Execs into interactive shell to an already running container. The container must be started
        already by breeze shell command. If you are not familiar with tmux, this is the best
@ -1159,7 +1114,7 @@ $(flag_verbosity)
    export DETAILED_USAGE_CLEANUP_IMAGE
    # shellcheck disable=SC2089
    DETAILED_USAGE_DOCKER_COMPOSE="
${CMDNAME} docker-compose [FLAGS] COMMAND -- <EXTRA_ARGS>
${CMDNAME} docker-compose [FLAGS] COMMAND [-- <EXTRA_ARGS>]

        Run docker-compose command instead of entering the environment. Use 'help' as command
        to see available commands. The <EXTRA_ARGS> passed after -- are treated
@ -1174,31 +1129,12 @@ $(flag_verbosity)
"
    # shellcheck disable=SC2090
    export DETAILED_USAGE_DOCKER_COMPOSE
    # shellcheck disable=SC2089
    DETAILED_USAGE_EXECUTE_COMMAND="
${CMDNAME} execute-command [FLAGS] COMMAND -- <EXTRA_ARGS>

        Run chosen command instead of entering the environment. The command is run using
        'bash -c \"<command with args>\" if you need to pass arguments to your command, you need
        to pass them together with command surrounded with \" or '. Alternatively you can
        pass arguments as <EXTRA_ARGS> passed after --. For example:

        '${CMDNAME} execute-command \"ls -la\"' or
        '${CMDNAME} execute-command ls -- --la'

Flags:
$(flag_airflow_variants)
$(flag_backend_variants)
$(flag_verbosity)
"
    # shellcheck disable=SC2090
    export DETAILED_USAGE_EXECUTE_COMMAND
    export DETAILED_USAGE_FLAGS="
        Explains in detail all the flags that can be used with breeze.
"
    # shellcheck disable=SC2089
    DETAILED_USAGE_PREPARE_BACKPORT_README="
${CMDNAME} prepare-backport-packages [FLAGS] -- [YYYY.MM.DD] [PACKAGE_ID ...]
${CMDNAME} prepare-backport-packages [FLAGS] [YYYY.MM.DD] [PACKAGE_ID ...]

        Prepares README.md files for backport packages. You can provide (after --) optional version
        in the form of YYYY.MM.DD, optionally followed by the list of packages to generate readme for.

@ -1209,12 +1145,12 @@ ${CMDNAME} prepare-backport-packages [FLAGS] -- [YYYY.MM.DD] [PACKAGE_ID ...]
        Examples:

        '${CMDNAME} prepare-backport-readme' or
        '${CMDNAME} prepare-backport-readme -- 2020.05.10' or
        '${CMDNAME} prepare-backport-readme -- 2020.05.10 https google amazon'
        '${CMDNAME} prepare-backport-readme 2020.05.10' or
        '${CMDNAME} prepare-backport-readme 2020.05.10 https google amazon'

        General form:

        '${CMDNAME} prepare-backport-readme -- YYYY.MM.DD <PACKAGE_ID> ...'
        '${CMDNAME} prepare-backport-readme YYYY.MM.DD <PACKAGE_ID> ...'

        * YYYY.MM.DD - is the CALVER version of the package to prepare. Note that this date
          cannot be earlier than the already released version (the script will fail if it
@ -1260,7 +1196,7 @@ $(flag_airflow_variants)
    export DETAILED_USAGE_INITIALIZE_LOCAL_VIRTUALENV
    # shellcheck disable=SC2089
    DETAILED_USAGE_PREPARE_BACKPORT_PACKAGES="
${CMDNAME} prepare-backport-packages [FLAGS] -- [PACKAGE_ID ...]
${CMDNAME} prepare-backport-packages [FLAGS] [PACKAGE_ID ...]

        Prepares backport packages. You can provide (after --) optional list of packages to prepare.
        If no packages are specified, readme for all packages are generated. You can specify optional

@ -1270,14 +1206,14 @@ ${CMDNAME} prepare-backport-packages [FLAGS] -- [PACKAGE_ID ...]
        Examples:

        '${CMDNAME} prepare-backport-packages' or
        '${CMDNAME} prepare-backport-packages -- google' or
        '${CMDNAME} prepare-backport-packages --version-suffix-for-svn rc1 -- http google amazon' or
        '${CMDNAME} prepare-backport-packages --version-suffix-for-pypi rc1 -- http google amazon'
        '${CMDNAME} prepare-backport-packages google' or
        '${CMDNAME} prepare-backport-packages --version-suffix-for-svn rc1 http google amazon' or
        '${CMDNAME} prepare-backport-packages --version-suffix-for-pypi rc1 http google amazon'

        General form:

        '${CMDNAME} prepare-backport-packages \\
            [--version-suffix-for-svn|--version-suffix-for-pypi] -- <PACKAGE_ID> ...'
            [--version-suffix-for-svn|--version-suffix-for-pypi] <PACKAGE_ID> ...'

        * <PACKAGE_ID> is usually directory in the airflow/providers folder (for example
          'google'), but in several cases, it might be one level deeper separated with '.'
@ -1294,7 +1230,7 @@ $(flag_verbosity)
${CMDNAME} push_image [FLAGS]

        Pushes images to docker registry. You can push the images to DockerHub registry (default)
        or to the GitHub cache registry (if --registry-cache flag is used).
        or to the GitHub registry (if --github-registry flag is used).

        For DockerHub pushes --dockerhub-user and --dockerhub-repo flags can be used to specify
        the repository to push to. For GitHub repository --github-organisation and --github-repo

@ -1307,8 +1243,8 @@ ${CMDNAME} push_image [FLAGS]
        '${CMDNAME} push-image' or
        '${CMDNAME} push-image --dockerhub-user user' to push to your private registry or
        '${CMDNAME} push-image --production-image' - to push production image or
        '${CMDNAME} push-image --registry-cache' - to push to GitHub cache or
        '${CMDNAME} push-image --registry-cache --github-organisation org' - for other organisation
        '${CMDNAME} push-image --github-registry' - to push to GitHub image registry or
        '${CMDNAME} push-image --github-registry --github-organisation org' - for other organisation

Flags:
$(flag_pull_push_docker_images)
@ -1359,7 +1295,7 @@ $(flag_footer)
"
    export DETAILED_USAGE_RESTART
    export DETAILED_USAGE_STATIC_CHECK="
${CMDNAME} static-check [FLAGS] STATIC_CHECK
${CMDNAME} static-check [FLAGS] STATIC_CHECK [-- <EXTRA_ARGS>]

        Run selected static checks for currently changed files. You should specify static check that
        you would like to run or 'all' to run all checks. One of:

@ -1371,44 +1307,29 @@ ${FORMATTED_STATIC_CHECKS}

        '${CMDNAME} static-check mypy' or
        '${CMDNAME} static-check mypy -- --files tests/core.py'
        '${CMDNAME} static-check mypy -- --all-files'

        You can see all the options by adding --help EXTRA_ARG:

        '${CMDNAME} static-check mypy -- --help'
"
    export DETAILED_USAGE_STATIC_CHECK_ALL_FILES="
${CMDNAME} static-check-all [FLAGS] STATIC_CHECK

        Run selected static checks for all applicable files. You should specify static check that
        you would like to run or 'all' to run all checks. One of:

${FORMATTED_STATIC_CHECKS}

        You can pass extra arguments including options to the pre-commit framework as
        <EXTRA_ARGS> passed after --. For example:

        '${CMDNAME} static-check-all-files mypy' or
        '${CMDNAME} static-check-all-files mypy -- --verbose'

        You can see all the options by adding --help EXTRA_ARG:

        '${CMDNAME} static-check-all-files mypy -- --help'
"
    # shellcheck disable=SC2089
    DETAILED_USAGE_TEST_TARGET="
${CMDNAME} test-target [FLAGS] TEST_TARGET -- <EXTRA_ARGS>
    DETAILED_USAGE_TESTS="
${CMDNAME} tests [FLAGS] [TEST_TARGET ..] [-- <EXTRA_ARGS>]

        Run the specified unit test target. There might be multiple
        targets specified, separated with commas. The <EXTRA_ARGS> passed after -- are treated
        as additional options passed to pytest. You can pass 'tests' as the target to
        run all tests. For example:

        '${CMDNAME} test-target tests/test_core.py -- --logging-level=DEBUG'
        '${CMDNAME} tests tests/test_core.py -- --logging-level=DEBUG'
        '${CMDNAME} tests tests'

Flags:
$(flag_footer)
"
    # shellcheck disable=SC2090
    export DETAILED_USAGE_TEST_TARGET
    export DETAILED_USAGE_TESTS
    export DETAILED_USAGE_TOGGLE_SUPPRESS_CHEATSHEET="
${CMDNAME} toggle-suppress-cheatsheet
@ -1507,6 +1428,10 @@ function flag_airflow_variants() {
    echo "
-p, --python <PYTHON_MAJOR_MINOR_VERSION>
        Python version used for the image. This is always major/minor version.

        Note that versions 2.7 and 3.5 are only valid when installing Airflow 1.10 with
        --install-airflow-version or --install-airflow-reference flags.

        One of:

${FORMATTED_PYTHON_MAJOR_MINOR_VERSIONS}
@ -1705,10 +1630,10 @@ function flag_pull_push_docker_images() {
-H, --dockerhub-repo
        DockerHub repository used to pull, push, build images. Default: ${_BREEZE_DEFAULT_DOCKERHUB_REPO:=}.

-c, --registry-cache
        If registry cache is enabled, pulls and pushes are done from the registry cache in github.
        You need to be logged in to the registry in order to be able to pull/push from it and you
        need to be committer to push to airflow registry.
-c, --github-registry
        If GitHub registry is enabled, pulls and pushes are done from the GitHub registry, not
        DockerHub. You need to be logged in to the registry in order to be able to pull/push from it,
        and you need to be a committer to push to Apache Airflow's GitHub registry.

-G, --github-organisation
        GitHub organisation used to pull, push images when cache is used. Default: ${_BREEZE_DEFAULT_GITHUB_ORGANISATION:=}.
@ -1857,6 +1782,29 @@ function read_saved_environment_variables {
# sessions.
function check_and_save_all_params() {
    check_and_save_allowed_param "PYTHON_MAJOR_MINOR_VERSION" "Python version" "--python"

    if [[ "${INSTALL_AIRFLOW_REFERENCE:=}" != "" ]]; then
        if [[ ${INSTALL_AIRFLOW_REFERENCE} == *1_10* ]]; then
            export BRANCH_NAME="v1-10-test"
        fi
    elif [[ "${INSTALL_AIRFLOW_VERSION:=}" != "" ]]; then
        if [[ ${INSTALL_AIRFLOW_VERSION} == *1.10* ]]; then
            export BRANCH_NAME="v1-10-test"
        fi
    fi

    if [[ ${PYTHON_MAJOR_MINOR_VERSION} == "2.7" || ${PYTHON_MAJOR_MINOR_VERSION} == "3.5" ]]; then
        if [[ ${BRANCH_NAME} == "master" ]]; then
            echo >&2
            echo >&2 "The ${PYTHON_MAJOR_MINOR_VERSION} can only be used when installing Airflow 1.10.*"
            echo >&2
            echo >&2 "You can use it only when you specify 1.10 Airflow via --install-airflow-version"
            echo >&2 "or --install-airflow-reference and they point to 1.10 version of Airflow"
            echo >&2
            exit 1
        fi
    fi

    check_and_save_allowed_param "BACKEND" "backend" "--backend"
    check_and_save_allowed_param "KUBERNETES_MODE" "Kubernetes mode" "--kubernetes-mode"
    check_and_save_allowed_param "KUBERNETES_VERSION" "Kubernetes version" "--kubernetes-version"
@ -1877,25 +1825,6 @@ function print_cheatsheet() {
        echo
        echo " Airflow Breeze CHEATSHEET"
        echo
        print_line
        echo
        echo
        print_line
        echo
        echo " Bash scripts to run commands quickly:"
        echo
        echo " * Enter the CI environment          : ${BUILD_CACHE_DIR}/${LAST_DC_RUN_CI_FILE}"
        echo " * Enter the production environment  : ${BUILD_CACHE_DIR}/${LAST_DC_RUN_PROD_FILE}"
        echo " * Run command in CI environment     : ${BUILD_CACHE_DIR}/${LAST_DC_RUN_CI_FILE} "\
            "[command with args] [bash options]"
        echo " * Run tests in CI environment       : ${BUILD_CACHE_DIR}/${LAST_DC_TEST_CI_FILE} "\
            "[test target] [pytest options]"
        echo " * Run docker-compose CI command     : ${BUILD_CACHE_DIR}/${LAST_DC_CI_FILE} "\
            "[docker compose command] [docker-compose options]"
        echo " * Run docker-compose production cmd : ${BUILD_CACHE_DIR}/${LAST_DC_PROD_FILE} "\
            "[docker compose command] [docker-compose options]"
        echo

    set +e
    if ! command -v breeze; then
        print_line
@ -2009,7 +1938,7 @@ function run_static_checks {
# command chosen
function run_build_command {
    case "${COMMAND_TO_RUN}" in
        run_tests|run_docker_compose|run_in_bash)
        run_tests|run_docker_compose)
            prepare_ci_build
            rebuild_ci_image_if_needed
            ;;
@ -2084,9 +2013,9 @@ function run_breeze_command {
    case "${COMMAND_TO_RUN}" in
        enter_breeze)
            if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
                "${BUILD_CACHE_DIR}/${LAST_DC_RUN_PROD_FILE}"
                "${BUILD_CACHE_DIR}/${LAST_DC_PROD_FILE}" run --service-ports --rm airflow "${@}"
            else
                "${BUILD_CACHE_DIR}/${LAST_DC_RUN_CI_FILE}"
                "${BUILD_CACHE_DIR}/${LAST_DC_CI_FILE}" run --service-ports --rm airflow "${@}"
            fi
            ;;
        run_exec)
@ -2102,10 +2031,10 @@ function run_breeze_command {
            : "${AIRFLOW_TESTING_CONTAINER:?"ERROR! Breeze must be running in order to exec into running container"}"
            set -e
            docker exec -it "${AIRFLOW_TESTING_CONTAINER}" \
                "/opt/airflow/scripts/ci/in_container/entrypoint_exec.sh"
                "/opt/airflow/scripts/ci/in_container/entrypoint_exec.sh" "${@}"
            ;;
        run_tests)
            "${BUILD_CACHE_DIR}/${LAST_DC_TEST_CI_FILE}" "\"${TEST_TARGET}\"" "$@"
            "${BUILD_CACHE_DIR}/${LAST_DC_CI_FILE}" run --service-ports --rm airflow "$@"
            ;;
        run_docker_compose)
            set +u
@ -2117,16 +2046,13 @@ function run_breeze_command {
            "${DC_FILE}" "${DOCKER_COMPOSE_COMMAND}" "${EXTRA_DC_OPTIONS[@]}" "$@"
            set -u
            ;;
        run_in_bash)
            "${BUILD_CACHE_DIR}/${LAST_DC_RUN_CI_FILE}" "${RUN_COMMAND}" "$@"
            ;;
        perform_static_checks)
            make_sure_precommit_is_installed
            run_static_checks "$@"
            run_static_checks "${@}"
            ;;
        build_image)
            ;;
        cleanup_image)
            remove_images
            ;;
        perform_generate_requirements)
@ -17,7 +17,7 @@
# specific language governing permissions and limitations
# under the License.

_BREEZE_ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS="3.6 3.7 3.8"
_BREEZE_ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS="2.7 3.5 3.6 3.7 3.8"
_BREEZE_ALLOWED_BACKENDS="sqlite mysql postgres"
_BREEZE_ALLOWED_INTEGRATIONS="cassandra kerberos mongo openldap presto rabbitmq redis"
_BREEZE_ALLOWED_KUBERNETES_MODES="image git"

@ -110,7 +110,7 @@ kubernetes-mode: kubernetes-version:
skip-mounting-local-sources install-airflow-version: install-airflow-reference: db-reset
verbose assume-yes assume-no assume-quit forward-credentials
force-build-images force-pull-images production-image extras: force-clean-images use-local-cache
dockerhub-user: dockerhub-repo: registry-cache github-organisation: github-repo:
dockerhub-user: dockerhub-repo: github-registry github-organisation: github-repo:
postgres-version: mysql-version:
version-suffix-for-pypi: version-suffix-for-svn:
additional-extras: additional-python-deps: additional-dev-deps: additional-runtime-deps:
@ -123,11 +123,8 @@ build-image
cleanup-image
exec
generate-requirements
prepare-backport-readme
prepare-backport-packages
push-image
initialize-local-virtualenv
kind-cluster
setup-autocomplete
stop
restart

@ -136,11 +133,11 @@ toggle-suppress-asciiart"

export BREEZE_EXTRA_ARG_COMMANDS="
docker-compose
execute-command
kind-cluster
prepare-backport-readme
prepare-backport-packages
static-check
static-check-all-files
test-target"
tests"

export BREEZE_HELP_COMMANDS="
flags
@ -70,7 +70,7 @@ separately.
When you want to prepare release notes for a package, you need to run:

```
./breeze prepare-backport-readme -- [YYYY.MM.DD] <PACKAGE_ID> ...
./breeze prepare-backport-readme [YYYY.MM.DD] <PACKAGE_ID> ...
```
@ -89,14 +89,14 @@ merged in the master recently, they will be automatically taken into account.
Typically, the first time you run a release before the release, you run it with the target release date:

```
./breeze prepare-backport-readme -- 2020.05.20 google
./breeze prepare-backport-readme 2020.05.20 google
```

Then while you iterate with merges and release candidates you update the release notes without providing
the date (to update the existing release notes):

```
./breeze prepare-backport-readme -- google
./breeze prepare-backport-readme google
```
@ -161,37 +161,37 @@ omitting the package ids altogether.
* To build the release candidate packages for SVN Apache upload run the following command:

```bash
./breeze prepare-backport-packages --version-suffix-for-svn=rc1 -- [PACKAGE_ID] ...
./breeze prepare-backport-packages --version-suffix-for-svn=rc1 [PACKAGE_ID] ...
```

for example:

```bash
./breeze prepare-backport-packages --version-suffix-for-svn=rc1 -- http ...
./breeze prepare-backport-packages --version-suffix-for-svn=rc1 http ...
```

* To build the release candidate packages for PyPI upload run the following command:

```bash
./breeze prepare-backport-packages --version-suffix-for-pypi=rc1 -- [PACKAGE_ID] ...
./breeze prepare-backport-packages --version-suffix-for-pypi=rc1 [PACKAGE_ID] ...
```

for example:

```bash
./breeze prepare-backport-packages --version-suffix-for-pypi=rc1 -- http ...
./breeze prepare-backport-packages --version-suffix-for-pypi=rc1 http ...
```

* To build the final release packages run the following command:

```bash
./breeze prepare-backport-packages -- [PACKAGE_ID] ...
./breeze prepare-backport-packages [PACKAGE_ID] ...
```

for example:

```bash
./breeze prepare-backport-packages -- http ...
./breeze prepare-backport-packages http ...
```

* For each package, this creates a wheel package and source distribution package in your `dist` folder with

@ -284,7 +284,7 @@ It will generate `apache-airflow-backport-providers-${VERSION}-source.tar.gz`
if you only build a few packages, run:

```bash
./breeze prepare-backport-packages --version-suffix-for-svn rc1 -- PACKAGE PACKAGE ....
./breeze prepare-backport-packages --version-suffix-for-svn rc1 PACKAGE PACKAGE ....
```

* Move the source tarball to dist folder

@ -394,7 +394,7 @@ this will clean up dist folder before generating the packages so you will only h
if you only build a few packages, run:

```bash
./breeze prepare-backport-packages --version-suffix-for-pypi rc1 -- PACKAGE PACKAGE ....
./breeze prepare-backport-packages --version-suffix-for-pypi rc1 PACKAGE PACKAGE ....
```

* Verify the artifacts that would be uploaded:
@ -25,11 +25,7 @@ function run_airflow_testing_in_docker() {
        -f "${MY_DIR}/docker-compose/backend-${BACKEND}.yml" \
        "${INTEGRATIONS[@]}" \
        "${DOCKER_COMPOSE_LOCAL[@]}" \
        run airflow \
        '/opt/airflow/scripts/ci/in_container/entrypoint_ci.sh "${@}"' \
        /opt/airflow/scripts/ci/in_container/entrypoint_ci.sh "${@}"
        # Note the command is there twice (!) because it is passed via bash -c
        # and bash -c starts passing parameters from $0. TODO: fixme
        run airflow "${@}"
    set -u
}
@ -19,16 +19,9 @@ version: "2.2"
services:
  airflow:
    image: ${AIRFLOW_IMAGE}
    init: true
    entrypoint: ["/bin/bash", "-c"]
    environment:
      - USER=root
      - ADDITIONAL_PATH=~/.local/bin
      - HADOOP_DISTRO=cdh
      - HADOOP_HOME=/opt/hadoop-cdh
      - HADOOP_OPTS=-D/opt/krb5.conf
      - HIVE_HOME=/opt/hive
      - MINICLUSTER_HOME=/opt/minicluster
      - CELERY_BROKER_URLS=amqp://guest:guest@rabbitmq:5672,redis://redis:6379/0
      - BACKEND
      - CI
@ -31,7 +31,7 @@ services:
      - ../../../.kube:/root/.kube:cached
      - ../../../files:/files:cached
      - ../../../dist:/dist:cached
      - ../../../scripts/ci/in_container/entrypoint_ci.sh:/entrypoint_ci.sh:cached
      - ../../../scripts/prod/entrypoint_prod.sh:/entrypoint:cached
      - ../../../setup.cfg:/opt/airflow/setup.cfg:cached
      - ../../../setup.py:/opt/airflow/setup.py:cached
      - ../../../tests:/opt/airflow/tests:cached
@ -18,6 +18,8 @@
version: "2.2"
services:
  airflow:
    stdin_open: true  # docker run -i
    tty: true  # docker run -t
    # We need to mount files and directories individually because some files
    # such as apache_airflow.egg-info should not be mounted from host
    # we only mount those files that it makes sense to edit while developing

@ -51,7 +53,7 @@ services:
      - ../../../pytest.ini:/opt/airflow/pytest.ini:cached
      - ../../../requirements:/opt/airflow/requirements:cached
      - ../../../scripts:/opt/airflow/scripts:cached
      - ../../../scripts/ci/in_container/entrypoint_ci.sh:/entrypoint_ci.sh:cached
      - ../../../scripts/ci/in_container/entrypoint_ci.sh:/entrypoint:cached
      - ../../../setup.cfg:/opt/airflow/setup.cfg:cached
      - ../../../setup.py:/opt/airflow/setup.py:cached
      - ../../../tests:/opt/airflow/tests:cached
@ -20,7 +20,7 @@ if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then
fi

# shellcheck source=scripts/ci/in_container/_in_container_script_init.sh
. "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh"
. /opt/airflow/scripts/ci/in_container/_in_container_script_init.sh

AIRFLOW_SOURCES=$(cd "${MY_DIR}/../../.." || exit 1; pwd)

@ -41,8 +41,6 @@ fi

echo

ARGS=( "$@" )

RUN_TESTS=${RUN_TESTS:="true"}
INSTALL_AIRFLOW_VERSION="${INSTALL_AIRFLOW_VERSION:=""}"

@ -86,15 +84,6 @@ fi

export RUN_AIRFLOW_1_10=${RUN_AIRFLOW_1_10:="false"}

export HADOOP_DISTRO="${HADOOP_DISTRO:="cdh"}"
export HADOOP_HOME="${HADOOP_HOME:="/opt/hadoop-cdh"}"

if [[ ${VERBOSE} == "true" ]]; then
    echo
    echo "Using ${HADOOP_DISTRO} distribution of Hadoop from ${HADOOP_HOME}"
    echo
fi

# Added to have run-tests on path
export PATH=${PATH}:${AIRFLOW_SOURCES}

@ -134,10 +123,6 @@ if [[ ${INTEGRATION_KERBEROS:="false"} == "true" ]]; then
fi

# Start MiniCluster
java -cp "/opt/minicluster-1.1-SNAPSHOT/*" com.ing.minicluster.MiniCluster \
    >"${AIRFLOW_HOME}/logs/minicluster.log" 2>&1 &

# Set up ssh keys
echo 'yes' | ssh-keygen -t rsa -C your_email@youremail.com -m PEM -P '' -f ~/.ssh/id_rsa \
    >"${AIRFLOW_HOME}/logs/ssh-keygen.log" 2>&1

@ -160,25 +145,13 @@ ssh-keyscan -H localhost >> ~/.ssh/known_hosts 2>/dev/null
# shellcheck source=scripts/ci/in_container/configure_environment.sh
. "${MY_DIR}/configure_environment.sh"

if [[ ${CI:=} == "true" && ${RUN_TESTS} == "true" ]] ; then
    echo
    echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
    echo " Setting default parallellism to 2 because we can run out of memory during tests on CI"
    echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
    echo
    export AIRFLOW__CORE__PARALELLISM=2
fi
cd "${AIRFLOW_SOURCES}"

set +u
# If we do not want to run tests, we simply drop into bash
if [[ "${RUN_TESTS}" == "false" ]]; then
    if [[ ${#ARGS} == 0 ]]; then
        exec /bin/bash
    else
        exec /bin/bash -c "$(printf "%q " "${ARGS[@]}")"
    fi
if [[ "${RUN_TESTS:=false}" != "true" ]]; then
    exec /bin/bash "${@}"
fi

set -u

if [[ "${CI}" == "true" ]]; then

@ -199,10 +172,11 @@ else
    CI_ARGS=()
fi

TESTS_TO_RUN="tests/"
declare -a TESTS_TO_RUN
TESTS_TO_RUN=("tests")

if [[ ${#@} -gt 0 && -n "$1" ]]; then
    TESTS_TO_RUN="$1"
    TESTS_TO_RUN=("${@}")
fi

if [[ -n ${RUN_INTEGRATION_TESTS:=""} ]]; then

@ -227,7 +201,7 @@ elif [[ ${ONLY_RUN_QUARANTINED_TESTS:=""} == "true" ]]; then
    "--timeout" "90")
fi

ARGS=("${CI_ARGS[@]}" "${TESTS_TO_RUN}")
ARGS=("${CI_ARGS[@]}" "${TESTS_TO_RUN[@]}")

if [[ ${RUN_SYSTEM_TESTS:="false"} == "true" ]]; then
    "${MY_DIR}/run_system_tests.sh" "${ARGS[@]}"
@ -18,4 +18,4 @@
# shellcheck source=scripts/ci/in_container/configure_environment.sh
. "$( dirname "${BASH_SOURCE[0]}" )/configure_environment.sh"

exec /bin/bash
exec /bin/bash "${@}"
@ -18,15 +18,12 @@
# shellcheck source=scripts/ci/in_container/_in_container_script_init.sh
. "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh"

# any argument received is overriding the default nose execution arguments:
PYTEST_ARGS=( "$@" )

echo
echo "Starting the tests with those pytest arguments: ${PYTEST_ARGS[*]}"
echo "Starting the tests with those pytest arguments:" "${@}"
echo
set +e

pytest "${PYTEST_ARGS[@]}"
pytest "${@}"

RES=$?

@ -36,7 +33,7 @@ if [[ "${RES}" == "0" && ${CI:="false"} == "true" ]]; then
    bash <(curl -s https://codecov.io/bash)
fi

if [[ ${CI} == "true" ]]; then
if [[ ${CI:=} == "true" ]]; then
    send_airflow_logs_to_file_io
fi
@ -23,10 +23,9 @@
function add_build_args_for_remote_install() {
    # entrypoint is used as AIRFLOW_SOURCES_FROM/TO in order to avoid costly copying of all sources of
    # Airflow - those are not needed for remote install at all. Entrypoint is later overwritten by
    # ENTRYPOINT_FILE - downloaded entrypoint.sh so this is only for the purpose of iteration on Dockerfile
    EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
        "--build-arg" "AIRFLOW_SOURCES_FROM=entrypoint.sh"
        "--build-arg" "AIRFLOW_SOURCES_TO=/entrypoint"
        "--build-arg" "AIRFLOW_SOURCES_FROM=empty"
        "--build-arg" "AIRFLOW_SOURCES_TO=/empty"
    )
    if [[ ${AIRFLOW_VERSION} =~ [^0-9]*1[^0-9]*10[^0-9]([0-9]*) ]]; then
        # All types of references/versions match this regexp for 1.10 series

@ -36,21 +35,20 @@ function add_build_args_for_remote_install() {
        # This is only for 1.10.0 - 1.10.9
        EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
            "--build-arg" "CONSTRAINT_REQUIREMENTS=https://raw.githubusercontent.com/apache/airflow/1.10.10/requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt"
            "--build-arg" "ENTRYPOINT_FILE=https://raw.githubusercontent.com/apache/airflow/1.10.10/entrypoint.sh"
        )
        else
        EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
            # For 1.10.10+ and v1-10-test it's ok to use AIRFLOW_VERSION as reference
            "--build-arg" "CONSTRAINT_REQUIREMENTS=https://raw.githubusercontent.com/apache/airflow/${AIRFLOW_VERSION}/requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt"
            "--build-arg" "ENTRYPOINT_FILE=https://raw.githubusercontent.com/apache/airflow/${AIRFLOW_VERSION}/entrypoint.sh"
        )
        fi
        AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="v1-10-test"
    else
        # For all other (master, 2.0+) we just match ${AIRFLOW_VERSION}
        EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
            "--build-arg" "CONSTRAINT_REQUIREMENTS=https://raw.githubusercontent.com/apache/airflow/${AIRFLOW_VERSION}/requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt"
            "--build-arg" "ENTRYPOINT_FILE=https://raw.githubusercontent.com/apache/airflow/${AIRFLOW_VERSION}/entrypoint.sh"
        )
        AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="master"
    fi
}
@ -205,10 +203,10 @@ function get_local_image_info() {
    TMP_MANIFEST_LOCAL_SHA=$(mktemp)
    set +e
    # Remove the container just in case
    verbose_docker rm --force "local-airflow-manifest" >/dev/null 2>&1
    verbose_docker_hide_output_on_success rm --force "local-airflow-manifest"
    # Create manifest from the local manifest image
    if ! verbose_docker create --name "local-airflow-manifest" \
        "${AIRFLOW_CI_LOCAL_MANIFEST_IMAGE}" >/dev/null 2>&1 ; then
    if ! verbose_docker_hide_output_on_success create --name "local-airflow-manifest" \
        "${AIRFLOW_CI_LOCAL_MANIFEST_IMAGE}" >>"${OUTPUT_LOG}" 2>&1 ; then
        echo
        echo "Local manifest image not available"
        echo

@ -217,9 +215,10 @@ function get_local_image_info() {
    fi
    set -e
    # Create manifest from the local manifest image
    verbose_docker cp "local-airflow-manifest:${AIRFLOW_CI_BASE_TAG}.json" "${TMP_MANIFEST_LOCAL_JSON}" >/dev/null 2>&1
    verbose_docker_hide_output_on_success cp "local-airflow-manifest:${AIRFLOW_CI_BASE_TAG}.json" \
        "${TMP_MANIFEST_LOCAL_JSON}" >>"${OUTPUT_LOG}" 2>&1
    sed 's/ *//g' "${TMP_MANIFEST_LOCAL_JSON}" | grep '^"sha256:' >"${TMP_MANIFEST_LOCAL_SHA}"
    verbose_docker rm --force "local-airflow-manifest" >/dev/null 2>&1
    verbose_docker_hide_output_on_success rm --force "local-airflow-manifest" >>"${OUTPUT_LOG}" 2>&1
}

#

@ -233,7 +232,7 @@ function get_local_image_info() {
function get_remote_image_info() {
    set +e
    # Pull remote manifest image
    if ! verbose_docker pull "${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}" >/dev/null; then
    if ! verbose_docker_hide_output_on_success pull "${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}"; then
        echo
        echo "Remote docker registry unreachable"
        echo

@ -250,12 +249,14 @@ function get_remote_image_info() {
    TMP_MANIFEST_REMOTE_JSON=$(mktemp)
    TMP_MANIFEST_REMOTE_SHA=$(mktemp)
    # Create container out of the manifest image without running it
    verbose_docker create --cidfile "${TMP_CONTAINER_ID}" "${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}"
    verbose_docker_hide_output_on_success create --cidfile "${TMP_CONTAINER_ID}" \
        "${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}"
    # Extract manifest and store it in local file
    verbose_docker cp "$(cat "${TMP_CONTAINER_ID}"):${AIRFLOW_CI_BASE_TAG}.json" "${TMP_MANIFEST_REMOTE_JSON}"
    verbose_docker_hide_output_on_success cp "$(cat "${TMP_CONTAINER_ID}"):${AIRFLOW_CI_BASE_TAG}.json" \
        "${TMP_MANIFEST_REMOTE_JSON}"
    # Filter everything except SHAs of image layers
    sed 's/ *//g' "${TMP_MANIFEST_REMOTE_JSON}" | grep '^"sha256:' >"${TMP_MANIFEST_REMOTE_SHA}"
    verbose_docker rm --force "$( cat "${TMP_CONTAINER_ID}")"
    verbose_docker_hide_output_on_success rm --force "$( cat "${TMP_CONTAINER_ID}")"
}

# The Number determines the cut-off between local building time and pull + build time.
@ -273,7 +274,7 @@ function get_remote_image_info() {
# Note that this only matters if you have any of the important files changed since the last build
# of your image such as Dockerfile.ci, setup.py etc.
#
MAGIC_CUT_OFF_NUMBER_OF_LAYERS=34
MAGIC_CUT_OFF_NUMBER_OF_LAYERS=36

# Compares layers from both remote and local image and set FORCE_PULL_IMAGES to true in case
# more than the last NN layers are different.

@ -315,11 +316,11 @@ function print_build_info() {
# Prepares all variables needed by the CI build. Depending on the configuration used (python version,
# DockerHub user etc.) the variables are set so that other functions can use those variables.
function prepare_ci_build() {
    export AIRFLOW_CI_BASE_TAG="${DEFAULT_BRANCH}-python${PYTHON_MAJOR_MINOR_VERSION}-ci"
    export AIRFLOW_CI_BASE_TAG="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci"
    export AIRFLOW_CI_LOCAL_MANIFEST_IMAGE="local/${DOCKERHUB_REPO}:${AIRFLOW_CI_BASE_TAG}-manifest"
    export AIRFLOW_CI_REMOTE_MANIFEST_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_CI_BASE_TAG}-manifest"
    export AIRFLOW_CI_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_CI_BASE_TAG}"
    if [[ ${ENABLE_REGISTRY_CACHE="false"} == "true" ]]; then
    if [[ ${USE_GITHUB_REGISTRY="false"} == "true" ]]; then
        if [[ ${CACHE_REGISTRY_PASSWORD:=} != "" ]]; then
            echo "${CACHE_REGISTRY_PASSWORD}" | docker login \
                --username "${CACHE_REGISTRY_USERNAME}" \

@ -334,7 +335,7 @@ function prepare_ci_build() {
        export CACHED_PYTHON_BASE_IMAGE=""
    fi
    export AIRFLOW_BUILD_CI_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}/${AIRFLOW_CI_BASE_TAG}"
    export AIRFLOW_CI_IMAGE_DEFAULT="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${DEFAULT_BRANCH}-ci"
    export AIRFLOW_CI_IMAGE_DEFAULT="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${BRANCH_NAME}-ci"
    export PYTHON_BASE_IMAGE="python:${PYTHON_MAJOR_MINOR_VERSION}-slim-buster"
    export BUILT_IMAGE_FLAG_FILE="${BUILD_CACHE_DIR}/${BRANCH_NAME}/.built_${PYTHON_MAJOR_MINOR_VERSION}"
    if [[ "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}" == "${PYTHON_MAJOR_MINOR_VERSION}" ]]; then
@ -569,10 +570,31 @@ Docker building ${AIRFLOW_CI_IMAGE}.
# Prepares all variables needed by the CI build. Depending on the configuration used (python version,
# DockerHub user etc.) the variables are set so that other functions can use those variables.
function prepare_prod_build() {
    export AIRFLOW_PROD_BASE_TAG="${DEFAULT_BRANCH}-python${PYTHON_MAJOR_MINOR_VERSION}"
    if [[ "${INSTALL_AIRFLOW_REFERENCE:=}" != "" ]]; then
        # When --install-airflow-reference is used then the image is build from github tag
        EXTRA_DOCKER_PROD_BUILD_FLAGS=(
            "--build-arg" "AIRFLOW_INSTALL_SOURCES=https://github.com/apache/airflow/archive/${INSTALL_AIRFLOW_REFERENCE}.tar.gz#egg=apache-airflow"
        )
        export AIRFLOW_VERSION="${INSTALL_AIRFLOW_REFERENCE}"
        add_build_args_for_remote_install
    elif [[ "${INSTALL_AIRFLOW_VERSION:=}" != "" ]]; then
        # When --install-airflow-version is used then the image is build from PIP package
        EXTRA_DOCKER_PROD_BUILD_FLAGS=(
            "--build-arg" "AIRFLOW_INSTALL_SOURCES=apache-airflow"
            "--build-arg" "AIRFLOW_INSTALL_VERSION===${INSTALL_AIRFLOW_VERSION}"
        )
        export AIRFLOW_VERSION="${INSTALL_AIRFLOW_VERSION}"
        add_build_args_for_remote_install
    else
        # When no airflow version/reference is specified, production image is built from local sources
        EXTRA_DOCKER_PROD_BUILD_FLAGS=(
        )
    fi

    export AIRFLOW_PROD_BASE_TAG="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}"
    export AIRFLOW_PROD_BUILD_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_PROD_BASE_TAG}-build"
    export AIRFLOW_PROD_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_PROD_BASE_TAG}"
    export AIRFLOW_PROD_IMAGE_DEFAULT="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${DEFAULT_BRANCH}"
    export AIRFLOW_PROD_IMAGE_DEFAULT="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${BRANCH_NAME}"
    export PYTHON_BASE_IMAGE="python:${PYTHON_MAJOR_MINOR_VERSION}-slim-buster"
    if [[ "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}" == "${PYTHON_MAJOR_MINOR_VERSION}" ]]; then
        export DEFAULT_IMAGE="${AIRFLOW_PROD_IMAGE_DEFAULT}"
@ -588,7 +610,7 @@ function prepare_prod_build() {
    export ADDITIONAL_RUNTIME_DEPS="${ADDITIONAL_RUNTIME_DEPS:=""}"
    export AIRFLOW_IMAGE="${AIRFLOW_PROD_IMAGE}"

    if [[ ${ENABLE_REGISTRY_CACHE="false"} == "true" ]]; then
    if [[ ${USE_GITHUB_REGISTRY="false"} == "true" ]]; then
        if [[ ${CACHE_REGISTRY_PASSWORD:=} != "" ]]; then
            echo "${CACHE_REGISTRY_PASSWORD}" | docker login \
                --username "${CACHE_REGISTRY_USERNAME}" \

@ -610,26 +632,8 @@ function prepare_prod_build() {
    AIRFLOW_KUBERNETES_IMAGE_TAG=$(echo "${AIRFLOW_KUBERNETES_IMAGE}" | cut -f 2 -d ":")
    export AIRFLOW_KUBERNETES_IMAGE_TAG

    if [[ "${INSTALL_AIRFLOW_REFERENCE:=}" != "" ]]; then
        # When --install-airflow-reference is used then the image is build from github tag
        EXTRA_DOCKER_PROD_BUILD_FLAGS=(
            "--build-arg" "AIRFLOW_INSTALL_SOURCES=https://github.com/apache/airflow/archive/${INSTALL_AIRFLOW_REFERENCE}.tar.gz#egg=apache-airflow"
        )
        export AIRFLOW_VERSION="${INSTALL_AIRFLOW_REFERENCE}"
        add_build_args_for_remote_install
    elif [[ "${INSTALL_AIRFLOW_VERSION:=}" != "" ]]; then
        # When --install-airflow-version is used then the image is build from PIP package
        EXTRA_DOCKER_PROD_BUILD_FLAGS=(
            "--build-arg" "AIRFLOW_INSTALL_SOURCES=apache-airflow"
            "--build-arg" "AIRFLOW_INSTALL_VERSION===${INSTALL_AIRFLOW_VERSION}"
        )
        export AIRFLOW_VERSION="${INSTALL_AIRFLOW_VERSION}"
        add_build_args_for_remote_install
    else
        # When no airflow version/reference is specified, production image is built from local sources
        EXTRA_DOCKER_PROD_BUILD_FLAGS=(
        )
    fi
    AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="${BRANCH_NAME}"

    go_to_airflow_sources
}
@ -669,6 +673,7 @@ function build_prod_image() {
|
|||
--build-arg PYTHON_BASE_IMAGE="${PYTHON_BASE_IMAGE}" \
|
||||
--build-arg PYTHON_MAJOR_MINOR_VERSION="${PYTHON_MAJOR_MINOR_VERSION}" \
|
||||
--build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
|
||||
--build-arg AIRFLOW_BRANCH="${AIRFLOW_BRANCH_FOR_PYPI_PRELOADING}" \
|
||||
--build-arg AIRFLOW_EXTRAS="${AIRFLOW_EXTRAS}" \
|
||||
--build-arg ADDITIONAL_AIRFLOW_EXTRAS="${ADDITIONAL_AIRFLOW_EXTRAS}" \
|
||||
--build-arg ADDITIONAL_PYTHON_DEPS="${ADDITIONAL_PYTHON_DEPS}" \
|
||||
|
@ -687,6 +692,7 @@ function build_prod_image() {
|
|||
--build-arg ADDITIONAL_DEV_DEPS="${ADDITIONAL_DEV_DEPS}" \
|
||||
--build-arg ADDITIONAL_RUNTIME_DEPS="${ADDITIONAL_RUNTIME_DEPS}" \
|
||||
--build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
|
||||
--build-arg AIRFLOW_BRANCH="${AIRFLOW_BRANCH_FOR_PYPI_PRELOADING}" \
|
||||
--build-arg AIRFLOW_EXTRAS="${AIRFLOW_EXTRAS}" \
|
||||
"${DOCKER_CACHE_PROD_DIRECTIVE[@]}" \
|
||||
-t "${AIRFLOW_PROD_IMAGE}" \
@@ -46,6 +46,16 @@ function initialize_common_environment {
    # All the subsequent questions
    export LAST_FORCE_ANSWER_FILE="${BUILD_CACHE_DIR}/last_force_answer.sh"

+   # This folder is mounted inside the container under /files. This is how we can
+   # exchange DAGs, scripts, packages etc. with the container environment
+   export FILES_DIR="${AIRFLOW_SOURCES}/files"
+   # Temporary dir used well ... temporarily
+   export TMP_DIR="${AIRFLOW_SOURCES}/tmp"
+
+   # Create those folders above in case they do not exist
+   mkdir -p "${TMP_DIR}"
+   mkdir -p "${FILES_DIR}"
+
    # Create useful directories if not yet created
    mkdir -p "${AIRFLOW_SOURCES}/.mypy_cache"
    mkdir -p "${AIRFLOW_SOURCES}/logs"
@@ -69,7 +79,7 @@ function initialize_common_environment {
    export GITHUB_ORGANISATION=${GITHUB_ORGANISATION:="apache"}
    export GITHUB_REPO=${GITHUB_REPO:="airflow"}
    export CACHE_REGISTRY=${CACHE_REGISTRY:="docker.pkg.github.com"}
-   export ENABLE_REGISTRY_CACHE=${ENABLE_REGISTRY_CACHE:="false"}
+   export USE_GITHUB_REGISTRY=${USE_GITHUB_REGISTRY:="false"}

    # Default port numbers for forwarded ports
    export WEBSERVER_HOST_PORT=${WEBSERVER_HOST_PORT:="28080"}
@@ -49,7 +49,7 @@ function generate_local_mounts_list {
        "$prefix"pytest.ini:/opt/airflow/pytest.ini:cached
        "$prefix"requirements:/opt/airflow/requirements:cached
        "$prefix"scripts:/opt/airflow/scripts:cached
-       "$prefix"scripts/ci/in_container/entrypoint_ci.sh:/entrypoint_ci.sh:cached
+       "$prefix"scripts/ci/in_container/entrypoint_ci.sh:/entrypoint:cached
        "$prefix"setup.cfg:/opt/airflow/setup.cfg:cached
        "$prefix"setup.py:/opt/airflow/setup.py:cached
        "$prefix"tests:/opt/airflow/tests:cached
@@ -24,7 +24,7 @@
function calculate_file_md5sum {
    local FILE="${1}"
    local MD5SUM
-   local MD5SUM_CACHE_DIR="${BUILD_CACHE_DIR}/${DEFAULT_BRANCH}/${PYTHON_MAJOR_MINOR_VERSION}/${THE_IMAGE_TYPE}"
+   local MD5SUM_CACHE_DIR="${BUILD_CACHE_DIR}/${BRANCH_NAME}/${PYTHON_MAJOR_MINOR_VERSION}/${THE_IMAGE_TYPE}"
    mkdir -pv "${MD5SUM_CACHE_DIR}"
    MD5SUM=$(md5sum "${FILE}")
    local MD5SUM_FILE

@@ -54,7 +54,7 @@ function calculate_file_md5sum
function move_file_md5sum {
    local FILE="${1}"
    local MD5SUM_FILE
-   local MD5SUM_CACHE_DIR="${BUILD_CACHE_DIR}/${DEFAULT_BRANCH}/${PYTHON_MAJOR_MINOR_VERSION}/${THE_IMAGE_TYPE}"
+   local MD5SUM_CACHE_DIR="${BUILD_CACHE_DIR}/${BRANCH_NAME}/${PYTHON_MAJOR_MINOR_VERSION}/${THE_IMAGE_TYPE}"
    mkdir -pv "${MD5SUM_CACHE_DIR}"
    MD5SUM_FILE="${MD5SUM_CACHE_DIR}"/$(basename "${FILE}").md5sum
    local MD5SUM_FILE_NEW
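
With this change the md5sum cache is keyed by the checked-out branch rather than the
default branch, so switching branches no longer overwrites another branch's cache. For
example (a sketch assuming BUILD_CACHE_DIR points at .build, BRANCH_NAME=v1-10-test,
Python 3.6 and THE_IMAGE_TYPE=CI) the cache file for Dockerfile lands at:

    .build/v1-10-test/3.6/CI/Dockerfile.md5sum
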
@@ -112,8 +112,8 @@ function pull_prod_images_if_needed() {
        fi
        # "Build" segment of production image
        pull_image_possibly_from_cache "${AIRFLOW_PROD_BUILD_IMAGE}" "${CACHED_AIRFLOW_PROD_BUILD_IMAGE}"
-       # Main segment of production image
-       pull_image_possibly_from_cache "${AIRFLOW_PROD_IMAGE}" "${CACHED_AIRFLOW_PROD_IMAGE}"
+       # We never pull the main segment of the production image - we always build it locally;
+       # this is usually very fast and much nicer for rebuilds and development
    fi
}
@@ -162,7 +162,6 @@ function push_prod_images() {
    if [[ -n ${DEFAULT_IMAGE:=""} && ${CACHED_AIRFLOW_PROD_IMAGE} == "" ]]; then
        verbose_docker push "${DEFAULT_IMAGE}"
    fi

    # we do not need to push the PYTHON base image here - it is already pushed in the CI push
}
@@ -54,6 +54,10 @@ function script_end {
    #shellcheck disable=2181
    EXIT_CODE=$?
    if [[ ${EXIT_CODE} != 0 ]]; then
+       # Cat output log in case we exit with error
+       if [[ -f "${OUTPUT_LOG}" ]]; then
+           cat "${OUTPUT_LOG}"
+       fi
        print_info "###########################################################################################"
        print_info " EXITING WITH STATUS CODE ${EXIT_CODE}"
        print_info "###########################################################################################"
@@ -35,6 +35,17 @@ function verbose_docker {
    docker "${@}"
}

+# In case "VERBOSE" is set to "true" (--verbose flag in Breeze) all docker commands run will
+# be printed before execution; the output is appended to ${OUTPUT_LOG} so that it can be
+# shown only when a command fails
+function verbose_docker_hide_output_on_success {
+    if [[ ${VERBOSE:="false"} == "true" && ${VERBOSE_COMMANDS:=} != "true" ]]; then
+        # do not print the echo if VERBOSE_COMMANDS is set (set -x does it already)
+        echo "docker" "${@}"
+    fi
+    docker "${@}" >>"${OUTPUT_LOG}" 2>&1
+}

# Prints verbose information in case VERBOSE variable is set
function print_info() {
    if [[ ${VERBOSE:="false"} == "true" ]]; then
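
Usage of the new helper mirrors verbose_docker (a sketch; any docker subcommand works):

    verbose_docker_hide_output_on_success pull "${PYTHON_BASE_IMAGE}"

On success the output stays in ${OUTPUT_LOG}; when the script exits non-zero, script_end
(see the hunk above) cats the log back so the output is not lost.
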
@@ -1,110 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# Might be empty
AIRFLOW_COMMAND="${1}"

set -euo pipefail

function verify_db_connection {
    DB_URL="${1}"

    DB_CHECK_MAX_COUNT=${MAX_DB_CHECK_COUNT:=10}
    DB_CHECK_SLEEP_TIME=${DB_CHECK_SLEEP_TIME:=2}

    local DETECTED_DB_BACKEND=""
    local DETECTED_DB_HOST=""
    local DETECTED_DB_PORT=""

    if [[ ${DB_URL} != sqlite* ]]; then
        # Auto-detect DB parameters
        [[ ${DB_URL} =~ ([^:]*)://([^@/]*)@?([^/:]*):?([0-9]*)/([^\?]*)\??(.*) ]] && \
            DETECTED_DB_BACKEND=${BASH_REMATCH[1]} &&
            # Not used USER match
            DETECTED_DB_HOST=${BASH_REMATCH[3]} &&
            DETECTED_DB_PORT=${BASH_REMATCH[4]} &&
            # Not used SCHEMA match
            # Not used PARAMS match

        echo DB_BACKEND="${DB_BACKEND:=${DETECTED_DB_BACKEND}}"

        if [[ -z "${DETECTED_DB_PORT}" ]]; then
            if [[ ${DB_BACKEND} == "postgres"* ]]; then
                DETECTED_DB_PORT=5432
            elif [[ ${DB_BACKEND} == "mysql"* ]]; then
                DETECTED_DB_PORT=3306
            fi
        fi

        DETECTED_DB_HOST=${DETECTED_DB_HOST:="localhost"}

        # Allow the DB parameters to be overridden by environment variable
        echo DB_HOST="${DB_HOST:=${DETECTED_DB_HOST}}"
        echo DB_PORT="${DB_PORT:=${DETECTED_DB_PORT}}"
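
        # A worked example of the match above (the URL is illustrative, not taken from
        # this script): for
        #     DB_URL="postgresql+psycopg2://airflow:airflow@postgres:5432/airflow"
        # the regex yields BASH_REMATCH[1]="postgresql+psycopg2" (backend),
        # BASH_REMATCH[3]="postgres" (host) and BASH_REMATCH[4]="5432" (port);
        # matches 2, 5 and 6 (user, schema, query params) are deliberately unused.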

        while true
        do
            set +e
            LAST_CHECK_RESULT=$(nc -zvv "${DB_HOST}" "${DB_PORT}" >/dev/null 2>&1)
            RES=$?
            set -e
            if [[ ${RES} == 0 ]]; then
                echo
                break
            else
                echo -n "."
                DB_CHECK_MAX_COUNT=$((DB_CHECK_MAX_COUNT-1))
            fi
            if [[ ${DB_CHECK_MAX_COUNT} == 0 ]]; then
                echo
                echo "ERROR! Maximum number of retries (${MAX_DB_CHECK_COUNT}) reached while checking ${DB_BACKEND} db. Exiting"
                echo
                break
            else
                sleep "${DB_CHECK_SLEEP_TIME}"
            fi
        done
        if [[ ${RES} != 0 ]]; then
            echo " ERROR: ${DB_BACKEND} db could not be reached!"
            echo
            echo "${LAST_CHECK_RESULT}"
            echo
            export EXIT_CODE=${RES}
        fi
    fi
}

# if no DB configured - use sqlite db by default
AIRFLOW__CORE__SQL_ALCHEMY_CONN="${AIRFLOW__CORE__SQL_ALCHEMY_CONN:="sqlite:///${AIRFLOW_HOME}/airflow.db"}"

verify_db_connection "${AIRFLOW__CORE__SQL_ALCHEMY_CONN}"

AIRFLOW__CELERY__BROKER_URL=${AIRFLOW__CELERY__BROKER_URL:=}

if [[ -n ${AIRFLOW__CELERY__BROKER_URL} ]] && \
   [[ ${AIRFLOW_COMMAND} =~ ^(scheduler|worker|flower)$ ]]; then
    verify_db_connection "${AIRFLOW__CELERY__BROKER_URL}"
fi

if [[ ${AIRFLOW_COMMAND} == "" ]]; then
    exec "/bin/bash"
fi

# Run the command
exec airflow "${@}"
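
With this file deleted, the entrypoint is embedded in the image itself, so arguments pass
straight through to airflow without the detection logic above. A sketch of the resulting
usage (the image tag is illustrative):

    docker run -it apache/airflow:1.10.10 webserver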