Replace file.io with artifacts (#10137)
This commit is contained in:
Parent: 000287753b
Commit: e92d50eda7

@@ -28,6 +28,7 @@ on:
 env:
   MOUNT_LOCAL_SOURCES: "false"
+  MOUNT_FILES: "true"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
   SKIP_CHECK_REMOTE_IMAGE: "true"
   SKIP_CI_IMAGE_CHECK: "true"
@@ -108,6 +109,20 @@ jobs:
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Prepare & test backport packages"
         run: "./scripts/ci/backport_packages/ci_prepare_and_test_backport_packages.sh"
+      - uses: actions/upload-artifact@v2
+        name: Upload packages
+        if: always()
+        with:
+          name: >
+            airflow-backport-packages
+          path: './files/airflow-packages-*'
+      - uses: actions/upload-artifact@v2
+        name: Upload readmes
+        if: always()
+        with:
+          name: >
+            airflow-backport-readmes
+          path: './files/airflow-backport-readme*'

   trigger-tests:
     timeout-minutes: 5
@@ -177,7 +192,6 @@ jobs:
         run: ./scripts/ci/kubernetes/ci_run_kubernetes_tests.sh
       - uses: actions/upload-artifact@v2
         name: Upload KinD logs
         # Always run this, even if one of the previous steps failed.
         if: always()
         with:
           name: 'kind-logs-${{matrix.kube-mode}}-${{matrix.python-version}}-${{matrix.kubernetes-version}}'
@@ -212,6 +226,12 @@ jobs:
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Tests"
         run: ./scripts/ci/testing/ci_run_airflow_testing.sh
+      - uses: actions/upload-artifact@v2
+        name: Upload airflow logs
+        if: always()
+        with:
+          name: 'airflow-logs-${{matrix.test-type}}-${{matrix.python-version}}-${{matrix.postgres-version}}'
+          path: './files/airflow_logs*'

   tests-mysql:
     timeout-minutes: 80
@@ -242,6 +262,12 @@ jobs:
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Tests"
         run: ./scripts/ci/testing/ci_run_airflow_testing.sh
+      - uses: actions/upload-artifact@v2
+        name: Upload airflow logs
+        if: always()
+        with:
+          name: 'airflow-logs-${{matrix.test-type}}-${{matrix.python-version}}-${{matrix.mysql-version}}'
+          path: './files/airflow_logs*'

   tests-sqlite:
     timeout-minutes: 80
@@ -256,8 +282,8 @@ jobs:
     env:
       BACKEND: sqlite
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
-      TEST_TYPE: ${{ matrix.test-type }}
       RUN_TESTS: "true"
+      TEST_TYPE: ${{ matrix.test-type }}
     if: needs.trigger-tests.outputs.run-tests == 'true' || github.event_name != 'pull_request'
     steps:
       - uses: actions/checkout@v2
@@ -270,6 +296,12 @@ jobs:
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Tests"
         run: ./scripts/ci/testing/ci_run_airflow_testing.sh
+      - uses: actions/upload-artifact@v2
+        name: Upload airflow logs
+        if: always()
+        with:
+          name: 'airflow-logs-${{matrix.test-type}}-${{matrix.python-version}}'
+          path: './files/airflow_logs*'

   helm-tests:
     timeout-minutes: 5
@@ -383,7 +415,7 @@ jobs:
         name: Upload constraint artifacts
         with:
           name: 'constraints-${{matrix.python-version}}'
-          path: 'files/constraints-${{matrix.python-version}}/constraints-${{matrix.python-version}}.txt'
+          path: './files/constraints-${{matrix.python-version}}/constraints-${{matrix.python-version}}.txt'

   constraints-push:
     timeout-minutes: 10

@@ -114,3 +114,9 @@ jobs:
         with:
           name: 'quarantined_tests'
           path: 'files/test_result.xml'
+      - uses: actions/upload-artifact@v2
+        name: Upload airflow logs
+        if: always()
+        with:
+          name: airflow-logs-quarantined-${{matrix.python-version}}-${{matrix.kubernetes-version}}
+          path: './files/airflow_logs*'

@@ -391,7 +391,7 @@ class FileIoException(Exception):
     after=tenacity.after_log(log, logging.DEBUG),
 )
 def _upload_text_to_fileio(content):
-    """Uload text file to File.io service and return lnk"""
+    """Upload text file to File.io service and return link"""
     resp = requests.post("https://file.io", files={"file": ("airflow-report.txt", content)})
     if not resp.ok:
         raise FileIoException("Failed to send report to file.io service.")

breeze
@@ -97,6 +97,9 @@ function setup_default_breeze_variables() {
     # By default we mount local Airflow sources
     MOUNT_LOCAL_SOURCES="true"

+    # By default we mount files folder
+    MOUNT_FILES="true"
+
     # Holds last sub-command used - this is used by --help flag to print help for the command entered
     LAST_SUBCOMMAND=""
@@ -492,6 +495,7 @@ function prepare_command_files() {
     MAIN_PROD_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/base.yml
     BACKEND_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/backend-${BACKEND}.yml
     LOCAL_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/local.yml
+    FILES_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/files.yml
     LOCAL_PROD_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/local-prod.yml
     REMOVE_SOURCES_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/remove-sources.yml
     FORWARD_CREDENTIALS_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/forward-credentials.yml
@@ -504,6 +508,11 @@ function prepare_command_files() {
         COMPOSE_PROD_FILE=${COMPOSE_PROD_FILE}:${LOCAL_PROD_DOCKER_COMPOSE_FILE}
     fi

+    if [[ "${MOUNT_FILES}" != "false" ]]; then
+        COMPOSE_CI_FILE=${COMPOSE_CI_FILE}:${FILES_DOCKER_COMPOSE_FILE}
+        COMPOSE_PROD_FILE=${COMPOSE_PROD_FILE}:${FILES_DOCKER_COMPOSE_FILE}
+    fi
+
     if [[ ${FORWARD_CREDENTIALS} == "true" ]]; then
         COMPOSE_CI_FILE=${COMPOSE_CI_FILE}:${FORWARD_CREDENTIALS_DOCKER_COMPOSE_FILE}
         COMPOSE_PROD_FILE=${COMPOSE_PROD_FILE}:${FORWARD_CREDENTIALS_DOCKER_COMPOSE_FILE}
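
Editor's note: a minimal sketch (not part of the commit) of how a colon-separated chain like COMPOSE_CI_FILE is typically consumed. docker-compose reads the standard COMPOSE_FILE environment variable and splits it on COMPOSE_PATH_SEPARATOR, which defaults to ':' on Linux; the paths below only mirror the repository layout assumed above.

    #!/usr/bin/env bash
    # Sketch: merge several compose files by exporting the colon-separated list.
    set -euo pipefail
    SCRIPTS_CI_DIR="./scripts/ci"   # assumed layout, matching the paths in the hunk
    COMPOSE_CI_FILE="${SCRIPTS_CI_DIR}/docker-compose/base.yml"
    COMPOSE_CI_FILE="${COMPOSE_CI_FILE}:${SCRIPTS_CI_DIR}/docker-compose/files.yml"
    # docker-compose merges the files left to right; 'config' validates the result.
    COMPOSE_FILE="${COMPOSE_CI_FILE}" docker-compose config >/dev/null \
        && echo "compose files merge cleanly"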
@@ -2125,8 +2134,10 @@ function run_breeze_command {
         enter_breeze)
             if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
                 "${BUILD_CACHE_DIR}/${LAST_DC_PROD_FILE}" run --service-ports --rm airflow "${@}"
+                "${SCRIPTS_CI_DIR}/tools/ci_fix_ownership.sh"
             else
                 "${BUILD_CACHE_DIR}/${LAST_DC_CI_FILE}" run --service-ports --rm airflow "${@}"
+                "${SCRIPTS_CI_DIR}/tools/ci_fix_ownership.sh"
                 "${SCRIPTS_CI_DIR}/tools/ci_clear_tmp.sh"
             fi
             ;;
@@ -131,7 +131,7 @@ The script generates two types of files:

 Note that our CI system builds the release notes for backport packages automatically with every build and
 current date - this way you can be sure the automated generation of the release notes continues to
-work. You can also preview the generated readme files (by downloading artifacts uploaded to file.io).
+work. You can also preview the generated readme files (by downloading artifacts from GitHub Actions).
 The script does not modify the README and CHANGES files if there is no change in the repo for that provider.

 # Preparing backport packages
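
Editor's note: a sketch of one way to download such artifacts locally, assuming a recent GitHub CLI that provides the 'gh run download' subcommand. The run id is hypothetical; take a real one from the Actions page or 'gh run list'.

    # Fetch the backport artifacts produced by a given workflow run.
    RUN_ID=1234567890   # hypothetical run id
    gh run download "${RUN_ID}" --name airflow-backport-readmes --dir /tmp/readmes
    gh run download "${RUN_ID}" --name airflow-backport-packages --dir /tmp/packages
    ls /tmp/readmes /tmp/packages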
@@ -25,15 +25,9 @@ export PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:-3.6}
 "${SCRIPTS_CI_DIR}/backport_packages/ci_test_backport_packages_install_separately.sh"
 "${SCRIPTS_CI_DIR}/backport_packages/ci_test_backport_packages_import_all_classes.sh"

 cd "${AIRFLOW_SOURCES}/backport_packages" || exit 1

 DUMP_FILE="/tmp/airflow_provider_packages_$(date +"%Y%m%d-%H%M%S").tar.gz"

 cd "${AIRFLOW_SOURCES}/dist" || exit 1
 tar -cvzf "${DUMP_FILE}" .

 echo "Packages are in dist and also tar-gzipped in ${DUMP_FILE}"

-if [[ "${CI:=false}" == "true" ]]; then
-    curl -F "file=@${DUMP_FILE}" https://file.io
-fi
@@ -49,6 +49,12 @@ services:
       - ENABLED_SYSTEMS
       - RUN_SYSTEM_TESTS
       - PYTHON_MAJOR_MINOR_VERSION
+      - HOST_USER_ID
+      - HOST_GROUP_ID
+      - HOST_HOME=${HOME}
+      - HOST_AIRFLOW_SOURCES=${AIRFLOW_SOURCES}
+      - HOST_OS
+      - PYTHONDONTWRITEBYTECODE
     volumes:
       # Pass docker to inside of the container so that Kind and Moto tests can use it.
       - /var/run/docker.sock:/var/run/docker.sock
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+---
+version: "2.2"
+services:
+  airflow:
+    environment:
+      - GITHUB_HEAD_REF
+      - GITHUB_REF
+      - GITHUB_ACTIONS
+      - GITHUB_SHA
+      - GITHUB_REPOSITORY
+      - GITHUB_RUN_ID
+      - GITHUB_TOKEN
@@ -0,0 +1,22 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+---
+version: "2.2"
+services:
+  airflow:
+    volumes:
+      - ../../../files:/files:cached
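
Editor's note: a quick sketch of the effect of this overlay, assuming you are in the repo root and base.yml defines the 'airflow' service. Anything written to /files inside the container lands in ./files on the host, which is what lets the upload-artifact steps above pick the files up.

    docker-compose \
        -f scripts/ci/docker-compose/base.yml \
        -f scripts/ci/docker-compose/files.yml \
        run --rm airflow bash -c 'echo "hello" > /files/proof.txt'
    cat files/proof.txt   # the file now exists on the host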
@@ -29,7 +29,6 @@ services:
       - ../../../.github:/opt/airflow/.github:cached
       - ../../../.inputrc:/root/.inputrc:cached
       - ../../../.kube:/root/.kube:cached
-      - ../../../files:/files:cached
       - ../../../dist:/dist:cached
       - ../../../scripts/prod/entrypoint_prod.sh:/entrypoint:cached
       - ../../../setup.cfg:/opt/airflow/setup.cfg:cached
@@ -45,7 +45,6 @@ services:
       - ../../../dags:/opt/airflow/dags:cached
       - ../../../dev:/opt/airflow/dev:cached
       - ../../../docs:/opt/airflow/docs:cached
-      - ../../../files:/files:cached
       - ../../../dist:/dist:cached
       - ../../../hooks:/opt/airflow/hooks:cached
       - ../../../logs:/root/airflow/logs:cached
@@ -60,12 +59,5 @@ services:
       - ../../../tmp:/tmp:cached
       - ../../../metastore_browser:/opt/airflow/metastore_browser:cached
     # END automatically generated volumes from LOCAL_MOUNTS in _local_mounts.sh
-    environment:
-      - HOST_USER_ID
-      - HOST_GROUP_ID
-      - HOST_HOME=${HOME}
-      - HOST_AIRFLOW_SOURCES=${AIRFLOW_SOURCES}
-      - HOST_OS
-      - PYTHONDONTWRITEBYTECODE
     ports:
       - "${WEBSERVER_HOST_PORT}:8080"
@@ -248,31 +248,18 @@ function setup_kerberos() {
 }

 function dump_airflow_logs() {
+    DUMP_FILE=/files/airflow_logs_$(date "+%Y-%m-%d")_${CI_BUILD_ID:="default"}_${CI_JOB_ID:="default"}.log.tar.gz
     echo "###########################################################################################"
     echo "                   Dumping logs from all the airflow tasks"
     echo "###########################################################################################"
-    pushd /root/airflow/ || exit 1
-    tar -czf "${1}" logs
+    pushd "${AIRFLOW_HOME}" || exit 1
+    tar -czf "${DUMP_FILE}" logs
+    echo "                   Logs dumped to ${DUMP_FILE}"
     popd || exit 1
     echo "###########################################################################################"
 }

-
-function send_airflow_logs_to_file_io() {
-    echo "##############################################################################"
-    echo
-    echo "   DUMPING LOG FILES FROM AIRFLOW AND SENDING THEM TO file.io"
-    echo
-    echo "##############################################################################"
-    DUMP_FILE=/tmp/$(date "+%Y-%m-%d")_airflow_${CI_BUILD_ID:="default"}_${CI_JOB_ID:="default"}.log.tar.gz
-    dump_airflow_logs "${DUMP_FILE}"
-    echo
-    echo "   Logs saved to ${DUMP_FILE}"
-    echo
-    echo "##############################################################################"
-    curl -F "file=@${DUMP_FILE}" https://file.io
-}

 function install_released_airflow_version() {
     pip uninstall -y apache-airflow || true
     find /root/airflow/ -type f -print0 | xargs -0 rm -f --
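
Editor's note: a sketch of the intended flow after this refactor, using the function defined in the hunk above. The CI_BUILD_ID/CI_JOB_ID values here are assumed local defaults; CI sets real ones. The tarball lands under /files, which is host-mounted, so the workflow's upload-artifact step matching './files/airflow_logs*' picks it up.

    export CI_BUILD_ID="local" CI_JOB_ID="manual"   # assumed values for a local run
    dump_airflow_logs
    ls /files/airflow_logs_*.log.tar.gz             # visible on the host via the /files mount
    tar -tzf /files/airflow_logs_*.log.tar.gz | head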

@@ -59,7 +59,7 @@ else
 fi

 if [[ ${CI:=} == "true" ]]; then
-    send_airflow_logs_to_file_io
+    dump_airflow_logs
 fi

 exit "${RES}"

@@ -170,16 +170,9 @@ fi

 popd

-AIRFLOW_PACKAGES_TGZ_FILE="/tmp/airflow-packages-$(date +"%Y%m%d-%H%M%S")-${VERSION_SUFFIX_FOR_SVN}${VERSION_SUFFIX_FOR_PYPI}.tar.gz"
+AIRFLOW_PACKAGES_TGZ_FILE="/files/airflow-packages-$(date +"%Y%m%d-%H%M%S")-${VERSION_SUFFIX_FOR_SVN}${VERSION_SUFFIX_FOR_PYPI}.tar.gz"

 tar -cvzf "${AIRFLOW_PACKAGES_TGZ_FILE}" dist/*.whl dist/*.tar.gz
 echo
 echo "Airflow packages are in dist folder and tar-gzipped in ${AIRFLOW_PACKAGES_TGZ_FILE}"
 echo
-if [[ "${CI:=false}" == "true" ]]; then
-    echo
-    echo "Sending all airflow packages to file.io"
-    echo
-    curl -F "file=@${AIRFLOW_PACKAGES_TGZ_FILE}" https://file.io
-    echo
-fi
@@ -41,7 +41,7 @@ cd "${AIRFLOW_SOURCES}/backport_packages" || exit 1

 python3 setup_backport_packages.py update-package-release-notes "$@"

-AIRFLOW_BACKPORT_README_TGZ_FILE="/dist/airflow-backport-readme-$(date +"%Y-%m-%d-%H.%M.%S").tar.gz"
+AIRFLOW_BACKPORT_README_TGZ_FILE="/files/airflow-backport-readme-$(date +"%Y-%m-%d-%H.%M.%S").tar.gz"

 cd "${AIRFLOW_SOURCES}" || exit 1
@@ -50,10 +50,3 @@ find airflow/providers \( -name 'README.md' -o -name 'PROVIDERS_CHANGES*' \) -pr
 echo
 echo "Airflow readme for backport packages are tar-gzipped in ${AIRFLOW_BACKPORT_README_TGZ_FILE}"
 echo
-if [[ "${CI:=false}" == "true" ]]; then
-    echo
-    echo "Sending all airflow packages to file.io"
-    echo
-    curl -F "file=@${AIRFLOW_PACKAGES_TGZ_FILE}" https://file.io
-    echo
-fi

@@ -47,7 +47,7 @@ if [[ "${RES}" == "0" && ${CI} == "true" ]]; then
 fi

 if [[ ${CI} == "true" ]]; then
-    send_airflow_logs_to_file_io
+    dump_airflow_logs
 fi

 in_container_script_end

@@ -138,6 +138,7 @@ function initialize_common_environment {
         print_info

         read -r -a EXTRA_DOCKER_FLAGS <<< "$(convert_local_mounts_to_docker_params)"
+        EXTRA_DOCKER_FLAGS+=("-v" "${AIRFLOW_SOURCES}/files:/files")
     else
         print_info
         print_info "Skip mounting host volumes to Docker"
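
Editor's note: for illustration only, a sketch of how an array like EXTRA_DOCKER_FLAGS is typically expanded into a docker invocation; the real call site is elsewhere in the script, and the image name below is hypothetical.

    # Hypothetical consumer of EXTRA_DOCKER_FLAGS: each "-v src:dst" pair becomes a bind mount.
    EXTRA_DOCKER_FLAGS=("-v" "${AIRFLOW_SOURCES}/files:/files")
    docker run --rm "${EXTRA_DOCKER_FLAGS[@]}" "${AIRFLOW_CI_IMAGE:-apache/airflow:master-ci}" \
        bash -c 'ls /files'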
@@ -41,7 +41,6 @@ function generate_local_mounts_list {
         "$prefix"dags:/opt/airflow/dags:cached
         "$prefix"dev:/opt/airflow/dev:cached
         "$prefix"docs:/opt/airflow/docs:cached
-        "$prefix"files:/files:cached
         "$prefix"dist:/dist:cached
         "$prefix"hooks:/opt/airflow/hooks:cached
         "$prefix"logs:/root/airflow/logs:cached
@@ -64,10 +64,13 @@ export BACKEND=${BACKEND:="sqlite"}
 # Whether local sources necessary to run airflow are mounted to docker
 export MOUNT_LOCAL_SOURCES=${MOUNT_LOCAL_SOURCES:="false"}

-# whethere verbose output should be produced
+# Whether files folder is mounted to docker
+export MOUNT_FILES=${MOUNT_FILES:="true"}
+
+# whether verbose output should be produced
 export VERBOSE=${VERBOSE:="false"}

-# whethere verbose commadns output (set-x) should be used
+# whether verbose commands output (set -x) should be used
 export VERBOSE_COMMANDS=${VERBOSE_COMMANDS:="false"}

 # Forwards host credentials to the container
@@ -76,10 +79,18 @@ export FORWARD_CREDENTIALS=${FORWARD_CREDENTIALS:="false"}
 # Installs different airflow version than current from the sources
 export INSTALL_AIRFLOW_VERSION=${INSTALL_AIRFLOW_VERSION:=""}

+DOCKER_COMPOSE_LOCAL=()
+
 if [[ ${MOUNT_LOCAL_SOURCES} == "true" ]]; then
-    DOCKER_COMPOSE_LOCAL=("-f" "${SCRIPTS_CI_DIR}/docker-compose/local.yml")
-else
-    DOCKER_COMPOSE_LOCAL=()
+    DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/local.yml")
 fi

+if [[ ${MOUNT_FILES} == "true" ]]; then
+    DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/files.yml")
+fi
+
+if [[ ${CI} == "true" ]]; then
+    DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/ci.yml")
+fi
+
 if [[ ${FORWARD_CREDENTIALS} == "true" ]]; then
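
Editor's note: a sketch of how such an array of "-f file" pairs composes with the base file when invoking docker-compose, following the same pattern the ci_clear_tmp.sh and ci_fix_ownership.sh hunks below use; the exact call site is outside this hunk.

    # Assumed usage: expand the accumulated overlay files after the base file.
    docker-compose \
        -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \
        "${DOCKER_COMPOSE_LOCAL[@]}" \
        run airflow "${@}"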
@@ -116,7 +127,6 @@ done

 RUN_INTEGRATION_TESTS=${RUN_INTEGRATION_TESTS:=""}

 run_airflow_testing_in_docker "${@}"

 if [[ ${TEST_TYPE:=} == "Quarantined" ]]; then
@@ -40,5 +40,6 @@ export HOST_OS
 docker-compose \
     -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \
     -f "${SCRIPTS_CI_DIR}/docker-compose/local.yml" \
+    -f "${SCRIPTS_CI_DIR}/docker-compose/files.yml" \
     run --entrypoint /bin/bash \
     airflow -c /opt/airflow/scripts/ci/in_container/run_clear_tmp.sh
@@ -41,6 +41,7 @@ export BACKEND="sqlite"
 docker-compose \
     -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \
     -f "${SCRIPTS_CI_DIR}/docker-compose/local.yml" \
+    -f "${SCRIPTS_CI_DIR}/docker-compose/files.yml" \
     -f "${SCRIPTS_CI_DIR}/docker-compose/forward-credentials.yml" \
     run --entrypoint /bin/bash \
     airflow -c /opt/airflow/scripts/ci/in_container/run_fix_ownership.sh