# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import os
import re
import subprocess
import time
import unittest
from datetime import datetime
from subprocess import check_call, check_output

import requests
import requests.exceptions
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

CLUSTER_FORWARDED_PORT = os.environ.get('CLUSTER_FORWARDED_PORT') or "8080"
KUBERNETES_HOST_PORT = (os.environ.get('CLUSTER_HOST') or "localhost") + ":" + CLUSTER_FORWARDED_PORT

print()
print(f"Cluster host/port used: {KUBERNETES_HOST_PORT}")
print()


class TestKubernetesExecutor(unittest.TestCase):
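    """Integration tests that run example DAGs on the KubernetesExecutor.

    The tests assume an Airflow webserver is reachable at ``KUBERNETES_HOST_PORT``
    and that ``kubectl`` is configured against the cluster running Airflow.
    """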

    @staticmethod
    def _describe_resources(namespace: str):
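        """Print ``kubectl describe`` output for pods, persistent volumes and
        persistent volume claims in ``namespace``, for debugging failed checks."""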
        print("=" * 80)
        print(f"Describe resources for namespace {namespace}")
        print(f"Datetime: {datetime.utcnow()}")
        print("=" * 80)
        print("Describing pods")
        print("-" * 80)
        subprocess.call(["kubectl", "describe", "pod", "--namespace", namespace])
        print("=" * 80)
        print("Describing persistent volumes")
        print("-" * 80)
        subprocess.call(["kubectl", "describe", "pv", "--namespace", namespace])
        print("=" * 80)
        print("Describing persistent volume claims")
        print("-" * 80)
        subprocess.call(["kubectl", "describe", "pvc", "--namespace", namespace])
        print("=" * 80)

    @staticmethod
    def _num_pods_in_namespace(namespace):
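        """Return the number of pods in ``namespace`` whose name contains 'airflow'."""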
        air_pod = check_output(['kubectl', 'get', 'pods', '-n', namespace]).decode()
        air_pod = air_pod.split('\n')
        names = [re.compile(r'\s+').split(x)[0] for x in air_pod if 'airflow' in x]
        return len(names)

    @staticmethod
    def _delete_airflow_pod(name=''):
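        """Delete the first pod whose name contains ``airflow-<name>``
        (or just 'airflow' when no name is given)."""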
        suffix = '-' + name if name else ''
        air_pod = check_output(['kubectl', 'get', 'pods']).decode()
        air_pod = air_pod.split('\n')
        names = [re.compile(r'\s+').split(x)[0] for x in air_pod if 'airflow' + suffix in x]
        if names:
            check_call(['kubectl', 'delete', 'pod', names[0]])

    def _get_session_with_retries(self):
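        """Build a requests session that retries failed HTTP calls up to three times with backoff."""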
        session = requests.Session()
        retries = Retry(total=3, backoff_factor=1)
        session.mount('http://', HTTPAdapter(max_retries=retries))
        session.mount('https://', HTTPAdapter(max_retries=retries))
        return session

    def _ensure_airflow_webserver_is_healthy(self):
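        """Fail fast if the Airflow webserver's /health endpoint does not answer with HTTP 200."""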
        response = self.session.get(
            f"http://{KUBERNETES_HOST_PORT}/health",
            timeout=1,
        )
        assert response.status_code == 200

    def setUp(self):
        self.session = self._get_session_with_retries()
        self._ensure_airflow_webserver_is_healthy()

    def tearDown(self):
        self.session.close()

    def monitor_task(self, host, execution_date, dag_id, task_id, expected_final_state, timeout):
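        """Poll the experimental task API every 5 seconds until the task reaches
        ``expected_final_state`` or ``timeout`` seconds elapse, then assert on the final state.
        """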
        tries = 0
        state = ''
        max_tries = max(int(timeout / 5), 1)
        # Wait some time for the operator to complete
        while tries < max_tries:
            time.sleep(5)
            # Query the task state through the experimental API
            try:
                get_string = (
                    f'http://{host}/api/experimental/dags/{dag_id}/'
                    f'dag_runs/{execution_date}/tasks/{task_id}'
                )
                print(f"Calling [monitor_task]#1 {get_string}")
                result = self.session.get(get_string)
                if result.status_code == 404:
                    check_call(["echo", "api returned 404."])
                    tries += 1
                    continue
                assert result.status_code == 200, "Could not get the status"
                result_json = result.json()
                print(f"Received [monitor_task]#2: {result_json}")
                state = result_json['state']
                print(f"Attempt {tries}: Current state of operator is {state}")

                if state == expected_final_state:
                    break
                self._describe_resources(namespace="airflow")
                self._describe_resources(namespace="default")
                tries += 1
            except requests.exceptions.ConnectionError as e:
                check_call(["echo", f"api call failed. trying again. error {e}"])
        if state != expected_final_state:
            print(f"The final state is wrong: {state} != {expected_final_state} (expected)!")
        assert state == expected_final_state

    def ensure_dag_expected_state(self, host, execution_date, dag_id, expected_final_state, timeout):
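        """Poll the experimental dag_runs API every 5 seconds until the dag run reaches
        ``expected_final_state`` or ``timeout`` seconds elapse, then assert on the final state.
        """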
        tries = 0
        state = ''
        max_tries = max(int(timeout / 5), 1)
        # Wait some time for the operator to complete
        while tries < max_tries:
            time.sleep(5)
            get_string = f'http://{host}/api/experimental/dags/{dag_id}/dag_runs/{execution_date}'
            print(f"Calling {get_string}")
            # Query the dag run state through the experimental API
            result = self.session.get(get_string)
            assert result.status_code == 200, "Could not get the status"
            result_json = result.json()
            print(f"Received: {result_json}")
            state = result_json['state']
            check_call(["echo", f"Attempt {tries}: Current state of dag is {state}"])
            print(f"Attempt {tries}: Current state of dag is {state}")

            if state == expected_final_state:
                break
            self._describe_resources("airflow")
            self._describe_resources("default")
            tries += 1
        assert state == expected_final_state

        # Maybe check if we can retrieve the logs, but then we need to extend the API

    def start_dag(self, dag_id, host):
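        """Unpause ``dag_id`` via the experimental API, trigger a new dag run and
        return the JSON payload of the ``latest_runs`` endpoint.
        """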
        get_string = f'http://{host}/api/experimental/dags/{dag_id}/paused/false'
        print(f"Calling [start_dag]#1 {get_string}")
        result = self.session.get(get_string)
        try:
            result_json = result.json()
        except ValueError:
            result_json = str(result)
        print(f"Received [start_dag]#1 {result_json}")
        assert result.status_code == 200, f"Could not enable DAG: {result_json}"
        post_string = f'http://{host}/api/experimental/dags/{dag_id}/dag_runs'
        print(f"Calling [start_dag]#2 {post_string}")
        # Trigger a new dagrun
        result = self.session.post(post_string, json={})
        try:
            result_json = result.json()
        except ValueError:
            result_json = str(result)
        print(f"Received [start_dag]#2 {result_json}")
        assert result.status_code == 200, f"Could not trigger a DAG-run: {result_json}"

        time.sleep(1)

        get_string = f'http://{host}/api/experimental/latest_runs'
        print(f"Calling [start_dag]#3 {get_string}")
        result = self.session.get(get_string)
        assert result.status_code == 200, f"Could not get the latest DAG-run: {result.json()}"
        result_json = result.json()
        print(f"Received: [start_dag]#3 {result_json}")
        return result_json

    def start_job_in_kubernetes(self, dag_id, host):
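        """Start ``dag_id`` via :meth:`start_dag` and return the execution_date of the run it created."""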
        result_json = self.start_dag(dag_id=dag_id, host=host)
        assert len(result_json['items']) > 0
        execution_date = None
        for dag_run in result_json['items']:
            if dag_run['dag_id'] == dag_id:
                execution_date = dag_run['execution_date']
                break
        assert execution_date is not None, f"No execution_date can be found for the dag with dag_id {dag_id}"
        return execution_date

    def test_integration_run_dag(self):
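        """Trigger the example DAG and verify that its start_task and the dag run itself succeed."""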
        host = KUBERNETES_HOST_PORT
        dag_id = 'example_kubernetes_executor_config'

        execution_date = self.start_job_in_kubernetes(dag_id, host)
        print(f"Found the job with execution date {execution_date}")

        # Wait some time for the operator to complete
        self.monitor_task(
            host=host,
            execution_date=execution_date,
            dag_id=dag_id,
            task_id='start_task',
            expected_final_state='success',
            timeout=300,
        )

        self.ensure_dag_expected_state(
            host=host,
            execution_date=execution_date,
            dag_id=dag_id,
            expected_final_state='success',
            timeout=300,
        )

    def test_integration_run_dag_with_scheduler_failure(self):
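        """Kill the scheduler pod mid-run and verify the DAG still finishes successfully,
        including the task that runs in another namespace, whose pods must be cleaned up.
        """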
        host = KUBERNETES_HOST_PORT
        dag_id = 'example_kubernetes_executor_config'

        execution_date = self.start_job_in_kubernetes(dag_id, host)

        self._delete_airflow_pod("scheduler")

        time.sleep(10)  # give time for pod to restart

        # Wait some time for the operator to complete
        self.monitor_task(
            host=host,
            execution_date=execution_date,
            dag_id=dag_id,
            task_id='start_task',
            expected_final_state='success',
            timeout=300,
        )

        self.monitor_task(
            host=host,
            execution_date=execution_date,
            dag_id=dag_id,
            task_id='other_namespace_task',
            expected_final_state='success',
            timeout=300,
        )

        self.ensure_dag_expected_state(
            host=host,
            execution_date=execution_date,
            dag_id=dag_id,
            expected_final_state='success',
            timeout=300,
        )

        assert self._num_pods_in_namespace('test-namespace') == 0, "failed to delete pods in other namespace"