Tests are working for newly added backport providers (#9739)

* Tests are working for newly added backport providers

Parent: 66c8af1f00
Commit: 070e3c3364
@@ -168,7 +168,7 @@ repos:
         types: [yaml]
         exclude: ^.*init_git_sync\.template\.yaml$|^.*airflow\.template\.yaml$|^chart/templates/.*\.yaml$
   - repo: https://github.com/timothycrosley/isort
-    rev: 5.0.3
+    rev: 5.0.6
     hooks:
       - id: isort
         name: Run isort to sort imports
@@ -1,3 +1,4 @@
+# pylint: disable=wrong-import-order
 #
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements. See the NOTICE file
@@ -35,9 +36,19 @@ from typing import Any, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple,
 
 from setuptools import Command, find_packages, setup as setuptools_setup
 
-from backport_packages.import_all_provider_classes import import_all_provider_classes
-from setup import PROVIDERS_REQUIREMENTS
-from tests.deprecated_classes import HOOKS, OPERATORS, SECRETS, SENSORS, TRANSFERS
+MY_DIR_PATH = os.path.dirname(__file__)
+SOURCE_DIR_PATH = os.path.abspath(os.path.join(MY_DIR_PATH, os.pardir))
+AIRFLOW_PATH = os.path.join(SOURCE_DIR_PATH, "airflow")
+PROVIDERS_PATH = os.path.join(AIRFLOW_PATH, "providers")
+
+sys.path.insert(0, SOURCE_DIR_PATH)
+
+# those imports need to come after the above sys.path.insert to make sure that Airflow
+# sources are importable without having to add the airflow sources to the PYTHONPATH before
+# running the script
+import tests.deprecated_classes  # noqa # isort:skip
+from backport_packages.import_all_provider_classes import import_all_provider_classes  # noqa # isort:skip
+from setup import PROVIDERS_REQUIREMENTS  # noqa # isort:skip
 
 # Note - we do not test protocols as they are not really part of the official API of
 # Apache Airflow
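The reordered hunk above works because sys.path.insert(0, ...) runs before the project-local imports, so Python resolves them against the Airflow sources rather than any installed package. A minimal sketch of the same pattern, assuming a hypothetical my_local_module next to the repository root:

    import os
    import sys

    # Compute the repository root relative to this file, as the diff does.
    SOURCE_DIR_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))

    # Prepending (index 0) makes this directory win over site-packages.
    sys.path.insert(0, SOURCE_DIR_PATH)

    # Imports that rely on the path change must come after the insert;
    # the "# isort:skip" marker stops isort from hoisting them back above it.
    import my_local_module  # noqa # isort:skip  (hypothetical module, for illustration)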
@@ -47,12 +58,6 @@ logger = logging.getLogger(__name__) # noqa
 
 PY3 = sys.version_info[0] == 3
 
-MY_DIR_PATH = os.path.dirname(__file__)
-
-SOURCE_DIR_PATH = os.path.abspath(os.path.join(MY_DIR_PATH, os.pardir))
-AIRFLOW_PATH = os.path.join(SOURCE_DIR_PATH, "airflow")
-PROVIDERS_PATH = os.path.join(AIRFLOW_PATH, "providers")
-
 
 class EntityType(Enum):
     Operators = "Operators"
@@ -184,11 +189,11 @@ import setup # From AIRFLOW_SOURCES/setup.py # noqa # isort:skip
 DEPENDENCIES_JSON_FILE = os.path.join(PROVIDERS_PATH, "dependencies.json")
 
 MOVED_ENTITIES: Dict[EntityType, Dict[str, str]] = {
-    EntityType.Operators: {value[0]: value[1] for value in OPERATORS},
-    EntityType.Sensors: {value[0]: value[1] for value in SENSORS},
-    EntityType.Hooks: {value[0]: value[1] for value in HOOKS},
-    EntityType.Secrets: {value[0]: value[1] for value in SECRETS},
-    EntityType.Transfers: {value[0]: value[1] for value in TRANSFERS},
+    EntityType.Operators: {value[0]: value[1] for value in tests.deprecated_classes.OPERATORS},
+    EntityType.Sensors: {value[0]: value[1] for value in tests.deprecated_classes.SENSORS},
+    EntityType.Hooks: {value[0]: value[1] for value in tests.deprecated_classes.HOOKS},
+    EntityType.Secrets: {value[0]: value[1] for value in tests.deprecated_classes.SECRETS},
+    EntityType.Transfers: {value[0]: value[1] for value in tests.deprecated_classes.TRANSFERS},
 }
 
 
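Each comprehension above turns a list of two-element tuples into a lookup table mapping value[0] to value[1]. A toy sketch with illustrative class paths (the real pairs, and which path comes first in each tuple, are defined in tests/deprecated_classes.py and are not shown in this diff):

    # Illustrative pairs only; real data lives in tests.deprecated_classes.
    OPERATORS = [
        ("airflow.providers.example.operators.foo.FooOperator",
         "airflow.contrib.operators.foo_operator.FooOperator"),
    ]

    # Same shape as the MOVED_ENTITIES values: first element -> second element.
    MOVED_OPERATORS = {value[0]: value[1] for value in OPERATORS}
    assert (MOVED_OPERATORS["airflow.providers.example.operators.foo.FooOperator"]
            == "airflow.contrib.operators.foo_operator.FooOperator")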
@@ -604,8 +609,7 @@ def get_package_class_summary(full_package_name: str, imported_classes: List[str
     from airflow.secrets import BaseSecretsBackend
     from airflow.sensors.base_sensor_operator import BaseSensorOperator
 
-    all_verified_entities: Dict[EntityType, VerifiedEntities] = {}
-    all_verified_entities[EntityType.Operators] = find_all_entities(
+    all_verified_entities: Dict[EntityType, VerifiedEntities] = {EntityType.Operators: find_all_entities(
         imported_classes=imported_classes,
         base_package=full_package_name,
         sub_package_pattern_match=r".*\.operators\..*",
@@ -621,39 +625,35 @@ def get_package_class_summary(full_package_name: str, imported_classes: List[str
             'CloudTextToSpeechSynthesizeOperator',
             'CloudSpeechToTextRecognizeSpeechOperator',
         }
-    )
-    all_verified_entities[EntityType.Sensors] = find_all_entities(
+    ), EntityType.Sensors: find_all_entities(
         imported_classes=imported_classes,
         base_package=full_package_name,
         sub_package_pattern_match=r".*\.sensors\..*",
         ancestor_match=BaseSensorOperator,
         expected_class_name_pattern=SENSORS_PATTERN,
         unexpected_class_name_patterns=ALL_PATTERNS - {OPERATORS_PATTERN, SENSORS_PATTERN}
-    )
-    all_verified_entities[EntityType.Hooks] = find_all_entities(
+    ), EntityType.Hooks: find_all_entities(
         imported_classes=imported_classes,
         base_package=full_package_name,
         sub_package_pattern_match=r".*\.hooks\..*",
         ancestor_match=BaseHook,
         expected_class_name_pattern=HOOKS_PATTERN,
         unexpected_class_name_patterns=ALL_PATTERNS - {HOOKS_PATTERN}
-    )
-    all_verified_entities[EntityType.Secrets] = find_all_entities(
+    ), EntityType.Secrets: find_all_entities(
         imported_classes=imported_classes,
         sub_package_pattern_match=r".*\.secrets\..*",
         base_package=full_package_name,
         ancestor_match=BaseSecretsBackend,
         expected_class_name_pattern=SECRETS_PATTERN,
         unexpected_class_name_patterns=ALL_PATTERNS - {SECRETS_PATTERN},
-    )
-    all_verified_entities[EntityType.Transfers] = find_all_entities(
+    ), EntityType.Transfers: find_all_entities(
         imported_classes=imported_classes,
         base_package=full_package_name,
         sub_package_pattern_match=r".*\.transfers\..*",
         ancestor_match=BaseOperator,
         expected_class_name_pattern=TRANSFERS_PATTERN,
         unexpected_class_name_patterns=ALL_PATTERNS - {OPERATORS_PATTERN, TRANSFERS_PATTERN},
-    )
+    )}
     for entity in EntityType:
         print_wrong_naming(entity, all_verified_entities[entity].wrong_entities)
 
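The two hunks above are one refactor: instead of starting from an empty dict and assigning the five keys one by one, the whole mapping becomes a single dict literal, so every EntityType key is present by construction. A reduced sketch of the pattern with toy names (compute is hypothetical, standing in for find_all_entities):

    def compute(key: str) -> str:
        return key.upper()

    # Before: build incrementally; a forgotten assignment is a latent KeyError.
    entities = {}
    entities["operators"] = compute("operators")
    entities["sensors"] = compute("sensors")

    # After: one literal, same resulting mapping.
    entities = {"operators": compute("operators"), "sensors": compute("sensors")}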
@@ -706,6 +706,8 @@ def convert_git_changes_to_table(changes: str, base_url: str) -> str:
     headers = ["Commit", "Committed", "Subject"]
     table_data = []
     for line in lines:
+        if line == "":
+            continue
         full_hash, short_hash, date, message = line.split(" ", maxsplit=3)
         table_data.append((f"[{short_hash}]({base_url}{full_hash})", date, message))
     return tabulate(table_data, headers=headers, tablefmt="pipe")
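The added guard matters because unpacking a blank line raises ValueError: "".split(" ", maxsplit=3) returns [""], one value instead of four. A short demonstration (the sample log line is made up, assuming the "full-hash short-hash date subject" format the code expects):

    line = ""
    try:
        full_hash, short_hash, date, message = line.split(" ", maxsplit=3)
    except ValueError as err:
        print(err)  # not enough values to unpack (expected 4, got 1)

    line = "070e3c3364aaaaaaaaaa 070e3c3 2020-07-09 Tests are working for newly added backport providers"
    full_hash, short_hash, date, message = line.split(" ", maxsplit=3)
    print(short_hash, date)  # 070e3c3 2020-07-09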
@@ -799,15 +801,16 @@ def get_all_releases(provider_package_path: str) -> List[ReleaseInfo]:
                 content = changes_file.read()
             found = re.search(r'/([a-z0-9]*)\)', content, flags=re.MULTILINE)
             if not found:
-                raise Exception(f"Commit not found in {changes_file_path}. Something is wrong there.")
-            last_commit_hash = found.group(1)
-            release_version = file_name[len(PROVIDERS_CHANGES_PREFIX):][:-3]
-            past_releases.append(
-                ReleaseInfo(release_version=release_version,
-                            release_version_no_leading_zeros=strip_leading_zeros(release_version),
-                            last_commit_hash=last_commit_hash,
-                            content=content,
-                            file_name=file_name))
+                print("No commit found. This seems to be first time you run it", file=sys.stderr)
+            else:
+                last_commit_hash = found.group(1)
+                release_version = file_name[len(PROVIDERS_CHANGES_PREFIX):][:-3]
+                past_releases.append(
+                    ReleaseInfo(release_version=release_version,
+                                release_version_no_leading_zeros=strip_leading_zeros(release_version),
+                                last_commit_hash=last_commit_hash,
+                                content=content,
+                                file_name=file_name))
     return past_releases
 
 
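re.search returns None when the pattern does not match, which is exactly the state of a freshly added provider whose changes file contains no commit link yet; the hunk downgrades that case from a hard failure to a notice on stderr. A self-contained sketch using the same pattern (the sample content string is made up):

    import re
    import sys

    content = "Initial changelog for a new provider, no commit link yet."
    found = re.search(r'/([a-z0-9]*)\)', content, flags=re.MULTILINE)
    if not found:
        # First run for a newly added backport provider: nothing to record.
        print("No commit found. This seems to be first time you run it", file=sys.stderr)
    else:
        last_commit_hash = found.group(1)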
@@ -818,7 +821,15 @@ def get_latest_release(provider_package_path: str) -> ReleaseInfo:
     :param provider_package_path: path of package
     :return: latest release information
     """
-    return get_all_releases(provider_package_path=provider_package_path)[0]
+    releases = get_all_releases(provider_package_path=provider_package_path)
+    if len(releases) == 0:
+        return ReleaseInfo(release_version="0.0.0",
+                           release_version_no_leading_zeros="0.0.0",
+                           last_commit_hash="no_hash",
+                           content="empty",
+                           file_name="no_file")
+    else:
+        return releases[0]
 
 
 def get_previous_release_info(previous_release_version: str,
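The fallback above returns a sentinel ReleaseInfo for providers that have no release yet. ReleaseInfo itself is not defined in this diff; judging by the keyword construction here and the NamedTuple import visible in the typing line earlier, it is plausibly a NamedTuple along these lines (an assumption, for illustration only):

    from typing import NamedTuple

    # Assumed shape of ReleaseInfo, inferred from the constructor calls in this diff.
    class ReleaseInfo(NamedTuple):
        release_version: str
        release_version_no_leading_zeros: str
        last_commit_hash: str
        content: str
        file_name: str

    # Sentinel used when a provider has no changes file yet (first release run).
    EMPTY_RELEASE = ReleaseInfo(release_version="0.0.0",
                                release_version_no_leading_zeros="0.0.0",
                                last_commit_hash="no_hash",
                                content="empty",
                                file_name="no_file")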