From 57da45685457520d51a0967e2aeb5e5ff162dfa7 Mon Sep 17 00:00:00 2001
From: Kaxil Naik
Date: Tue, 24 Dec 2019 00:17:02 +0000
Subject: [PATCH] [AIRFLOW-6333] Bump Pylint to 2.4.4 & fix/disable new checks
 (#6888)

---
 airflow/cli/commands/user_command.py              | 11 ++++++-----
 airflow/executors/base_executor.py                |  2 +-
 airflow/executors/celery_executor.py              |  2 +-
 airflow/executors/debug_executor.py               |  2 +-
 airflow/gcp/hooks/bigquery.py                     |  6 ++----
 airflow/models/baseoperator.py                    |  2 +-
 airflow/plugins_manager.py                        |  2 +-
 airflow/providers/amazon/aws/hooks/s3.py          |  4 +---
 airflow/utils/dag_processing.py                   |  7 ++++---
 pylintrc                                          |  3 ++-
 setup.py                                          |  3 +--
 tests/operators/test_mssql_to_hive.py             | 10 +++++++---
 tests/test_utils/get_all_tests.py                 |  2 +-
 tests/utils/log/elasticmock/fake_elasticsearch.py |  5 +----
 14 files changed, 30 insertions(+), 31 deletions(-)

diff --git a/airflow/cli/commands/user_command.py b/airflow/cli/commands/user_command.py
index b155d0ab34..a3db7bb3be 100644
--- a/airflow/cli/commands/user_command.py
+++ b/airflow/cli/commands/user_command.py
@@ -22,6 +22,7 @@ import os
 import random
 import re
 import string
+import sys
 
 from tabulate import tabulate
 
@@ -162,7 +163,7 @@ def users_import(args):
     json_file = getattr(args, 'import')
     if not os.path.exists(json_file):
         print("File '{}' does not exist")
-        exit(1)
+        sys.exit(1)
 
     users_list = None  # pylint: disable=redefined-outer-name
     try:
@@ -170,7 +171,7 @@ def users_import(args):
             users_list = json.loads(file.read())
     except ValueError as e:
         print("File '{}' is not valid JSON. Error: {}".format(json_file, e))
-        exit(1)
+        sys.exit(1)
 
     users_created, users_updated = _import_users(users_list)
     if users_created:
@@ -194,7 +195,7 @@ def _import_users(users_list):  # pylint: disable=redefined-outer-name
             if not role:
                 valid_roles = appbuilder.sm.get_all_roles()
                 print("Error: '{}' is not a valid role. Valid roles are: {}".format(rolename, valid_roles))
-                exit(1)
+                sys.exit(1)
             else:
                 roles.append(role)
 
@@ -204,7 +205,7 @@ def _import_users(users_list):  # pylint: disable=redefined-outer-name
             if not user.get(field):
                 print("Error: '{}' is a required field, but was not "
                       "specified".format(field))
-                exit(1)
+                sys.exit(1)
 
         existing_user = appbuilder.sm.find_user(email=user['email'])
         if existing_user:
@@ -217,7 +218,7 @@ def _import_users(users_list):  # pylint: disable=redefined-outer-name
                 print("Error: Changing the username is not allowed - "
                       "please delete and recreate the user with "
                       "email '{}'".format(user['email']))
-                exit(1)
+                sys.exit(1)
 
             appbuilder.sm.update_user(existing_user)
             users_updated.append(user['email'])

diff --git a/airflow/executors/base_executor.py b/airflow/executors/base_executor.py
index 4e99d14cfe..7978a6608f 100644
--- a/airflow/executors/base_executor.py
+++ b/airflow/executors/base_executor.py
@@ -157,7 +157,7 @@ class BaseExecutor(LoggingMixin):
         :param open_slots: Number of open slots
         """
         sorted_queue = sorted(
-            [(k, v) for k, v in self.queued_tasks.items()],
+            [(k, v) for k, v in self.queued_tasks.items()],  # pylint: disable=unnecessary-comprehension
             key=lambda x: x[1][1],
             reverse=True)
         for _ in range(min((open_slots, len(self.queued_tasks)))):

diff --git a/airflow/executors/celery_executor.py b/airflow/executors/celery_executor.py
index 906531757d..1ce20d5aa8 100644
--- a/airflow/executors/celery_executor.py
+++ b/airflow/executors/celery_executor.py
@@ -241,7 +241,7 @@ class CeleryExecutor(BaseExecutor):
         :return: List of tuples from the queued_tasks according to the priority.
         """
         return sorted(
-            [(k, v) for k, v in self.queued_tasks.items()],
+            [(k, v) for k, v in self.queued_tasks.items()],  # pylint: disable=unnecessary-comprehension
             key=lambda x: x[1][1],
             reverse=True)
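
The three executor hunks above all appease the unnecessary-comprehension check
introduced in Pylint 2.4, which flags comprehensions that merely copy an
iterable. A minimal sketch of what the check objects to and the spelling it
suggests (the dict below is an illustrative stand-in, not Airflow's real
queued_tasks):

    # Stand-in for BaseExecutor.queued_tasks: key -> (command, priority).
    queued_tasks = {"task_a": ("cmd_a", 1), "task_b": ("cmd_b", 3)}

    # Flagged by pylint 2.4: the comprehension only re-wraps .items().
    pairs = [(k, v) for k, v in queued_tasks.items()]

    # Equivalent, comprehension-free spelling that satisfies the check:
    pairs = list(queued_tasks.items())

    # The executors sort by priority (x[1][1]) either way; the patch keeps the
    # comprehension and silences the check inline instead of rewriting it.
    sorted_queue = sorted(queued_tasks.items(), key=lambda x: x[1][1], reverse=True)
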
""" return sorted( - [(k, v) for k, v in self.queued_tasks.items()], + [(k, v) for k, v in self.queued_tasks.items()], # pylint: disable=unnecessary-comprehension key=lambda x: x[1][1], reverse=True) diff --git a/airflow/executors/debug_executor.py b/airflow/executors/debug_executor.py index c57380fc72..58f0cb785f 100644 --- a/airflow/executors/debug_executor.py +++ b/airflow/executors/debug_executor.py @@ -122,7 +122,7 @@ class DebugExecutor(BaseExecutor): :param open_slots: Number of open slots """ sorted_queue = sorted( - [(k, v) for k, v in self.queued_tasks.items()], + [(k, v) for k, v in self.queued_tasks.items()], # pylint: disable=unnecessary-comprehension key=lambda x: x[1][1], reverse=True, ) diff --git a/airflow/gcp/hooks/bigquery.py b/airflow/gcp/hooks/bigquery.py index 96b25ce0fb..b95c02e4e7 100644 --- a/airflow/gcp/hooks/bigquery.py +++ b/airflow/gcp/hooks/bigquery.py @@ -2207,8 +2207,7 @@ class BigQueryCursor(BigQueryBaseCursor): one = self.fetchone() if one is None: break - else: - result.append(one) + result.append(one) return result def fetchall(self) -> List[List]: @@ -2221,8 +2220,7 @@ class BigQueryCursor(BigQueryBaseCursor): one = self.fetchone() if one is None: break - else: - result.append(one) + result.append(one) return result def get_arraysize(self) -> int: diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py index b179d67d00..40ffdf0ee3 100644 --- a/airflow/models/baseoperator.py +++ b/airflow/models/baseoperator.py @@ -794,7 +794,7 @@ class BaseOperator(Operator, LoggingMixin): if self.template_ext: # pylint: disable=too-many-nested-blocks for field in self.template_fields: content = getattr(self, field, None) - if content is None: + if content is None: # pylint: disable=no-else-continue continue elif isinstance(content, str) and \ any([content.endswith(ext) for ext in self.template_ext]): diff --git a/airflow/plugins_manager.py b/airflow/plugins_manager.py index 5e3bb1725e..31a743be72 100644 --- a/airflow/plugins_manager.py +++ b/airflow/plugins_manager.py @@ -261,7 +261,7 @@ for p in plugins: if p.stat_name_handler: stat_name_handlers.append(p.stat_name_handler) global_operator_extra_links.extend(p.global_operator_extra_links) - operator_extra_links.extend([ope for ope in p.operator_extra_links]) + operator_extra_links.extend(list(p.operator_extra_links)) registered_operator_link_classes.update({ "{}.{}".format(link.__class__.__module__, diff --git a/airflow/providers/amazon/aws/hooks/s3.py b/airflow/providers/amazon/aws/hooks/s3.py index 7a1fc2daf4..4a5af73fbf 100644 --- a/airflow/providers/amazon/aws/hooks/s3.py +++ b/airflow/providers/amazon/aws/hooks/s3.py @@ -608,9 +608,7 @@ class S3Hook(AwsHook): keys to delete. 
diff --git a/airflow/utils/dag_processing.py b/airflow/utils/dag_processing.py
index 0e8ea1770f..97b8860035 100644
--- a/airflow/utils/dag_processing.py
+++ b/airflow/utils/dag_processing.py
@@ -109,7 +109,7 @@ class SimpleDag(BaseDag):
         return self._concurrency
 
     @property
-    def is_paused(self) -> bool:
+    def is_paused(self) -> bool:  # pylint: disable=invalid-overridden-method
         """
         :return: whether this DAG is paused or not
         :rtype: bool
@@ -117,7 +117,7 @@ class SimpleDag(BaseDag):
         return self._is_paused
 
     @property
-    def pickle_id(self) -> Optional[str]:
+    def pickle_id(self) -> Optional[str]:  # pylint: disable=invalid-overridden-method
         """
         :return: The pickle ID for this DAG, if it has one. Otherwise None.
         :rtype: unicode
@@ -635,6 +635,7 @@ class DagFileProcessorManager(LoggingMixin):  # pylint: disable=too-many-instanc
         while True:
             loop_start_time = time.time()
 
+            # pylint: disable=no-else-break
             if self._signal_conn.poll(poll_time):
                 agent_signal = self._signal_conn.recv()
                 self.log.debug("Received %s signal from DagFileProcessorAgent", agent_signal)
@@ -652,7 +653,7 @@ class DagFileProcessorManager(LoggingMixin):  # pylint: disable=too-many-instanc
                 # are told to (as that would open another connection to the
                 # SQLite DB, which isn't a good practice)
                 continue
-
+            # pylint: enable=no-else-break
             self._refresh_dag_dir()
             self._find_zombies()  # pylint: disable=no-value-for-parameter

diff --git a/pylintrc b/pylintrc
index 425fdf5caf..f0fd0f6d35 100644
--- a/pylintrc
+++ b/pylintrc
@@ -169,7 +169,8 @@ disable=print-statement,
         import-error,  # Requires installing Airflow environment in CI task which takes long, therefore ignored. Tests should fail anyways if deps are missing. Possibly un-ignore in the future if we ever use pre-built Docker images for CI.
         fixme,  # There should be a good reason for adding a TODO
         pointless-statement,  # Is raised on the bitshift operator. Could be disabled only on /example_dags after https://github.com/PyCQA/pylint/projects/1.
-        ungrouped-imports  # Disabled to avoid conflict with isort import order rules, which is enabled in the project.
+        ungrouped-imports,  # Disabled to avoid conflict with isort import order rules, which is enabled in the project.
+        import-outside-toplevel,  # We import outside toplevel to avoid cyclic imports
 
 # Enable the message, report, category or checker with the given id(s). You can
 # either give multiple identifier separated by comma (,) or put this option
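
The new import-outside-toplevel entry in pylintrc is the one project-wide
policy change in this patch: Pylint 2.4 flags imports placed inside functions,
but Airflow deliberately defers some imports to break circular-import chains.
A runnable sketch of the pattern the entry keeps legal (the function is
illustrative and uses a stdlib module as a stand-in; real Airflow call sites
defer airflow.* imports instead):

    import sys

    def cli_entrypoint(argv):
        # Deferred, function-local import: with import-outside-toplevel
        # enabled, Pylint 2.4 would flag the next line. Airflow uses this
        # pattern where a module-level import would complete an import cycle.
        import json
        print(json.dumps({"args": argv}))

    cli_entrypoint(sys.argv[1:])
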
diff --git a/setup.py b/setup.py
index c8e5392b3b..41c5faebdc 100644
--- a/setup.py
+++ b/setup.py
@@ -384,8 +384,7 @@ devel = [
     'parameterized',
     'paramiko',
     'pre-commit',
-    'pylint~=2.3.1',  # to be upgraded after fixing https://github.com/PyCQA/pylint/issues/3123
-                      # We should also disable checking docstring at the module level
+    'pylint~=2.4',
     'pysftp',
     'pytest',
     'pytest-cov',

diff --git a/tests/operators/test_mssql_to_hive.py b/tests/operators/test_mssql_to_hive.py
index 40e0680230..997b37cf03 100644
--- a/tests/operators/test_mssql_to_hive.py
+++ b/tests/operators/test_mssql_to_hive.py
@@ -40,18 +40,22 @@ class TestMsSqlToHiveTransfer(unittest.TestCase):
             dag=None
         )
 
+    # pylint: disable=c-extension-no-member
     def test_type_map_binary(self):
-        mapped_type = MsSqlToHiveTransfer(**self.kwargs).type_map(pymssql.BINARY.value)
+        mapped_type = MsSqlToHiveTransfer(
+            **self.kwargs).type_map(pymssql.BINARY.value)  # pylint: disable=c-extension-no-member
 
         self.assertEqual(mapped_type, 'INT')
 
     def test_type_map_decimal(self):
-        mapped_type = MsSqlToHiveTransfer(**self.kwargs).type_map(pymssql.DECIMAL.value)
+        mapped_type = MsSqlToHiveTransfer(
+            **self.kwargs).type_map(pymssql.DECIMAL.value)  # pylint: disable=c-extension-no-member
 
         self.assertEqual(mapped_type, 'FLOAT')
 
     def test_type_map_number(self):
-        mapped_type = MsSqlToHiveTransfer(**self.kwargs).type_map(pymssql.NUMBER.value)
+        mapped_type = MsSqlToHiveTransfer(
+            **self.kwargs).type_map(pymssql.NUMBER.value)  # pylint: disable=c-extension-no-member
 
         self.assertEqual(mapped_type, 'INT')

diff --git a/tests/test_utils/get_all_tests.py b/tests/test_utils/get_all_tests.py
index 989210a7c2..0ac4028c19 100644
--- a/tests/test_utils/get_all_tests.py
+++ b/tests/test_utils/get_all_tests.py
@@ -71,6 +71,6 @@ def print_all_cases(xunit_test_file_path):
 if __name__ == '__main__':
     if len(sys.argv) < 2:
         print("Please provide name of xml unit file as first parameter")
-        exit(1)
+        sys.exit(1)
     file_name = sys.argv[1]
     print_all_cases(file_name)

diff --git a/tests/utils/log/elasticmock/fake_elasticsearch.py b/tests/utils/log/elasticmock/fake_elasticsearch.py
index a4c4646bc4..b845d9f243 100644
--- a/tests/utils/log/elasticmock/fake_elasticsearch.py
+++ b/tests/utils/log/elasticmock/fake_elasticsearch.py
@@ -138,10 +138,7 @@ class FakeElasticsearch(Elasticsearch):
     def find_document(self, doc_type, id, index, result):
         for document in self.__documents_dict[index]:
             if document.get('_id') == id:
-                if doc_type == '_all':
-                    result = document
-                    break
-                elif document.get('_type') == doc_type:
+                if doc_type == '_all' or document.get('_type') == doc_type:
                     result = document
                     break
         return result
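
Finally, the recurring exit(1) -> sys.exit(1) substitution in user_command.py
and get_all_tests.py is more than style. A short sketch of the rationale,
reusing the shape of the CLI guard in get_all_tests.py:

    import sys

    # The bare exit() builtin is injected by the `site` module for interactive
    # sessions and can be missing entirely (e.g. `python -S script.py`), so
    # scripts and libraries should not rely on it. sys.exit() is always
    # available after `import sys` and raises the same SystemExit.
    if len(sys.argv) < 2:
        print("Please provide name of xml unit file as first parameter")
        sys.exit(1)
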