[AIRFLOW-6333] Bump Pylint to 2.4.4 & fix/disable new checks (#6888)
This commit is contained in:
Parent 667b8b4d1a
Commit 57da456854
@@ -22,6 +22,7 @@ import os
 import random
 import re
 import string
+import sys
 
 from tabulate import tabulate
 
@@ -162,7 +163,7 @@ def users_import(args):
     json_file = getattr(args, 'import')
     if not os.path.exists(json_file):
         print("File '{}' does not exist")
-        exit(1)
+        sys.exit(1)
 
     users_list = None  # pylint: disable=redefined-outer-name
     try:
@@ -170,7 +171,7 @@ def users_import(args):
         users_list = json.loads(file.read())
     except ValueError as e:
         print("File '{}' is not valid JSON. Error: {}".format(json_file, e))
-        exit(1)
+        sys.exit(1)
 
     users_created, users_updated = _import_users(users_list)
     if users_created:
@@ -194,7 +195,7 @@ def _import_users(users_list):  # pylint: disable=redefined-outer-name
             if not role:
                 valid_roles = appbuilder.sm.get_all_roles()
                 print("Error: '{}' is not a valid role. Valid roles are: {}".format(rolename, valid_roles))
-                exit(1)
+                sys.exit(1)
             else:
                 roles.append(role)
 
@@ -204,7 +205,7 @@ def _import_users(users_list):  # pylint: disable=redefined-outer-name
             if not user.get(field):
                 print("Error: '{}' is a required field, but was not "
                       "specified".format(field))
-                exit(1)
+                sys.exit(1)
 
         existing_user = appbuilder.sm.find_user(email=user['email'])
         if existing_user:
@@ -217,7 +218,7 @@ def _import_users(users_list):  # pylint: disable=redefined-outer-name
                 print("Error: Changing the username is not allowed - "
                       "please delete and recreate the user with "
                       "email '{}'".format(user['email']))
-                exit(1)
+                sys.exit(1)
 
             appbuilder.sm.update_user(existing_user)
             users_updated.append(user['email'])
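The hunks above replace the built-in exit() with sys.exit(). pylint 2.4 introduced consider-using-sys-exit (R1722) for exactly this pattern: exit() and quit() are injected by the site module and are absent when Python starts with -S, while sys.exit() always works. A minimal sketch of the fixed pattern (main and the argument check are illustrative, not from the commit):

    import sys

    def main(argv):
        if len(argv) < 2:
            print("missing argument")
            sys.exit(1)  # exit(1) here would trip consider-using-sys-exit

    if __name__ == '__main__':
        main(sys.argv)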
@@ -157,7 +157,7 @@ class BaseExecutor(LoggingMixin):
         :param open_slots: Number of open slots
         """
         sorted_queue = sorted(
-            [(k, v) for k, v in self.queued_tasks.items()],
+            [(k, v) for k, v in self.queued_tasks.items()],  # pylint: disable=unnecessary-comprehension
             key=lambda x: x[1][1],
             reverse=True)
         for _ in range(min((open_slots, len(self.queued_tasks)))):
@@ -241,7 +241,7 @@ class CeleryExecutor(BaseExecutor):
         :return: List of tuples from the queued_tasks according to the priority.
         """
         return sorted(
-            [(k, v) for k, v in self.queued_tasks.items()],
+            [(k, v) for k, v in self.queued_tasks.items()],  # pylint: disable=unnecessary-comprehension
             key=lambda x: x[1][1],
             reverse=True)
 
@@ -122,7 +122,7 @@ class DebugExecutor(BaseExecutor):
         :param open_slots: Number of open slots
         """
         sorted_queue = sorted(
-            [(k, v) for k, v in self.queued_tasks.items()],
+            [(k, v) for k, v in self.queued_tasks.items()],  # pylint: disable=unnecessary-comprehension
             key=lambda x: x[1][1],
             reverse=True,
         )
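All three executor hunks hit the same new check, unnecessary-comprehension (R1721, added in pylint 2.4): a comprehension that only re-emits its items could be a plain constructor call. Here the warning is suppressed rather than fixed, presumably to keep the diff minimal; a sketch of both options (queued_tasks is a stand-in shaped like the executors' dict):

    queued_tasks = {'key_a': (None, 2), 'key_b': (None, 1)}

    pairs = [(k, v) for k, v in queued_tasks.items()]  # flagged by R1721
    pairs = list(queued_tasks.items())                 # equivalent rewrite
    # sorted() also accepts the items view directly, no intermediate list needed:
    ordered = sorted(queued_tasks.items(), key=lambda x: x[1][1], reverse=True)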
@@ -2207,8 +2207,7 @@ class BigQueryCursor(BigQueryBaseCursor):
             one = self.fetchone()
             if one is None:
                 break
-            else:
-                result.append(one)
+            result.append(one)
         return result
 
     def fetchall(self) -> List[List]:
@@ -2221,8 +2220,7 @@ class BigQueryCursor(BigQueryBaseCursor):
             one = self.fetchone()
             if one is None:
                 break
-            else:
-                result.append(one)
+            result.append(one)
         return result
 
     def get_arraysize(self) -> int:
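Both BigQueryCursor hunks fix no-else-break (R1723, new in pylint 2.4): when a branch ends in break, the else: is redundant and its body can be dedented. (The BaseOperator hunk below suppresses the sibling check no-else-continue instead of restructuring.) A self-contained sketch of the refactored loop, with fetch as a stand-in for self.fetchone:

    def drain(fetch, size):
        result = []
        for _ in range(size):
            one = fetch()
            if one is None:
                break
            result.append(one)  # no else: needed, the break already diverted flow
        return result

    print(drain(iter([1, 2, None, 3]).__next__, 10))  # [1, 2]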
@@ -794,7 +794,7 @@ class BaseOperator(Operator, LoggingMixin):
         if self.template_ext:  # pylint: disable=too-many-nested-blocks
             for field in self.template_fields:
                 content = getattr(self, field, None)
-                if content is None:
+                if content is None:  # pylint: disable=no-else-continue
                     continue
                 elif isinstance(content, str) and \
                         any([content.endswith(ext) for ext in self.template_ext]):
@@ -261,7 +261,7 @@ for p in plugins:
     if p.stat_name_handler:
         stat_name_handlers.append(p.stat_name_handler)
     global_operator_extra_links.extend(p.global_operator_extra_links)
-    operator_extra_links.extend([ope for ope in p.operator_extra_links])
+    operator_extra_links.extend(list(p.operator_extra_links))
 
     registered_operator_link_classes.update({
         "{}.{}".format(link.__class__.__module__,
@@ -608,9 +608,7 @@ class S3Hook(AwsHook):
             keys to delete.
         :type keys: str or list
         """
-        if isinstance(keys, list):
-            keys = keys
-        else:
+        if isinstance(keys, str):
             keys = [keys]
 
         delete_dict = {"Objects": [{"Key": k} for k in keys]}
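The S3Hook change removes a keys = keys statement, which pylint now reports as self-assigning-variable (W0127); inverting the isinstance test drops the no-op branch entirely. A sketch of the resulting shape (normalize_keys is an illustrative name, not from the commit):

    def normalize_keys(keys):
        """Accept a single key or a list of keys, always return a list."""
        if isinstance(keys, str):
            keys = [keys]  # wrap the single-key case; lists pass through
        return keys

    print(normalize_keys("logs/a.txt"))  # ['logs/a.txt']
    print(normalize_keys(["x", "y"]))    # ['x', 'y']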
@@ -109,7 +109,7 @@ class SimpleDag(BaseDag):
         return self._concurrency
 
     @property
-    def is_paused(self) -> bool:
+    def is_paused(self) -> bool:  # pylint: disable=invalid-overridden-method
         """
         :return: whether this DAG is paused or not
         :rtype: bool
@@ -117,7 +117,7 @@ class SimpleDag(BaseDag):
         return self._is_paused
 
     @property
-    def pickle_id(self) -> Optional[str]:
+    def pickle_id(self) -> Optional[str]:  # pylint: disable=invalid-overridden-method
         """
         :return: The pickle ID for this DAG, if it has one. Otherwise None.
         :rtype: unicode
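Both SimpleDag hunks suppress invalid-overridden-method (W0236, new in pylint 2.4), which fires when an override changes the kind of an attribute; presumably BaseDag declares is_paused and pickle_id as plain methods while SimpleDag exposes them as properties. A minimal sketch of the situation:

    class Base:
        def is_paused(self):         # plain method: obj.is_paused()
            return False

    class Simple(Base):
        def __init__(self):
            self._is_paused = True

        @property
        def is_paused(self):         # property override: obj.is_paused -- W0236 fires
            return self._is_paused

    print(Simple().is_paused)  # True, read without parentheses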
@@ -635,6 +635,7 @@ class DagFileProcessorManager(LoggingMixin):  # pylint: disable=too-many-instanc
         while True:
             loop_start_time = time.time()
 
+            # pylint: disable=no-else-break
             if self._signal_conn.poll(poll_time):
                 agent_signal = self._signal_conn.recv()
                 self.log.debug("Recived %s singal from DagFileProcessorAgent", agent_signal)
@@ -652,7 +653,7 @@ class DagFileProcessorManager(LoggingMixin):  # pylint: disable=too-many-instanc
                 # are told to (as that would open another connection to the
                 # SQLite DB which isn't a good practice
                 continue
-
+            # pylint: enable=no-else-break
             self._refresh_dag_dir()
             self._find_zombies()  # pylint: disable=no-value-for-parameter
 
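These two hunks bracket a whole branch chain with a disable/enable pair instead of annotating one line: a standalone # pylint: disable=... comment silences the check from that line to the end of the scope, and the matching # pylint: enable=... restores it, so only the annotated region is exempt. A minimal sketch of the pattern (poll is a stand-in for self._signal_conn.poll):

    def run_until_signal(poll):
        while True:
            # pylint: disable=no-else-break
            if poll():
                break
            else:
                continue
            # pylint: enable=no-else-break

    run_until_signal(lambda: True)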
pylintrc (3 changed lines)
@@ -169,7 +169,8 @@ disable=print-statement,
         import-error, # Requires installing Airflow environment in CI task which takes long, therefore ignored. Tests should fail anyways if deps are missing. Possibly un-ignore in the future if we ever use pre-built Docker images for CI.
         fixme, # There should be a good reason for adding a TODO
         pointless-statement, # Is raised on the bitshift operator. Could be disabled only on /example_dags after https://github.com/PyCQA/pylint/projects/1.
-        ungrouped-imports # Disabled to avoid conflict with isort import order rules, which is enabled in the project.
+        ungrouped-imports, # Disabled to avoid conflict with isort import order rules, which is enabled in the project.
+        import-outside-toplevel, # We import outside toplevel to avoid cyclic imports
 
 # Enable the message, report, category or checker with the given id(s). You can
 # either give multiple identifier separated by comma (,) or put this option
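The pylintrc hunk disables import-outside-toplevel (C0415, new in pylint 2.4) project-wide (and adds the trailing comma the previous last entry was missing). The check flags imports inside functions, but deferring an import is the standard way to break a circular dependency, which the inline comment cites as the reason. A minimal sketch of the pattern being exempted (the function and payload are illustrative):

    def render_config():
        # imported here rather than at module scope to avoid an import cycle
        import json
        return json.dumps({"key": "value"})

    print(render_config())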
setup.py (3 changed lines)
@@ -384,8 +384,7 @@ devel = [
     'parameterized',
     'paramiko',
     'pre-commit',
-    'pylint~=2.3.1',  # to be upgraded after fixing https://github.com/PyCQA/pylint/issues/3123
-    # We should also disable checking docstring at the module level
+    'pylint~=2.4',
     'pysftp',
     'pytest',
     'pytest-cov',
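The pin moves from ~=2.3.1 to ~=2.4. Under PEP 440, 'pylint~=2.4' is a compatible-release specifier equivalent to 'pylint >=2.4, ==2.*', so it installs the 2.4.4 release named in the commit title while still accepting future 2.x versions without another setup.py edit.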
@@ -40,18 +40,22 @@ class TestMsSqlToHiveTransfer(unittest.TestCase):
             dag=None
         )
 
+    # pylint: disable=c-extension-no-member
     def test_type_map_binary(self):
-        mapped_type = MsSqlToHiveTransfer(**self.kwargs).type_map(pymssql.BINARY.value)
+        mapped_type = MsSqlToHiveTransfer(
+            **self.kwargs).type_map(pymssql.BINARY.value)  # pylint: disable=c-extension-no-member
 
         self.assertEqual(mapped_type, 'INT')
 
     def test_type_map_decimal(self):
-        mapped_type = MsSqlToHiveTransfer(**self.kwargs).type_map(pymssql.DECIMAL.value)
+        mapped_type = MsSqlToHiveTransfer(
+            **self.kwargs).type_map(pymssql.DECIMAL.value)  # pylint: disable=c-extension-no-member
 
         self.assertEqual(mapped_type, 'FLOAT')
 
     def test_type_map_number(self):
-        mapped_type = MsSqlToHiveTransfer(**self.kwargs).type_map(pymssql.NUMBER.value)
+        mapped_type = MsSqlToHiveTransfer(
+            **self.kwargs).type_map(pymssql.NUMBER.value)  # pylint: disable=c-extension-no-member
 
         self.assertEqual(mapped_type, 'INT')
 
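pymssql is a C extension, so pylint cannot statically see attributes like pymssql.BINARY and raises c-extension-no-member (I1101); the tests suppress it with disable comments. A sketch of the pattern (assumes pymssql is installed; an alternative would be whitelisting the module via pylintrc's extension-pkg-whitelist option):

    import pymssql

    # Static analysis cannot introspect the compiled module, so the member
    # access below is flagged even though it resolves fine at runtime.
    binary_type = pymssql.BINARY.value  # pylint: disable=c-extension-no-member
    print(binary_type)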
@@ -71,6 +71,6 @@ def print_all_cases(xunit_test_file_path):
 if __name__ == '__main__':
     if len(sys.argv) < 2:
         print("Please provide name of xml unit file as first parameter")
-        exit(1)
+        sys.exit(1)
     file_name = sys.argv[1]
     print_all_cases(file_name)
@@ -138,10 +138,7 @@ class FakeElasticsearch(Elasticsearch):
     def find_document(self, doc_type, id, index, result):
         for document in self.__documents_dict[index]:
             if document.get('_id') == id:
-                if doc_type == '_all':
-                    result = document
-                    break
-                elif document.get('_type') == doc_type:
+                if doc_type == '_all' or document.get('_type') == doc_type:
                     result = document
                     break
         return result
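The FakeElasticsearch cleanup folds two branches with identical bodies into one condition joined by or; short-circuit evaluation preserves behaviour, since document.get('_type') is only consulted when doc_type is not '_all'. A sketch of the equivalence (matches is an illustrative extraction, not from the commit):

    def matches(doc_type, document):
        return doc_type == '_all' or document.get('_type') == doc_type

    print(matches('_all', {'_type': 'tweet'}))   # True
    print(matches('tweet', {'_type': 'tweet'}))  # True
    print(matches('user', {'_type': 'tweet'}))   # False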