Use literal syntax instead of function calls to create data structures (#9516)
It is slower to call e.g. dict() than to use the empty literal {}, because the name dict must be looked up in the global scope in case it has been rebound. The same applies to the other builtins touched here, such as list(), tuple(), and str().
Parent: d914a9c3f6
Commit: 87fdbd0708
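
To make the rationale concrete, here is a small illustration (not part of the commit) of the bytecode difference on CPython: the call form needs a global name lookup plus a call, while the literal compiles to a single instruction.

import dis

def with_call():
    return dict()    # LOAD_GLOBAL 'dict' followed by a call instruction

def with_literal():
    return {}        # a single BUILD_MAP instruction

dis.dis(with_call)
dis.dis(with_literal)

A quick check with python -m timeit "dict()" versus python -m timeit "{}" typically shows the literal form winning by a wide margin, though exact timings depend on the interpreter version.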
@@ -145,6 +145,7 @@ metastore_browser/templates/.*\\.html$|.*\\.jinja2"
     hooks:
       - id: check-merge-conflict
       - id: debug-statements
+      - id: check-builtin-literals
       - id: detect-private-key
       - id: end-of-file-fixer
       - id: mixed-line-ending
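
The check-builtin-literals hook added above (from the pre-commit-hooks project) is what keeps the codebase in this style going forward. A minimal sketch of the pattern it flags and the replacement it expects, assuming default hook settings:

# Flagged by check-builtin-literals: zero-argument calls to builtin constructors.
settings = dict()
tags = list()
empty = tuple()
buffer = str()

# Preferred literal equivalents.
settings = {}
tags = []
empty = ()
buffer = ''
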
@@ -226,10 +226,10 @@ class BaseExecutor(LoggingMixin):
         :param dag_ids: to dag_ids to return events for, if None returns all
         :return: a dict of events
         """
-        cleared_events: Dict[TaskInstanceKeyType, EventBufferValueType] = dict()
+        cleared_events: Dict[TaskInstanceKeyType, EventBufferValueType] = {}
         if dag_ids is None:
             cleared_events = self.event_buffer
-            self.event_buffer = dict()
+            self.event_buffer = {}
         else:
             for key in list(self.event_buffer.keys()):
                 dag_id, _, _, _ = key
@@ -104,7 +104,7 @@ class BackfillJob(BaseJob):
             total_runs=0,
         ):
             self.to_run = to_run or OrderedDict()
-            self.running = running or dict()
+            self.running = running or {}
             self.skipped = skipped or set()
             self.succeeded = succeeded or set()
             self.failed = failed or set()
@@ -257,7 +257,7 @@ class DAG(BaseDag, LoggingMixin):
         # set file location to caller source path
         back = sys._getframe().f_back
         self.fileloc = back.f_code.co_filename if back else ""
-        self.task_dict: Dict[str, BaseOperator] = dict()
+        self.task_dict: Dict[str, BaseOperator] = {}

         # set timezone from start_date
         if start_date and start_date.tzinfo:
@@ -1277,7 +1277,7 @@ class DAG(BaseDag, LoggingMixin):
             raise TaskNotFound("Task {task_id} not found".format(task_id=task_id))

     def pickle_info(self):
-        d = dict()
+        d = {}
         d['is_picklable'] = True
         try:
             dttm = timezone.utcnow()
@@ -92,7 +92,7 @@ class DagCode(Base):
                 orm_dag_code.fileloc: orm_dag_code for orm_dag_code in existing_orm_dag_codes
             }
         else:
-            existing_orm_dag_codes_map = dict()
+            existing_orm_dag_codes_map = {}

         existing_orm_dag_codes_by_fileloc_hashes = {
             orm.fileloc_hash: orm for orm in existing_orm_dag_codes
@@ -89,7 +89,7 @@ class AwsBaseHook(BaseHook):
         aws_secret_access_key = None
         aws_session_token = None
         endpoint_url = None
-        session_kwargs = dict()
+        session_kwargs = {}

         if self.aws_conn_id:  # pylint: disable=too-many-nested-blocks
             self.log.info("Airflow Connection: aws_conn_id=%s",
@@ -187,7 +187,7 @@ class AwsBaseHook(BaseHook):
                 )
                 sts_client = sts_session.client("sts", config=self.config)

-                assume_role_kwargs = dict()
+                assume_role_kwargs = {}
                 if "assume_role_kwargs" in extra_config:
                     assume_role_kwargs = extra_config["assume_role_kwargs"]
@@ -763,7 +763,7 @@ class SageMakerHook(AwsBaseHook):
         :return: results of the list_training_jobs request
         """

-        config = dict()
+        config = {}

         if name_contains:
             if "NameContains" in kwargs:
@@ -806,7 +806,7 @@ class SageMakerHook(AwsBaseHook):
         next_token = None

         while True:
-            kwargs = dict()
+            kwargs = {}
            if next_token is not None:
                kwargs["NextToken"] = next_token
@@ -137,16 +137,16 @@ class AWSDataSyncOperator(BaseOperator):
         self.allow_random_task_choice = allow_random_task_choice
         self.allow_random_location_choice = allow_random_location_choice

-        self.create_task_kwargs = create_task_kwargs if create_task_kwargs else dict()
-        self.create_source_location_kwargs = dict()
+        self.create_task_kwargs = create_task_kwargs if create_task_kwargs else {}
+        self.create_source_location_kwargs = {}
         if create_source_location_kwargs:
             self.create_source_location_kwargs = create_source_location_kwargs
-        self.create_destination_location_kwargs = dict()
+        self.create_destination_location_kwargs = {}
         if create_destination_location_kwargs:
             self.create_destination_location_kwargs = create_destination_location_kwargs

-        self.update_task_kwargs = update_task_kwargs if update_task_kwargs else dict()
-        self.task_execution_kwargs = task_execution_kwargs if task_execution_kwargs else dict()
+        self.update_task_kwargs = update_task_kwargs if update_task_kwargs else {}
+        self.task_execution_kwargs = task_execution_kwargs if task_execution_kwargs else {}
         self.delete_task_after_execution = delete_task_after_execution

         # Validations
@@ -1042,7 +1042,7 @@ class BigQueryCreateExternalTableOperator(BaseOperator):
         self.google_cloud_storage_conn_id = google_cloud_storage_conn_id
         self.delegate_to = delegate_to

-        self.src_fmt_configs = src_fmt_configs or dict()
+        self.src_fmt_configs = src_fmt_configs or {}
         self.labels = labels
         self.encryption_configuration = encryption_configuration
         self.location = location
@@ -255,7 +255,7 @@ class BigtableCreateTableOperator(BaseOperator, BigtableValidationMixin):
         self.instance_id = instance_id
         self.table_id = table_id
         self.initial_split_keys = initial_split_keys or []
-        self.column_families = column_families or dict()
+        self.column_families = column_families or {}
         self._validate_inputs()
         self.gcp_conn_id = gcp_conn_id
         super().__init__(*args, **kwargs)
@@ -208,7 +208,7 @@ class ClusterGenerator:
         self.custom_image = custom_image
         self.custom_image_project_id = custom_image_project_id
         self.image_version = image_version
-        self.properties = properties or dict()
+        self.properties = properties or {}
         self.optional_components = optional_components
         self.master_machine_type = master_machine_type
         self.master_disk_type = master_disk_type
@@ -307,7 +307,7 @@ class CassandraToGCSOperator(BaseOperator):
         """
         Generates BQ schema.
         """
-        field_schema: Dict[str, Any] = dict()
+        field_schema: Dict[str, Any] = {}
         field_schema.update({'name': name})
         field_schema.update({'type_': cls.get_bq_type(type_)})
         field_schema.update({'mode': cls.get_bq_mode(type_)})
@@ -59,7 +59,7 @@ def build_gcp_conn(
     conn = "google-cloud-platform://?{}"
     extras = "extra__google_cloud_platform"

-    query_params = dict()
+    query_params = {}
     if key_file_path:
         query_params["{}__key_path".format(extras)] = key_file_path
     if scopes:
@@ -84,7 +84,7 @@ class FileProcessorHandler(logging.Handler):

     def _render_filename(self, filename):
         filename = os.path.relpath(filename, self.dag_dir)
-        ctx = dict()
+        ctx = {}
         ctx['filename'] = filename

         if self.filename_jinja_template:
@@ -72,7 +72,7 @@ class StreamLogWriter:
         """
         self.logger = logger
         self.level = level
-        self._buffer = str()
+        self._buffer = ''

     @property
     def closed(self):  # noqa: D402
@@ -101,7 +101,7 @@ class StreamLogWriter:
         else:
             self._buffer += message
             self._propagate_log(self._buffer.rstrip())
-            self._buffer = str()
+            self._buffer = ''

     def flush(self):
         """
@@ -109,7 +109,7 @@ class StreamLogWriter:
         """
         if len(self._buffer) > 0:
             self._propagate_log(self._buffer)
-            self._buffer = str()
+            self._buffer = ''

     def isatty(self):
         """
@@ -46,7 +46,7 @@ def context_to_airflow_vars(context, in_env_var_format=False):
     :type in_env_var_format: bool
     :return: task_instance context as dict.
     """
-    params = dict()
+    params = {}
     if in_env_var_format:
         name_format = 'env_var_format'
     else:
@@ -29,7 +29,7 @@ def configure_manifest_files(app):
     :param app:
     :return:
     """
-    manifest = dict()
+    manifest = {}

     def parse_manifest_json():
         # noinspection PyBroadException
@@ -604,7 +604,7 @@ def get_package_class_summary(full_package_name: str, imported_classes: List[str
     from airflow.hooks.base_hook import BaseHook
     from airflow.models.baseoperator import BaseOperator

-    all_verified_entities: Dict[EntityType, VerifiedEntities] = dict()
+    all_verified_entities: Dict[EntityType, VerifiedEntities] = {}
     all_verified_entities[EntityType.Operators] = find_all_entities(
         imported_classes=imported_classes,
         base_package=full_package_name,
@@ -657,7 +657,7 @@ def get_package_class_summary(full_package_name: str, imported_classes: List[str
     for entity in EntityType:
         print_wrong_naming(entity, all_verified_entities[entity].wrong_entities)

-    entities_summary: Dict[EntityType, EntityTypeSummary] = dict()  # noqa
+    entities_summary: Dict[EntityType, EntityTypeSummary] = {}  # noqa

     for entity_type in EntityType:
         entities_summary[entity_type] = get_details_about_classes(
@@ -127,7 +127,7 @@ class BaseParameters:
         self.password = password
         self.version = version
         self.version_rc = version_rc
-        self.template_arguments = dict()
+        self.template_arguments = {}

     def __repr__(self):
         return f"Apache Credentials: {self.email}/{self.username}/{self.version}/{self.version_rc}"
@@ -317,7 +317,7 @@ class TestDag(unittest.TestCase):
             start_date=DEFAULT_DATE,
             default_args={'owner': 'owner1'})

-        self.assertEqual(tuple(), dag.topological_sort())
+        self.assertEqual((), dag.topological_sort())

     def test_dag_naive_start_date_string(self):
         DAG('DAG', default_args={'start_date': '2019-06-01'})
@@ -74,7 +74,7 @@ class TestStats(unittest.TestCase):
         self.statsd_client.incr.assert_called_once_with('test_stats_run', 1, 1)

     def test_stat_name_must_be_a_string(self):
-        self.stats.incr(list())
+        self.stats.incr([])
         self.statsd_client.assert_not_called()

     def test_stat_name_must_not_exceed_max_length(self):
@@ -152,7 +152,7 @@ class TestDogStats(unittest.TestCase):
         )

     def test_stat_name_must_be_a_string_with_dogstatsd(self):
-        self.dogstatsd.incr(list())
+        self.dogstatsd.incr([])
         self.dogstatsd_client.assert_not_called()

     def test_stat_name_must_not_exceed_max_length_with_dogstatsd(self):
@@ -81,7 +81,7 @@ class FakeElasticsearch(Elasticsearch):
                   'routing', 'timeout', 'timestamp', 'ttl', 'version', 'version_type')
     def index(self, index, doc_type, body, id=None, params=None):
         if index not in self.__documents_dict:
-            self.__documents_dict[index] = list()
+            self.__documents_dict[index] = []

         if id is None:
             id = get_random_id()