[AIRFLOW-6987] Avoid creating default connections (#7629)
Parent: 51161dbd9d
Commit: ae035cdb69
@@ -184,13 +184,22 @@
       default: "16"
     - name: load_examples
       description: |
-        Whether to load the examples that ship with Airflow. It's good to
+        Whether to load the DAG examples that ship with Airflow. It's good to
         get started, but you probably want to set this to False in a production
         environment
       version_added: ~
       type: string
       example: ~
       default: "True"
+    - name: load_default_connections
+      description: |
+        Whether to load the default connections that ship with Airflow. It's good to
+        get started, but you probably want to set this to False in a production
+        environment
+      version_added: 1.10.10
+      type: string
+      example: ~
+      default: "True"
     - name: plugins_folder
       description: |
         Where your Airflow plugins are stored

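Both options above are rendered into airflow.cfg as plain "True"/"False" strings. As a hedged illustration that is not part of this commit, a production deployment that wants neither the example DAGs nor the shipped connections could override both through Airflow's AIRFLOW__CORE__* environment variables, the same mechanism the script hunk further down uses; the values here are assumptions for such a setup.

import os

# Illustrative production overrides only; not taken from the committed files.
os.environ["AIRFLOW__CORE__LOAD_EXAMPLES"] = "False"
os.environ["AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS"] = "False"
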
@@ -119,11 +119,16 @@ dags_are_paused_at_creation = True
 # The maximum number of active DAG runs per DAG
 max_active_runs_per_dag = 16
 
-# Whether to load the examples that ship with Airflow. It's good to
+# Whether to load the DAG examples that ship with Airflow. It's good to
 # get started, but you probably want to set this to False in a production
 # environment
 load_examples = True
 
+# Whether to load the default connections that ship with Airflow. It's good to
+# get started, but you probably want to set this to False in a production
+# environment
+load_default_connections = True
+
 # Where your Airflow plugins are stored
 plugins_folder = {AIRFLOW_HOME}/plugins
 

@@ -35,6 +35,7 @@ plugins_folder = {TEST_PLUGINS_FOLDER}
 executor = SequentialExecutor
 sql_alchemy_conn = sqlite:///{AIRFLOW_HOME}/unittests.db
 load_examples = True
+load_default_connections = True
 donot_pickle = True
 dag_concurrency = 16
 dags_are_paused_at_creation = False

@@ -503,7 +503,8 @@ def initdb():
     """
     upgradedb()
 
-    create_default_connections()
+    if conf.getboolean('core', 'LOAD_DEFAULT_CONNECTIONS'):
+        create_default_connections()
 
     dagbag = DagBag()
     # Save DAGs in the ORM

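For context, the gated call above is the behavioural core of the commit: migrations always run, but the shipped connections are only seeded when the new option is true. The helper below is a hedged sketch, not the committed initdb(); init_metadata_db is a hypothetical name, and it assumes upgradedb() and create_default_connections() are importable from airflow.utils.db as the hunk suggests.

from airflow.configuration import conf
from airflow.utils.db import create_default_connections, upgradedb


def init_metadata_db():
    """Hypothetical wrapper mirroring the new initdb() gating."""
    # Schema migrations are applied unconditionally.
    upgradedb()
    # The bundled default connections are created only when the operator opts in.
    if conf.getboolean('core', 'LOAD_DEFAULT_CONNECTIONS'):
        create_default_connections()
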
@@ -23,6 +23,7 @@ sql_alchemy_conn = # overridden by the startup scripts
 #sql_engine_collation_for_ids = overridden by the startup scripts
 unit_test_mode = True
 load_examples = True
+load_default_connections = True
 donot_pickle = False
 dags_are_paused_at_creation = False
 default_impersonation =

@@ -27,6 +27,7 @@ data:
 executor = KubernetesExecutor
 parallelism = 32
 load_examples = False
+load_default_connections = True
 plugins_folder = /root/airflow/plugins
 sql_alchemy_conn = $SQL_ALCHEMY_CONN
 

@@ -27,6 +27,7 @@ DAG_FOLDER = os.path.join(os.path.dirname(__file__), "dags")
 os.environ["AIRFLOW__CORE__DAGS_FOLDER"] = DAG_FOLDER
 os.environ["AIRFLOW__DEBUG__SQLALCHEMY_STATS"] = "True"
 os.environ["AIRFLOW__CORE__LOAD_EXAMPLES"] = "False"
+os.environ["AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS"] = "True"
 
 # Here we setup simpler logger to avoid any code changes in
 # Airflow core code base

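The variable added above follows Airflow's AIRFLOW__<SECTION>__<KEY> convention, so it maps onto the [core] load_default_connections option and takes precedence over airflow.cfg. A small hedged sketch of reading such an override back; the assertion is illustrative and assumes a working Airflow installation rather than being part of this commit.

import os

os.environ["AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS"] = "True"

from airflow.configuration import conf

# Environment variables override values coming from airflow.cfg.
assert conf.getboolean("core", "load_default_connections") is True
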
@@ -118,6 +118,8 @@ class TestConf(unittest.TestCase):
         cfg_dict = conf.as_dict(display_source=True)
         self.assertEqual(
             cfg_dict['core']['load_examples'][1], 'airflow.cfg')
+        self.assertEqual(
+            cfg_dict['core']['load_default_connections'][1], 'airflow.cfg')
         self.assertEqual(
             cfg_dict['testsection']['testkey'], ('< hidden >', 'env var'))
 

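The new assertion relies on as_dict(display_source=True) returning (value, source) pairs rather than bare values, which is why the test indexes with [1] to get the source name such as 'airflow.cfg' or 'env var'. A hedged usage sketch follows; the printed output is an assumption for a stock configuration, not something asserted by the test above.

from airflow.configuration import conf

cfg = conf.as_dict(display_source=True)
value, source = cfg['core']['load_default_connections']
print(value, source)  # e.g. "True" "airflow.cfg" when the value comes from airflow.cfg
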