John Bampton 2020-12-17 19:53:35 +10:00 committed by GitHub
Parent 8d5b4349f5
Commit 8529cb1c7d
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
13 changed files with 35 additions and 35 deletions

View File

@@ -219,7 +219,7 @@ ENV ADDITIONAL_PYTHON_DEPS=${ADDITIONAL_PYTHON_DEPS}
ARG AIRFLOW_INSTALLATION_METHOD="apache-airflow"
ENV AIRFLOW_INSTALLATION_METHOD=${AIRFLOW_INSTALLATION_METHOD}
-# By default latest released version of airflow is installed (when empty) but this value can be overriden
+# By default latest released version of airflow is installed (when empty) but this value can be overridden
# and we can install specific version of airflow this way.
ARG AIRFLOW_INSTALL_VERSION=""
ENV AIRFLOW_INSTALL_VERSION=${AIRFLOW_INSTALL_VERSION}
@@ -413,7 +413,7 @@ RUN addgroup --gid "${AIRFLOW_GID}" "airflow" && \
ARG AIRFLOW_HOME
ENV AIRFLOW_HOME=${AIRFLOW_HOME}
-# Make Airflow files belong to the root group and are accessible. This is to accomodate the guidelines from
+# Make Airflow files belong to the root group and are accessible. This is to accommodate the guidelines from
# OpenShift https://docs.openshift.com/enterprise/3.0/creating_images/guidelines.html
RUN mkdir -pv "${AIRFLOW_HOME}"; \
    mkdir -pv "${AIRFLOW_HOME}/dags"; \

View File

@@ -624,7 +624,7 @@ The entrypoint performs those operations:
* Sets up Kerberos if Kerberos integration is enabled (generates and configures Kerberos token)
-* Sets up ssh keys for ssh tests and restarts teh SSH server
+* Sets up ssh keys for ssh tests and restarts the SSH server
* Sets all variables and configurations needed for unit tests to run

View File

@@ -1537,7 +1537,7 @@ Migrated are:
#### `airflow.providers.amazon.aws.operators.emr_terminate_job_flow.EmrTerminateJobFlowOperator`
-The default value for the [aws_conn_id](https://airflow.apache.org/howto/manage-connections.html#amazon-web-services) was accidently set to 's3_default' instead of 'aws_default' in some of the emr operators in previous
+The default value for the [aws_conn_id](https://airflow.apache.org/howto/manage-connections.html#amazon-web-services) was accidentally set to 's3_default' instead of 'aws_default' in some of the emr operators in previous
versions. This was leading to EmrStepSensor not being able to find their corresponding emr cluster. With the new
changes in the EmrAddStepsOperator, EmrTerminateJobFlowOperator and EmrCreateJobFlowOperator this issue is
solved.
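As a usage sketch (not from this commit): pinning `aws_conn_id` explicitly on one of the affected operators, which was also the workaround before the default was corrected. The step definition and task ids below are invented for illustration.

```python
from airflow.providers.amazon.aws.operators.emr_add_steps import EmrAddStepsOperator

# Invented placeholder step; a real job would do actual work.
SPARK_STEPS = [
    {
        "Name": "example_step",
        "ActionOnFailure": "CONTINUE",
        "HadoopJarStep": {"Jar": "command-runner.jar", "Args": ["true"]},
    }
]

# Passing aws_conn_id explicitly avoids relying on the (previously wrong) default.
add_steps = EmrAddStepsOperator(
    task_id="add_steps",
    job_flow_id="{{ task_instance.xcom_pull(task_ids='create_job_flow') }}",
    aws_conn_id="aws_default",
    steps=SPARK_STEPS,
)
```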

View File

@@ -1338,7 +1338,7 @@ paths:
    get:
      summary: Get a instance status
      description: |
-        Get the status of Airflow's metadatabase and scheduler. It incluse info about
+        Get the status of Airflow's metadatabase and scheduler. It includes info about
        metadatabase and last heartbeat of scheduler.
      x-openapi-router-controller: airflow.api_connexion.endpoints.health_endpoint
      operationId: get_health
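Not part of the spec above, but as a hedged usage sketch: querying this health route on the stable REST API. The host and port assume a local webserver; the response fields follow the description above (metadatabase status plus scheduler status and last heartbeat).

```python
import requests

# Assumed local webserver address; adjust for your deployment.
resp = requests.get("http://localhost:8080/api/v1/health")
resp.raise_for_status()
health = resp.json()

# Metadatabase status plus scheduler status, matching the description above.
print(health["metadatabase"]["status"])
print(health["scheduler"]["status"])
```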
@@ -1883,7 +1883,7 @@ components:
          $ref: '#/components/schemas/XComCollectionItem'
    XCom:
-      description: Full representaiton of XCom entry.
+      description: Full representations of XCom entry.
      allOf:
        - $ref: '#/components/schemas/XComCollectionItem'
        - type: object

View File

@@ -97,7 +97,7 @@ if conf.getboolean("sentry", 'sentry_on', fallback=False):
    sentry_config_opts.pop("sentry_on")
    old_way_dsn = sentry_config_opts.pop("sentry_dsn", None)
    new_way_dsn = sentry_config_opts.pop("dsn", None)
-    # supported backward compability with old way dsn option
+    # supported backward compatibility with old way dsn option
    dsn = old_way_dsn or new_way_dsn
    unsupported_options = self.UNSUPPORTED_SENTRY_OPTIONS.intersection(sentry_config_opts.keys())
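A self-contained sketch of the fallback that comment describes: both spellings are popped from the options, and the legacy `sentry_dsn` key wins when both are set. The DSN values are invented.

```python
def resolve_dsn(sentry_config_opts):
    # Mirrors the logic above: accept both option spellings,
    # preferring the legacy one.
    old_way_dsn = sentry_config_opts.pop("sentry_dsn", None)
    new_way_dsn = sentry_config_opts.pop("dsn", None)
    return old_way_dsn or new_way_dsn

# Invented DSNs, purely for illustration.
assert resolve_dsn({"dsn": "https://b@sentry.example/2"}) == "https://b@sentry.example/2"
assert resolve_dsn(
    {"sentry_dsn": "https://a@sentry.example/1", "dsn": "https://b@sentry.example/2"}
) == "https://a@sentry.example/1"
```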

View File

@@ -143,7 +143,7 @@
}
var devicePixelRatio = window.devicePixelRatio || 1;
-// JSON.parse is faster for large payloads than an object literal (because the JSON grammer is simpler!)
+// JSON.parse is faster for large payloads than an object literal (because the JSON grammar is simpler!)
var data = JSON.parse({{ data|tojson }});
var barHeight = 20;
var axisHeight = 40;

View File

@@ -316,7 +316,7 @@ Task ids are generated by appending a number at the end of the original task id.
the following task ids: ``[update_user, update_user__1, update_user__2, ... update_user__n]``.
Due to dynamic nature of the ids generations users should be aware that changing a DAG by adding or removing additional
-invocations of task-decorated function may change ``task_id`` of other task of the same type withing a single DAG.
+invocations of task-decorated function may change ``task_id`` of other task of the same type within a single DAG.
For example, if there are many task-decorated tasks without explicitly given task_id. Their ``task_id`` will be
generated sequentially: ``task__1``, ``task__2``, ``task__3``, etc. After the DAG goes into production, one day
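A short sketch of the behaviour described above, using the TaskFlow ``@task`` decorator; the function name matches the example ids in the text.

```python
from airflow.decorators import task
from airflow.models import DAG
from airflow.utils.dates import days_ago

@task
def update_user(user_id):
    print(f"updating {user_id}")

with DAG(dag_id="user_updates", start_date=days_ago(1)):
    # Repeated invocations get suffixed ids:
    # update_user, update_user__1, update_user__2, ...
    for uid in ["u1", "u2", "u3"]:
        update_user(uid)
```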

View File

@@ -141,7 +141,7 @@ function build_images::confirm_image_rebuild() {
    fi
    if [[ -f "${LAST_FORCE_ANSWER_FILE}" ]]; then
        # set variable from last answered response given in the same pre-commit run - so that it can be
-        # answered in teh first pre-commit check (build) and then used in another (pylint/mypy/flake8 etc).
+        # answered in the first pre-commit check (build) and then used in another (pylint/mypy/flake8 etc).
        # shellcheck disable=SC1090
        source "${LAST_FORCE_ANSWER_FILE}"
    fi

View File

@@ -516,7 +516,7 @@ devel_hadoop = devel_minreq + hdfs + hive + kerberos + presto + webhdfs
############################################################################################################
# IMPORTANT NOTE!!!!!!!!!!!!!!!
-# If you have a 'pip check' problem with dependencies, it might be becasue some dependency has been
+# If you have a 'pip check' problem with dependencies, it might be because some dependency has been
# installed via 'install_requires' in setup.cfg in higher version than required in one of the options below.
# For example pip check was failing with requests=2.25.1 installed even if in some dependencies below
# < 2.24.0 was specified for it. Solution in such case is to add such limiting requirement to
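The sentence above is cut off before naming where the pin belongs, so only the shape of such a limiting requirement is sketched here; the list name is hypothetical.

```python
# Hypothetical pin matching the requests example in the note above.
additional_pins = [
    "requests>=2.20.0,<2.24.0",
]
```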

View File

@@ -384,7 +384,7 @@ key3 = value3
        self.assertEqual(
            None,
-            test_conf.getsection('non_existant_secion'),
+            test_conf.getsection('non_existent_section'),
        )

    def test_get_section_should_respect_cmd_env_variable(self):

View File

@@ -93,7 +93,7 @@ class TestLineage(unittest.TestCase):
    def test_lineage_render(self):
        # tests inlets / outlets are rendered if they are added
-        # after initalization
+        # after initialization
        dag = DAG(dag_id='test_lineage_render', start_date=DEFAULT_DATE)

        with dag:

View File

@@ -414,7 +414,7 @@ class TestTaskInstance(unittest.TestCase):
    @provide_session
    def test_ti_updates_with_task(self, session=None):
        """
-        test that updating the executor_config propogates to the TaskInstance DB
+        test that updating the executor_config propagates to the TaskInstance DB
        """
        with models.DAG(dag_id='test_run_pooling_task') as dag:
            task = DummyOperator(
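For context, a minimal sketch of setting `executor_config` on a task; the resource keys are a common KubernetesExecutor example, not taken from this test.

```python
from airflow.operators.dummy import DummyOperator

# executor_config is stored on the task and, as the test verifies,
# propagated to the TaskInstance row in the database.
task = DummyOperator(
    task_id="memory_hungry_task",
    executor_config={"KubernetesExecutor": {"request_memory": "512Mi"}},
)
```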

View File

@@ -28,7 +28,7 @@ from airflow.providers.google.suite.hooks.sheets import GSheetsHook
from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id

GCP_CONN_ID = 'test'
-SPREADHSEET_ID = '1234567890'
+SPREADSHEET_ID = '1234567890'
RANGE_ = 'test!A:E'
RANGES = ['test!A:Q', 'test!R:Z']
VALUES = [[1, 2, 3]]
@@ -66,7 +66,7 @@ class TestGSheetsHook(unittest.TestCase):
        execute_method = get_method.return_value.execute
        execute_method.return_value = {"values": VALUES}
        result = self.hook.get_values(
-            spreadsheet_id=SPREADHSEET_ID,
+            spreadsheet_id=SPREADSHEET_ID,
            range_=RANGE_,
            major_dimension=MAJOR_DIMENSION,
            value_render_option=VALUE_RENDER_OPTION,
@@ -75,7 +75,7 @@ class TestGSheetsHook(unittest.TestCase):
        self.assertIs(result, VALUES)
        execute_method.assert_called_once_with(num_retries=NUM_RETRIES)
        get_method.assert_called_once_with(
-            spreadsheetId=SPREADHSEET_ID,
+            spreadsheetId=SPREADSHEET_ID,
            range=RANGE_,
            majorDimension=MAJOR_DIMENSION,
            valueRenderOption=VALUE_RENDER_OPTION,
@@ -88,7 +88,7 @@ class TestGSheetsHook(unittest.TestCase):
        execute_method = batch_get_method.return_value.execute
        execute_method.return_value = API_RESPONSE
        result = self.hook.batch_get_values(
-            spreadsheet_id=SPREADHSEET_ID,
+            spreadsheet_id=SPREADSHEET_ID,
            ranges=RANGES,
            major_dimension=MAJOR_DIMENSION,
            value_render_option=VALUE_RENDER_OPTION,
@@ -97,7 +97,7 @@ class TestGSheetsHook(unittest.TestCase):
        self.assertIs(result, API_RESPONSE)
        execute_method.assert_called_once_with(num_retries=NUM_RETRIES)
        batch_get_method.assert_called_once_with(
-            spreadsheetId=SPREADHSEET_ID,
+            spreadsheetId=SPREADSHEET_ID,
            ranges=RANGES,
            majorDimension=MAJOR_DIMENSION,
            valueRenderOption=VALUE_RENDER_OPTION,
@@ -110,7 +110,7 @@ class TestGSheetsHook(unittest.TestCase):
        execute_method = update_method.return_value.execute
        execute_method.return_value = API_RESPONSE
        result = self.hook.update_values(
-            spreadsheet_id=SPREADHSEET_ID,
+            spreadsheet_id=SPREADSHEET_ID,
            range_=RANGE_,
            values=VALUES,
            major_dimension=MAJOR_DIMENSION,
@@ -123,7 +123,7 @@ class TestGSheetsHook(unittest.TestCase):
        self.assertIs(result, API_RESPONSE)
        execute_method.assert_called_once_with(num_retries=NUM_RETRIES)
        update_method.assert_called_once_with(
-            spreadsheetId=SPREADHSEET_ID,
+            spreadsheetId=SPREADSHEET_ID,
            range=RANGE_,
            valueInputOption=VALUE_INPUT_OPTION,
            includeValuesInResponse=INCLUDE_VALUES_IN_RESPONSE,
@@ -138,7 +138,7 @@ class TestGSheetsHook(unittest.TestCase):
        execute_method = batch_update_method.return_value.execute
        execute_method.return_value = API_RESPONSE
        result = self.hook.batch_update_values(
-            spreadsheet_id=SPREADHSEET_ID,
+            spreadsheet_id=SPREADSHEET_ID,
            ranges=RANGES,
            values=VALUES_BATCH,
            major_dimension=MAJOR_DIMENSION,
@@ -160,7 +160,7 @@ class TestGSheetsHook(unittest.TestCase):
        }
        self.assertIs(result, API_RESPONSE)
        execute_method.assert_called_once_with(num_retries=NUM_RETRIES)
-        batch_update_method.assert_called_once_with(spreadsheetId=SPREADHSEET_ID, body=body)
+        batch_update_method.assert_called_once_with(spreadsheetId=SPREADSHEET_ID, body=body)

    @mock.patch("airflow.providers.google.suite.hooks.sheets.GSheetsHook.get_conn")
    def test_batch_update_values_with_bad_data(self, get_conn):
@@ -169,7 +169,7 @@ class TestGSheetsHook(unittest.TestCase):
        execute_method.return_value = API_RESPONSE
        with self.assertRaises(AirflowException) as cm:
            self.hook.batch_update_values(
-                spreadsheet_id=SPREADHSEET_ID,
+                spreadsheet_id=SPREADSHEET_ID,
                ranges=['test!A1:B2', 'test!C1:C2'],
                values=[[1, 2, 3]],  # bad data
                major_dimension=MAJOR_DIMENSION,
@@ -189,7 +189,7 @@ class TestGSheetsHook(unittest.TestCase):
        execute_method = append_method.return_value.execute
        execute_method.return_value = API_RESPONSE
        result = self.hook.append_values(
-            spreadsheet_id=SPREADHSEET_ID,
+            spreadsheet_id=SPREADSHEET_ID,
            range_=RANGE_,
            values=VALUES,
            major_dimension=MAJOR_DIMENSION,
@@ -203,7 +203,7 @@ class TestGSheetsHook(unittest.TestCase):
        self.assertIs(result, API_RESPONSE)
        execute_method.assert_called_once_with(num_retries=NUM_RETRIES)
        append_method.assert_called_once_with(
-            spreadsheetId=SPREADHSEET_ID,
+            spreadsheetId=SPREADSHEET_ID,
            range=RANGE_,
            valueInputOption=VALUE_INPUT_OPTION,
            insertDataOption=INSERT_DATA_OPTION,
@@ -218,31 +218,31 @@ class TestGSheetsHook(unittest.TestCase):
        clear_method = get_conn.return_value.spreadsheets.return_value.values.return_value.clear
        execute_method = clear_method.return_value.execute
        execute_method.return_value = API_RESPONSE
-        result = self.hook.clear(spreadsheet_id=SPREADHSEET_ID, range_=RANGE_)
+        result = self.hook.clear(spreadsheet_id=SPREADSHEET_ID, range_=RANGE_)

        self.assertIs(result, API_RESPONSE)
        execute_method.assert_called_once_with(num_retries=NUM_RETRIES)
-        clear_method.assert_called_once_with(spreadsheetId=SPREADHSEET_ID, range=RANGE_)
+        clear_method.assert_called_once_with(spreadsheetId=SPREADSHEET_ID, range=RANGE_)

    @mock.patch("airflow.providers.google.suite.hooks.sheets.GSheetsHook.get_conn")
    def test_batch_clear_values(self, get_conn):
        batch_clear_method = get_conn.return_value.spreadsheets.return_value.values.return_value.batchClear
        execute_method = batch_clear_method.return_value.execute
        execute_method.return_value = API_RESPONSE
-        result = self.hook.batch_clear(spreadsheet_id=SPREADHSEET_ID, ranges=RANGES)
+        result = self.hook.batch_clear(spreadsheet_id=SPREADSHEET_ID, ranges=RANGES)

        body = {"ranges": RANGES}
        self.assertIs(result, API_RESPONSE)
        execute_method.assert_called_once_with(num_retries=NUM_RETRIES)
-        batch_clear_method.assert_called_once_with(spreadsheetId=SPREADHSEET_ID, body=body)
+        batch_clear_method.assert_called_once_with(spreadsheetId=SPREADSHEET_ID, body=body)

    @mock.patch("airflow.providers.google.suite.hooks.sheets.GSheetsHook.get_conn")
    def test_get_spreadsheet(self, mock_get_conn):
        get_mock = mock_get_conn.return_value.spreadsheets.return_value.get
        get_mock.return_value.execute.return_value = API_RESPONSE
-        result = self.hook.get_spreadsheet(spreadsheet_id=SPREADHSEET_ID)
-        get_mock.assert_called_once_with(spreadsheetId=SPREADHSEET_ID)
+        result = self.hook.get_spreadsheet(spreadsheet_id=SPREADSHEET_ID)
+        get_mock.assert_called_once_with(spreadsheetId=SPREADSHEET_ID)
        assert result == API_RESPONSE

    @mock.patch("airflow.providers.google.suite.hooks.sheets.GSheetsHook.get_spreadsheet")
@@ -251,11 +251,11 @@ class TestGSheetsHook(unittest.TestCase):
        sheet2 = {"properties": {"title": "title2"}}
        mock_get_spreadsheet.return_value = {"sheets": [sheet1, sheet2]}
-        result = self.hook.get_sheet_titles(spreadsheet_id=SPREADHSEET_ID)
-        mock_get_spreadsheet.assert_called_once_with(spreadsheet_id=SPREADHSEET_ID)
+        result = self.hook.get_sheet_titles(spreadsheet_id=SPREADSHEET_ID)
+        mock_get_spreadsheet.assert_called_once_with(spreadsheet_id=SPREADSHEET_ID)
        assert result == ["title1", "title2"]

-        result = self.hook.get_sheet_titles(spreadsheet_id=SPREADHSEET_ID, sheet_filter=["title1"])
+        result = self.hook.get_sheet_titles(spreadsheet_id=SPREADSHEET_ID, sheet_filter=["title1"])
        assert result == ["title1"]

    @mock.patch("airflow.providers.google.suite.hooks.sheets.GSheetsHook.get_conn")
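Finally, a hedged usage sketch for the hook these tests exercise; the connection id and spreadsheet id below are placeholders.

```python
from airflow.providers.google.suite.hooks.sheets import GSheetsHook

# Placeholder ids; credentials come from the configured Airflow connection.
hook = GSheetsHook(gcp_conn_id="google_cloud_default")
rows = hook.get_values(spreadsheet_id="1234567890", range_="test!A:E")
titles = hook.get_sheet_titles(spreadsheet_id="1234567890", sheet_filter=["test"])
```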