Update FlaskAppBuilder to v3 (#9648)
This commit is contained in:
Parent: 72abf824ce
Commit: e764ea5811
@@ -59,7 +59,7 @@ def _config_to_text(config: Config) -> str:
 def _config_to_json(config: Config) -> str:
     """Convert a Config object to a JSON formatted string"""
-    return json.dumps(config_schema.dump(config).data, indent=4)
+    return json.dumps(config_schema.dump(config), indent=4)


 def get_config() -> Response:
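
Note: marshmallow 2's Schema.dump() returned a MarshalResult namedtuple whose payload lived in .data; marshmallow 3 returns the serialized dict directly, which is why .data accessors are dropped throughout this commit. A minimal sketch of the difference (the schema below is illustrative, not from this codebase):

    from marshmallow import Schema, fields

    class UserSchema(Schema):
        name = fields.String()

    result = UserSchema().dump({"name": "ada"})
    # marshmallow 2: result.data == {"name": "ada"}
    # marshmallow 3: result == {"name": "ada"}
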
@@ -73,11 +73,10 @@ def patch_connection(connection_id, session, update_mask=None):
     Update a connection entry
     """
     try:
-        body = connection_schema.load(request.json, partial=True)
+        data = connection_schema.load(request.json, partial=True)
     except ValidationError as err:
-        # If validation get to here, it is extra field validation.
-        raise BadRequest(detail=err.messages.get('_schema', [err.messages])[0])
-    data = body.data
+        raise BadRequest(detail=str(err.messages))
     non_update_fields = ['connection_id', 'conn_id']
     connection = session.query(Connection).filter_by(conn_id=connection_id).first()
     if connection is None:
@@ -107,10 +106,9 @@ def post_connection(session):
     """
     body = request.json
     try:
-        result = connection_schema.load(body)
+        data = connection_schema.load(body)
     except ValidationError as err:
         raise BadRequest(detail=str(err.messages))
-    data = result.data
     conn_id = data['conn_id']
     query = session.query(Connection)
     connection = query.filter_by(conn_id=conn_id).first()
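
Schema.load() changes the same way: marshmallow 2 returned an UnmarshalResult with .data and .errors, while marshmallow 3 returns the deserialized value directly and raises ValidationError instead of collecting errors. That is why result = connection_schema.load(body) followed by data = result.data collapses into a single data = connection_schema.load(body). A sketch, assuming a hypothetical schema:

    from marshmallow import Schema, fields, ValidationError

    class ConnSchema(Schema):
        conn_id = fields.String(required=True)

    try:
        data = ConnSchema().load({})  # plain dict on success in v3
    except ValidationError as err:
        print(err.messages)  # {'conn_id': ['Missing data for required field.']}
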
@@ -51,4 +51,4 @@ def get_import_errors(session, limit, offset=None):
     import_errors = session.query(ImportError).order_by(ImportError.id).offset(offset).limit(limit).all()
     return import_error_collection_schema.dump(
         ImportErrorCollection(import_errors=import_errors, total_entries=total_entries)
-    ).data
+    )
@@ -63,7 +63,7 @@ def get_pools(session, limit, offset=None):
     pools = session.query(Pool).order_by(Pool.id).offset(offset).limit(limit).all()
     return pool_collection_schema.dump(
         PoolCollection(pools=pools, total_entries=total_entries)
-    ).data
+    )


 @provide_session
@@ -86,9 +86,9 @@ def patch_pool(pool_name, session, update_mask=None):
         raise NotFound(detail=f"Pool with name:'{pool_name}' not found")

     try:
-        patch_body = pool_schema.load(request.json).data
+        patch_body = pool_schema.load(request.json)
     except ValidationError as err:
-        raise BadRequest(detail=err.messages.get("_schema", [err.messages])[0])
+        raise BadRequest(detail=str(err.messages))

     if update_mask:
         update_mask = [i.strip() for i in update_mask]
@@ -127,9 +127,9 @@ def post_pool(session):
         raise BadRequest(detail=f"'{field}' is a required property")

     try:
-        post_body = pool_schema.load(request.json, session=session).data
+        post_body = pool_schema.load(request.json, session=session)
     except ValidationError as err:
-        raise BadRequest(detail=err.messages.get("_schema", [err.messages])[0])
+        raise BadRequest(detail=str(err.messages))

     pool = Pool(**post_body)
     try:
@@ -73,11 +73,11 @@ def patch_variable(variable_key: str, update_mask: Optional[List[str]] = None) -
     Update a variable by key
     """
     try:
-        var = variable_schema.load(request.json)
+        data = variable_schema.load(request.json)
     except ValidationError as err:
         raise BadRequest("Invalid Variable schema", detail=str(err.messages))

-    if var.data["key"] != variable_key:
+    if data["key"] != variable_key:
         raise BadRequest("Invalid post body", detail="key from request body doesn't match uri parameter")

     if update_mask:
@@ -86,7 +86,7 @@ def patch_variable(variable_key: str, update_mask: Optional[List[str]] = None) -
         if "value" not in update_mask:
             raise BadRequest("No field to update")

-    Variable.set(var.data["key"], var.data["val"])
+    Variable.set(data["key"], data["val"])
     return Response(status=204)


@@ -95,8 +95,9 @@ def post_variables() -> Response:
     Create a variable
     """
     try:
-        var = variable_schema.load(request.json)
+        data = variable_schema.load(request.json)
+
     except ValidationError as err:
         raise BadRequest("Invalid Variable schema", detail=str(err.messages))
-    Variable.set(var.data["key"], var.data["val"])
-    return variable_schema.dump(var)
+    Variable.set(data["key"], data["val"])
+    return variable_schema.dump(data)
@@ -36,7 +36,7 @@ class CronExpression(typing.NamedTuple):
 class TimeDeltaSchema(Schema):
     """Time delta schema"""

-    objectType = fields.Constant("TimeDelta", dump_to="__type")
+    objectType = fields.Constant("TimeDelta", data_key="__type")
     days = fields.Integer()
     seconds = fields.Integer()
     microseconds = fields.Integer()
@@ -53,7 +53,7 @@ class TimeDeltaSchema(Schema):
 class RelativeDeltaSchema(Schema):
     """Relative delta schema"""

-    objectType = fields.Constant("RelativeDelta", dump_to="__type")
+    objectType = fields.Constant("RelativeDelta", data_key="__type")
     years = fields.Integer()
     months = fields.Integer()
     days = fields.Integer()
@@ -83,7 +83,7 @@ class RelativeDeltaSchema(Schema):
 class CronExpressionSchema(Schema):
     """Cron expression schema"""

-    objectType = fields.Constant("CronExpression", dump_to="__type", required=True)
+    objectType = fields.Constant("CronExpression", data_key="__type", required=True)
     value = fields.String(required=True)

     @marshmallow.post_load
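
The three schema hunks above all make the same move: marshmallow 3 merges the v2 dump_to/load_from pair into a single data_key argument that names the external key for both serialization and deserialization. Illustrative sketch:

    from marshmallow import Schema, fields

    class TypedSchema(Schema):
        object_type = fields.Constant("TimeDelta", data_key="__type")
        days = fields.Integer()

    TypedSchema().dump({"days": 1})  # {'__type': 'TimeDelta', 'days': 1}
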
@@ -54,4 +54,4 @@ class Config(NamedTuple):
     sections: List[ConfigSection]


-config_schema = ConfigSchema(strict=True)
+config_schema = ConfigSchema()
@@ -17,7 +17,7 @@
 # under the License.
 from typing import List, NamedTuple

-from marshmallow import Schema, ValidationError, fields, validates_schema
+from marshmallow import Schema, fields
 from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field

 from airflow.models.connection import Connection
@@ -39,16 +39,6 @@ class ConnectionCollectionItemSchema(SQLAlchemySchema):
     schema = auto_field()
     port = auto_field()

-    # Marshmallow 2 doesn't have support for excluding extra field
-    # We will be able to remove this when we upgrade to marshmallow 3.
-    # To remove it, we would need to set unknown=EXCLUDE in Meta
-    @validates_schema(pass_original=True)
-    def check_unknown_fields(self, data, original_data):  # pylint: disable=unused-argument
-        """ Validates unknown field """
-        unknown = set(original_data) - set(self.fields)
-        if unknown:
-            raise ValidationError(f'Extra arguments passed: {list(unknown)}')
-

 class ConnectionSchema(ConnectionCollectionItemSchema):  # pylint: disable=too-many-ancestors
     """
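
The hand-rolled check_unknown_fields validator existed only because marshmallow 2 silently ignored unexpected input keys. marshmallow 3 defaults to unknown = RAISE, so extra keys fail validation out of the box and the workaround can simply be deleted, exactly as the old comment predicted. If lenient behaviour were ever wanted back, it would be opt-in via Meta (illustrative sketch):

    from marshmallow import EXCLUDE, RAISE, Schema

    class StrictSchema(Schema):
        class Meta:
            unknown = RAISE  # already the default in marshmallow 3

    class LenientSchema(Schema):
        class Meta:
            unknown = EXCLUDE  # silently drop unexpected keys
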
@@ -71,6 +61,6 @@ class ConnectionCollectionSchema(Schema):
     total_entries = fields.Int()


-connection_schema = ConnectionSchema(strict=True)
-connection_collection_item_schema = ConnectionCollectionItemSchema(strict=True)
-connection_collection_schema = ConnectionCollectionSchema(strict=True)
+connection_schema = ConnectionSchema()
+connection_collection_item_schema = ConnectionCollectionItemSchema()
+connection_collection_schema = ConnectionCollectionSchema()
@@ -28,12 +28,12 @@ from airflow.models.dagrun import DagRun

 class ConfObject(fields.Field):
     """ The conf field"""
-    def _serialize(self, value, attr, obj):
+    def _serialize(self, value, attr, obj, **kwargs):
         if not value:
             return {}
         return json.loads(value) if isinstance(value, str) else value

-    def _deserialize(self, value, attr, data):
+    def _deserialize(self, value, attr, data, **kwargs):
         if isinstance(value, str):
             return json.loads(value)
         return value
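
marshmallow 3 passes additional keyword arguments into custom field hooks, so _serialize/_deserialize overrides must accept **kwargs or they fail with a TypeError at runtime. The v3-compatible shape, mirroring the ConfObject change above (illustrative field class):

    from marshmallow import fields

    class JsonText(fields.Field):
        def _serialize(self, value, attr, obj, **kwargs):
            return value or {}

        def _deserialize(self, value, attr, data, **kwargs):
            return value
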
@@ -49,7 +49,7 @@ class DAGRunSchema(SQLAlchemySchema):
         model = DagRun
         dateformat = 'iso'

-    run_id = auto_field(dump_to='dag_run_id', load_from='dag_run_id')
+    run_id = auto_field(data_key='dag_run_id')
     dag_id = auto_field(dump_only=True)
     execution_date = auto_field()
     start_date = auto_field(dump_only=True)
@@ -39,7 +39,6 @@ class DAGSchema(SQLAlchemySchema):

     class Meta:
         """Meta"""
-
         model = DagModel

     dag_id = auto_field(dump_only=True)
@@ -56,7 +55,7 @@ class DAGSchema(SQLAlchemySchema):
     def get_owners(obj: DagModel):
         """Convert owners attribute to DAG representation"""

-        if not obj.owners:
+        if not getattr(obj, 'owners', None):
             return []
         return obj.owners.split(",")

@@ -29,8 +29,6 @@ class ImportErrorSchema(SQLAlchemySchema):
         """Meta"""

         model = ImportError
-        load_instance = True
-        exclude = ("id", "stacktrace")

     import_error_id = auto_field("id", dump_only=True)
     timestamp = auto_field(format="iso")
@@ -52,5 +50,5 @@ class ImportErrorCollectionSchema(Schema):
     total_entries = fields.Int()


-import_error_schema = ImportErrorSchema(strict=True)
-import_error_collection_schema = ImportErrorCollectionSchema(strict=True)
+import_error_schema = ImportErrorSchema()
+import_error_collection_schema = ImportErrorCollectionSchema()
@@ -30,8 +30,8 @@ class EventLogSchema(SQLAlchemySchema):
         """ Meta """
         model = Log

-    id = auto_field(dump_to='event_log_id', dump_only=True)
-    dttm = auto_field(dump_to='when', dump_only=True)
+    id = auto_field(data_key='event_log_id', dump_only=True)
+    dttm = auto_field(data_key='when', dump_only=True)
     dag_id = auto_field(dump_only=True)
     task_id = auto_field(dump_only=True)
     event = auto_field(dump_only=True)
@@ -53,5 +53,5 @@ class EventLogCollectionSchema(Schema):
     total_entries = fields.Int()


-event_log_schema = EventLogSchema(strict=True)
-event_log_collection_schema = EventLogCollectionSchema(strict=True)
+event_log_schema = EventLogSchema()
+event_log_collection_schema = EventLogCollectionSchema()
@@ -32,4 +32,4 @@ class LogResponseObject(NamedTuple):
     continuation_token: str


-logs_schema = LogsSchema(strict=True)
+logs_schema = LogsSchema()
@@ -17,7 +17,7 @@

 from typing import List, NamedTuple

-from marshmallow import Schema, ValidationError, fields, validates_schema
+from marshmallow import Schema, fields
 from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field

 from airflow.models.pool import Pool
@@ -28,9 +28,7 @@ class PoolSchema(SQLAlchemySchema):

     class Meta:
         """Meta"""
-
         model = Pool
-        exclude = ("pool",)

     name = auto_field("pool")
     slots = auto_field()
@@ -67,13 +65,6 @@ class PoolSchema(SQLAlchemySchema):
         """
         return obj.open_slots()

-    @validates_schema(pass_original=True)
-    def check_unknown_fields(self, data, original_data):  # pylint: disable=unused-argument
-        """ Validates unknown field """
-        unknown = set(original_data) - set(self.fields)
-        if unknown:
-            raise ValidationError(f"Extra arguments passed: {list(unknown)}")
-

 class PoolCollection(NamedTuple):
     """List of Pools with metadata"""
@@ -89,5 +80,5 @@ class PoolCollectionSchema(Schema):
     total_entries = fields.Int()


-pool_collection_schema = PoolCollectionSchema(strict=True)
-pool_schema = PoolSchema(strict=True)
+pool_collection_schema = PoolCollectionSchema()
+pool_schema = PoolSchema()
@@ -30,5 +30,5 @@ class VariableCollectionSchema(Schema):
     total_entries = fields.Int()


-variable_schema = VariableSchema(strict=True)
-variable_collection_schema = VariableCollectionSchema(strict=True)
+variable_schema = VariableSchema()
+variable_collection_schema = VariableCollectionSchema()
@@ -24,4 +24,4 @@ class VersionInfoSchema(Schema):
     git_version = fields.String(dump_only=True)


-version_info_schema = VersionInfoSchema(strict=True)
+version_info_schema = VersionInfoSchema()
@@ -58,6 +58,6 @@ class XComCollectionSchema(Schema):
     total_entries = fields.Int()


-xcom_schema = XComSchema(strict=True)
-xcom_collection_item_schema = XComCollectionItemSchema(strict=True)
-xcom_collection_schema = XComCollectionSchema(strict=True)
+xcom_schema = XComSchema()
+xcom_collection_item_schema = XComCollectionItemSchema()
+xcom_collection_schema = XComCollectionSchema()
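
Every schema module above drops strict=True because marshmallow 3 removed the flag: all schemas now behave strictly, raising ValidationError on bad input rather than returning an errors dict. Since Schema.__init__ takes keyword-only arguments in v3, passing strict=True would itself be a TypeError:

    # marshmallow 2: raising on errors was opt-in
    pool_schema = PoolSchema(strict=True)
    # marshmallow 3: always raises ValidationError on invalid input
    pool_schema = PoolSchema()
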
@@ -1,7 +1,7 @@
 # Editable install with no version control (apache-airflow==2.0.0.dev0)
 Authlib==0.14.3
 Babel==2.8.0
-Flask-AppBuilder==2.3.4
+Flask-AppBuilder==3.0.0
 Flask-Babel==1.0.0
 Flask-Bcrypt==0.7.1
 Flask-Caching==1.3.3
@@ -27,9 +27,9 @@ PySmbClient==0.1.5
 PyYAML==5.3.1
 Pygments==2.6.1
 SQLAlchemy-JSONField==0.9.0
-SQLAlchemy-Utils==0.36.6
+SQLAlchemy-Utils==0.36.7
 SQLAlchemy==1.3.18
-Sphinx==3.1.1
+Sphinx==3.1.2
 Unidecode==1.1.1
 WTForms==2.3.1
 Werkzeug==0.16.1
@@ -41,7 +41,7 @@ amqp==2.6.0
 analytics-python==1.2.9
 ansiwrap==0.8.4
 apipkg==1.5
-apispec==1.3.3
+apispec==3.3.1
 appdirs==1.4.4
 argcomplete==1.11.1
 asn1crypto==1.3.0
@@ -61,7 +61,7 @@ azure-mgmt-containerinstance==1.5.0
 azure-mgmt-datalake-nspkg==3.0.1
 azure-mgmt-datalake-store==0.5.0
 azure-mgmt-nspkg==3.0.2
-azure-mgmt-resource==10.0.0
+azure-mgmt-resource==10.1.0
 azure-nspkg==3.0.2
 azure-storage-blob==2.1.0
 azure-storage-common==2.1.0
@@ -72,9 +72,9 @@ beautifulsoup4==4.7.1
 billiard==3.6.3.0
 black==19.10b0
 blinker==1.4
-boto3==1.14.14
+boto3==1.14.16
 boto==2.49.0
-botocore==1.17.14
+botocore==1.17.16
 bowler==0.8.0
 cached-property==1.5.1
 cachetools==4.1.1
@@ -100,8 +100,8 @@ croniter==0.3.34
 cryptography==2.9.2
 curlify==2.2.1
 cx-Oracle==8.0.0
-dask==2.19.0
-datadog==0.37.1
+dask==2.20.0
+datadog==0.38.0
 decorator==4.4.2
 defusedxml==0.6.0
 dill==0.3.2
@@ -128,7 +128,7 @@ flake8-colors==0.1.6
 flake8==3.8.3
 flaky==3.6.1
 flask-swagger==0.2.13
-flower==0.9.4
+flower==0.9.5
 freezegun==0.3.15
 fsspec==0.7.4
 funcsigs==1.0.2
@@ -178,16 +178,15 @@ gunicorn==19.10.0
 hdfs==2.5.8
 hmsclient==0.1.1
 httplib2==0.18.1
-humanize==0.5.1
+humanize==2.5.0
 hvac==0.10.4
-identify==1.4.20
+identify==1.4.21
 idna-ssl==1.1.0
 idna==2.10
 ijson==2.6.1
 imagesize==1.2.0
 immutables==0.14
 importlib-metadata==1.7.0
-importlib-resources==2.0.1
+importlib-resources==3.0.0
 inflection==0.5.0
 ipdb==0.13.3
 ipython-genutils==0.2.0
@@ -214,9 +213,9 @@ lazy-object-proxy==1.5.0
 ldap3==2.7
 lockfile==0.12.2
 marshmallow-enum==1.5.1
-marshmallow-oneofschema==1.0.6
+marshmallow-oneofschema==2.0.1
 marshmallow-sqlalchemy==0.23.1
-marshmallow==2.21.0
+marshmallow==3.6.1
 mccabe==0.6.1
 mock==4.0.2
 mongomock==3.19.0
@@ -260,9 +259,10 @@ pickleshare==0.7.5
 pinotdb==0.1.1
 pipdeptree==1.0.0
 pluggy==0.13.1
-pre-commit==2.5.1
+pre-commit==2.6.0
 presto-python-client==0.7.0
+prison==0.1.3
 prometheus-client==0.8.0
 prompt-toolkit==3.0.5
 protobuf==3.12.2
 psutil==5.7.0
@@ -275,7 +275,7 @@ pyarrow==0.17.1
 pyasn1-modules==0.2.8
 pyasn1==0.4.8
 pycodestyle==2.6.0
-pycountry==19.8.18
+pycountry==20.7.3
 pycparser==2.20
 pycryptodomex==3.9.8
 pydata-google-auth==1.1.0
@@ -328,7 +328,7 @@ sasl==0.2.1
 semver==2.10.2
 sendgrid==6.4.1
 sentinels==1.0.0
-sentry-sdk==0.15.1
+sentry-sdk==0.16.0
 setproctitle==1.1.10
 sh==1.13.1
 simple-salesforce==1.1.0
@@ -1,7 +1,7 @@
 # Editable install with no version control (apache-airflow==2.0.0.dev0)
 Authlib==0.14.3
 Babel==2.8.0
-Flask-AppBuilder==2.3.4
+Flask-AppBuilder==3.0.0
 Flask-Babel==1.0.0
 Flask-Bcrypt==0.7.1
 Flask-Caching==1.3.3
@@ -27,9 +27,9 @@ PySmbClient==0.1.5
 PyYAML==5.3.1
 Pygments==2.6.1
 SQLAlchemy-JSONField==0.9.0
-SQLAlchemy-Utils==0.36.6
+SQLAlchemy-Utils==0.36.7
 SQLAlchemy==1.3.18
-Sphinx==3.1.1
+Sphinx==3.1.2
 Unidecode==1.1.1
 WTForms==2.3.1
 Werkzeug==0.16.1
@@ -41,7 +41,7 @@ amqp==2.6.0
 analytics-python==1.2.9
 ansiwrap==0.8.4
 apipkg==1.5
-apispec==1.3.3
+apispec==3.3.1
 appdirs==1.4.4
 argcomplete==1.11.1
 asn1crypto==1.3.0
@@ -61,7 +61,7 @@ azure-mgmt-containerinstance==1.5.0
 azure-mgmt-datalake-nspkg==3.0.1
 azure-mgmt-datalake-store==0.5.0
 azure-mgmt-nspkg==3.0.2
-azure-mgmt-resource==10.0.0
+azure-mgmt-resource==10.1.0
 azure-nspkg==3.0.2
 azure-storage-blob==2.1.0
 azure-storage-common==2.1.0
@@ -72,9 +72,9 @@ beautifulsoup4==4.7.1
 billiard==3.6.3.0
 black==19.10b0
 blinker==1.4
-boto3==1.14.14
+boto3==1.14.16
 boto==2.49.0
-botocore==1.17.14
+botocore==1.17.16
 bowler==0.8.0
 cached-property==1.5.1
 cachetools==4.1.1
@@ -99,8 +99,8 @@ croniter==0.3.34
 cryptography==2.9.2
 curlify==2.2.1
 cx-Oracle==8.0.0
-dask==2.19.0
-datadog==0.37.1
+dask==2.20.0
+datadog==0.38.0
 decorator==4.4.2
 defusedxml==0.6.0
 dill==0.3.2
@@ -127,7 +127,7 @@ flake8-colors==0.1.6
 flake8==3.8.3
 flaky==3.6.1
 flask-swagger==0.2.13
-flower==0.9.4
+flower==0.9.5
 freezegun==0.3.15
 fsspec==0.7.4
 funcsigs==1.0.2
@@ -177,11 +177,10 @@ gunicorn==19.10.0
 hdfs==2.5.8
 hmsclient==0.1.1
 httplib2==0.18.1
-humanize==0.5.1
+humanize==2.5.0
 hvac==0.10.4
-identify==1.4.20
+identify==1.4.21
 idna==2.10
 ijson==2.6.1
 imagesize==1.2.0
 importlib-metadata==1.7.0
 inflection==0.5.0
@@ -210,9 +209,9 @@ lazy-object-proxy==1.5.0
 ldap3==2.7
 lockfile==0.12.2
 marshmallow-enum==1.5.1
-marshmallow-oneofschema==1.0.6
+marshmallow-oneofschema==2.0.1
 marshmallow-sqlalchemy==0.23.1
-marshmallow==2.21.0
+marshmallow==3.6.1
 mccabe==0.6.1
 mock==4.0.2
 mongomock==3.19.0
@@ -255,9 +254,10 @@ pickleshare==0.7.5
 pinotdb==0.1.1
 pipdeptree==1.0.0
 pluggy==0.13.1
-pre-commit==2.5.1
+pre-commit==2.6.0
 presto-python-client==0.7.0
+prison==0.1.3
 prometheus-client==0.8.0
 prompt-toolkit==3.0.5
 protobuf==3.12.2
 psutil==5.7.0
@@ -270,7 +270,7 @@ pyarrow==0.17.1
 pyasn1-modules==0.2.8
 pyasn1==0.4.8
 pycodestyle==2.6.0
-pycountry==19.8.18
+pycountry==20.7.3
 pycparser==2.20
 pycryptodomex==3.9.8
 pydata-google-auth==1.1.0
@@ -322,7 +323,7 @@ sasl==0.2.1
 semver==2.10.2
 sendgrid==6.4.1
 sentinels==1.0.0
-sentry-sdk==0.15.1
+sentry-sdk==0.16.0
 setproctitle==1.1.10
 sh==1.13.1
 simple-salesforce==1.1.0
@@ -1,7 +1,7 @@
 # Editable install with no version control (apache-airflow==2.0.0.dev0)
 Authlib==0.14.3
 Babel==2.8.0
-Flask-AppBuilder==2.3.4
+Flask-AppBuilder==3.0.0
 Flask-Babel==1.0.0
 Flask-Bcrypt==0.7.1
 Flask-Caching==1.3.3
@@ -27,9 +27,9 @@ PySmbClient==0.1.5
 PyYAML==5.3.1
 Pygments==2.6.1
 SQLAlchemy-JSONField==0.9.0
-SQLAlchemy-Utils==0.36.6
+SQLAlchemy-Utils==0.36.7
 SQLAlchemy==1.3.18
-Sphinx==3.1.1
+Sphinx==3.1.2
 Unidecode==1.1.1
 WTForms==2.3.1
 Werkzeug==0.16.1
@@ -41,7 +41,7 @@ amqp==2.6.0
 analytics-python==1.2.9
 ansiwrap==0.8.4
 apipkg==1.5
-apispec==1.3.3
+apispec==3.3.1
 appdirs==1.4.4
 argcomplete==1.11.1
 asn1crypto==1.3.0
@@ -61,7 +61,7 @@ azure-mgmt-containerinstance==1.5.0
 azure-mgmt-datalake-nspkg==3.0.1
 azure-mgmt-datalake-store==0.5.0
 azure-mgmt-nspkg==3.0.2
-azure-mgmt-resource==10.0.0
+azure-mgmt-resource==10.1.0
 azure-nspkg==3.0.2
 azure-storage-blob==2.1.0
 azure-storage-common==2.1.0
@@ -72,9 +72,9 @@ beautifulsoup4==4.7.1
 billiard==3.6.3.0
 black==19.10b0
 blinker==1.4
-boto3==1.14.14
+boto3==1.14.16
 boto==2.49.0
-botocore==1.17.14
+botocore==1.17.16
 bowler==0.8.0
 cached-property==1.5.1
 cachetools==4.1.1
@@ -99,8 +99,8 @@ croniter==0.3.34
 cryptography==2.9.2
 curlify==2.2.1
 cx-Oracle==8.0.0
-dask==2.19.0
-datadog==0.37.1
+dask==2.20.0
+datadog==0.38.0
 decorator==4.4.2
 defusedxml==0.6.0
 dill==0.3.2
@@ -127,7 +127,7 @@ flake8-colors==0.1.6
 flake8==3.8.3
 flaky==3.6.1
 flask-swagger==0.2.13
-flower==0.9.4
+flower==0.9.5
 freezegun==0.3.15
 fsspec==0.7.4
 funcsigs==1.0.2
@@ -177,9 +177,9 @@ gunicorn==19.10.0
 hdfs==2.5.8
 hmsclient==0.1.1
 httplib2==0.18.1
-humanize==0.5.1
+humanize==2.5.0
 hvac==0.10.4
-identify==1.4.20
+identify==1.4.21
 idna==2.10
 ijson==2.6.1
 imagesize==1.2.0
@@ -210,9 +210,9 @@ lazy-object-proxy==1.5.0
 ldap3==2.7
 lockfile==0.12.2
 marshmallow-enum==1.5.1
-marshmallow-oneofschema==1.0.6
+marshmallow-oneofschema==2.0.1
 marshmallow-sqlalchemy==0.23.1
-marshmallow==2.21.0
+marshmallow==3.6.1
 mccabe==0.6.1
 mock==4.0.2
 mongomock==3.19.0
@@ -255,9 +255,10 @@ pickleshare==0.7.5
 pinotdb==0.1.1
 pipdeptree==1.0.0
 pluggy==0.13.1
-pre-commit==2.5.1
+pre-commit==2.6.0
 presto-python-client==0.7.0
+prison==0.1.3
 prometheus-client==0.8.0
 prompt-toolkit==3.0.5
 protobuf==3.12.2
 psutil==5.7.0
@@ -270,7 +271,7 @@ pyarrow==0.17.1
 pyasn1-modules==0.2.8
 pyasn1==0.4.8
 pycodestyle==2.6.0
-pycountry==19.8.18
+pycountry==20.7.3
 pycparser==2.20
 pycryptodomex==3.9.8
 pydata-google-auth==1.1.0
@@ -322,7 +323,7 @@ sasl==0.2.1
 semver==2.10.2
 sendgrid==6.4.1
 sentinels==1.0.0
-sentry-sdk==0.15.1
+sentry-sdk==0.16.0
 setproctitle==1.1.10
 sh==1.13.1
 simple-salesforce==1.1.0
@@ -1 +1 @@
-e50c855fa7de97b2eca8152389400a5b /opt/airflow/setup.py
+2f6a2d89bfb00fa5bc04bc00f2d488e7 /opt/airflow/setup.py
@@ -1 +1 @@
-e50c855fa7de97b2eca8152389400a5b /opt/airflow/setup.py
+2f6a2d89bfb00fa5bc04bc00f2d488e7 /opt/airflow/setup.py
@@ -1 +1 @@
-e50c855fa7de97b2eca8152389400a5b /opt/airflow/setup.py
+2f6a2d89bfb00fa5bc04bc00f2d488e7 /opt/airflow/setup.py
setup.py
@@ -194,7 +194,7 @@ azure = [
     'azure-cosmos>=3.0.1,<4',
     'azure-datalake-store>=0.0.45',
     'azure-kusto-data>=0.0.43,<0.1',
-    'azure-mgmt-containerinstance>=1.5.0',
+    'azure-mgmt-containerinstance>=1.5.0,<2.0',
     'azure-mgmt-datalake-store>=0.5.0',
     'azure-mgmt-resource>=2.2.0',
     'azure-storage>=0.34.0, <0.37.0',
@@ -215,7 +215,8 @@ cloudant = [
     'cloudant>=2.0',
 ]
 dask = [
-    'distributed>=2.11.1, <3',
+    'cloudpickle>=1.4.1, <1.5.0',
+    'distributed>=2.11.1, <2.20'
 ]
 databricks = [
     'requests>=2.20.0, <3',
@@ -588,7 +589,7 @@ EXTRAS_REQUIREMENTS: Dict[str, Iterable[str]] = {
     'jdbc': jdbc,
     'jira': jira,
     'kerberos': kerberos,
-    'kubernetes': kubernetes, # TODO: remove this in Airflow 2.1
+    'kubernetes': kubernetes,  # TODO: remove this in Airflow 2.1
     'ldap': ldap,
     "microsoft.azure": azure,
     "microsoft.mssql": mssql,
@@ -694,7 +695,7 @@ INSTALL_REQUIREMENTS = [
     'cryptography>=0.9.3',
     'dill>=0.2.2, <0.4',
     'flask>=1.1.0, <2.0',
-    'flask-appbuilder~=2.3.4',
+    'flask-appbuilder>2.3.4,~=3.0',
     'flask-caching>=1.3.3, <1.4.0',
     'flask-login>=0.3, <0.5',
     'flask-swagger==0.2.13',
@@ -710,7 +711,7 @@ INSTALL_REQUIREMENTS = [
     'lockfile>=0.12.2',
     'markdown>=2.5.2, <3.0',
     'markupsafe>=1.1.1, <2.0',
-    'marshmallow-oneofschema<2',
+    'marshmallow-oneofschema>=2.0.1',
     'pandas>=0.17.1, <2.0',
     'pendulum~=2.0',
     'pep562~=1.0;python_version<"3.7"',
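
The new pin 'flask-appbuilder>2.3.4,~=3.0' combines an exclusive lower bound with a PEP 440 compatible-release clause: ~=3.0 is equivalent to >=3.0, <4.0, so any 3.x release satisfies it while 4.0 will not. A quick verification sketch using the packaging library (not part of the commit):

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet(">2.3.4,~=3.0")
    print("3.0.0" in spec)  # True
    print("3.9.9" in spec)  # True
    print("4.0.0" in spec)  # False
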
@@ -385,7 +385,7 @@ class TestPatchConnection(TestConnectionEndpoint):
                     "connection_id": "test-connection-id",
                     "conn_type": "test-type",
                     "extras": "{}",  # extras not a known field e.g typo
-                }, "Extra arguments passed: ['extras']"
+                }, "extras"
             ),
             (
                 {
@@ -393,7 +393,7 @@ class TestPatchConnection(TestConnectionEndpoint):
                     "conn_type": "test-type",
                     "invalid_field": "invalid field",  # unknown field
                     "_password": "{}",  # _password not a known field
-                }, "Extra arguments passed:"
+                }, "_password"
             ),
         ]
     )
@@ -82,6 +82,7 @@ class TestGetDagDetails(TestDagEndpoint):
             'is_paused': None,
+            'is_subdag': False,
             'orientation': 'LR',
             'owners': [],
             'schedule_interval': {
                 '__type': 'TimeDelta',
                 'days': 1,
@@ -115,6 +116,7 @@ class TestGetDagDetails(TestDagEndpoint):
             'is_paused': None,
+            'is_subdag': False,
             'orientation': 'LR',
             'owners': [],
             'schedule_interval': {
                 '__type': 'TimeDelta',
                 'days': 1,
@@ -244,7 +244,7 @@ class TestPostPool(TestBasePoolEndpoints):
             (
                 "for extra fields",
                 {"name": "invalid_pool", "slots": 3, "extra_field_1": "extra"},
-                "Extra arguments passed: ['extra_field_1']",
+                "{'extra_field_1': ['Unknown field.']}",
             ),
         ]
     )
@@ -292,7 +292,7 @@ class TestPatchPool(TestBasePoolEndpoints):
             ("'slots' is a required property", {"name": "test_pool_a"}),
             # Extra properties
             (
-                "Extra arguments passed: ['extra_field']",
+                "{'extra_field': ['Unknown field.']}",
                 {"name": "test_pool_a", "slots": 3, "extra_field": "extra"},
             ),
         ]
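
The expected error strings change because the custom validator's "Extra arguments passed: [...]" wording is gone; the assertions now match marshmallow 3's built-in unknown-field messages, which are keyed by field name. A sketch of where that text comes from (illustrative stand-in schema, not the Airflow one):

    from marshmallow import Schema, fields, ValidationError

    class SomePoolSchema(Schema):
        name = fields.String()

    try:
        SomePoolSchema().load({"name": "x", "extra_field": "extra"})
    except ValidationError as err:
        print(err.messages)  # {'extra_field': ['Unknown field.']}
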
@@ -181,5 +181,5 @@ class TestPostVariables(TestVariableEndpoint):
             "title": "Invalid Variable schema",
             "status": 400,
             "type": "about:blank",
-            "detail": "{'value': ['Missing data for required field.']}",
+            "detail": "{'value': ['Missing data for required field.'], 'v': ['Unknown field.']}",
         }
@@ -32,7 +32,7 @@ class TestTimeDeltaSchema(unittest.TestCase):
         result = schema_instance.dump(instance)
         self.assertEqual(
             {"__type": "TimeDelta", "days": 12, "seconds": 0, "microseconds": 0},
-            result.data
+            result
         )

     def test_should_deserialize(self):
@@ -40,7 +40,7 @@ class TestTimeDeltaSchema(unittest.TestCase):
         schema_instance = TimeDeltaSchema()
         result = schema_instance.load(instance)
         expected_instance = datetime.timedelta(days=12)
-        self.assertEqual(expected_instance, result.data)
+        self.assertEqual(expected_instance, result)


 class TestRelativeDeltaSchema(unittest.TestCase):
@@ -67,7 +67,7 @@ class TestRelativeDeltaSchema(unittest.TestCase):
                 "year": None,
                 "years": 0,
             },
-            result.data,
+            result,
         )

     def test_should_deserialize(self):
@@ -75,7 +75,7 @@ class TestRelativeDeltaSchema(unittest.TestCase):
         schema_instance = RelativeDeltaSchema()
         result = schema_instance.load(instance)
         expected_instance = relativedelta.relativedelta(days=+12)
-        self.assertEqual(expected_instance, result.data)
+        self.assertEqual(expected_instance, result)


 class TestCronExpressionSchema(unittest.TestCase):
@@ -84,7 +84,7 @@ class TestCronExpressionSchema(unittest.TestCase):
         schema_instance = CronExpressionSchema()
         result = schema_instance.load(instance)
         expected_instance = CronExpression("5 4 * * *")
-        self.assertEqual(expected_instance, result.data)
+        self.assertEqual(expected_instance, result)


 class TestScheduleIntervalSchema(unittest.TestCase):
@@ -94,7 +94,7 @@ class TestScheduleIntervalSchema(unittest.TestCase):
         result = schema_instance.dump(instance)
         self.assertEqual(
             {"__type": "TimeDelta", "days": 12, "seconds": 0, "microseconds": 0},
-            result.data
+            result
         )

     def test_should_deserialize_timedelta(self):
@@ -102,7 +102,7 @@ class TestScheduleIntervalSchema(unittest.TestCase):
         schema_instance = ScheduleIntervalSchema()
         result = schema_instance.load(instance)
         expected_instance = datetime.timedelta(days=12)
-        self.assertEqual(expected_instance, result.data)
+        self.assertEqual(expected_instance, result)

     def test_should_serialize_relative_delta(self):
         instance = relativedelta.relativedelta(days=+12)
@@ -127,7 +127,7 @@ class TestScheduleIntervalSchema(unittest.TestCase):
                 "year": None,
                 "years": 0,
             },
-            result.data,
+            result,
         )

     def test_should_deserialize_relative_delta(self):
@@ -135,11 +135,11 @@ class TestScheduleIntervalSchema(unittest.TestCase):
         schema_instance = ScheduleIntervalSchema()
         result = schema_instance.load(instance)
         expected_instance = relativedelta.relativedelta(days=+12)
-        self.assertEqual(expected_instance, result.data)
+        self.assertEqual(expected_instance, result)

     def test_should_serialize_cron_expresssion(self):
         instance = "5 4 * * *"
         schema_instance = ScheduleIntervalSchema()
         result = schema_instance.dump(instance)
         expected_instance = {"__type": "CronExpression", "value": "5 4 * * *"}
-        self.assertEqual(expected_instance, result.data)
+        self.assertEqual(expected_instance, result)
@@ -55,4 +55,4 @@ class TestConfigSchema:
                 },
             ]
         }
-        assert result.data == expected
+        assert result == expected
@@ -51,7 +51,7 @@ class TestConnectionCollectionItemSchema(unittest.TestCase):
         connection_model = session.query(Connection).first()
         deserialized_connection = connection_collection_item_schema.dump(connection_model)
         self.assertEqual(
-            deserialized_connection[0],
+            deserialized_connection,
             {
                 'connection_id': "mysql_default",
                 'conn_type': 'mysql',
@@ -79,7 +79,7 @@ class TestConnectionCollectionItemSchema(unittest.TestCase):
         result_2 = connection_collection_item_schema.load(connection_dump_2)

         self.assertEqual(
-            result_1[0],
+            result_1,
             {
                 'conn_id': "mysql_default_1",
                 'conn_type': 'mysql',
@@ -90,7 +90,7 @@ class TestConnectionCollectionItemSchema(unittest.TestCase):
             }
         )
         self.assertEqual(
-            result_2[0],
+            result_2,
             {
                 'conn_id': "mysql_default_2",
                 'conn_type': "postgres",
@@ -136,7 +136,7 @@ class TestConnectionCollectionSchema(unittest.TestCase):
         )
         deserialized_connections = connection_collection_schema.dump(instance)
         self.assertEqual(
-            deserialized_connections[0],
+            deserialized_connections,
             {
                 'connections': [
                     {
@@ -187,7 +187,7 @@ class TestConnectionSchema(unittest.TestCase):
         connection_model = session.query(Connection).first()
         deserialized_connection = connection_schema.dump(connection_model)
         self.assertEqual(
-            deserialized_connection[0],
+            deserialized_connection,
             {
                 'connection_id': "mysql_default",
                 'conn_type': 'mysql',
@@ -211,7 +211,7 @@ class TestConnectionSchema(unittest.TestCase):
         }
         result = connection_schema.load(den)
         self.assertEqual(
-            result[0],
+            result,
             {
                 'conn_id': "mysql_default",
                 'conn_type': 'mysql',
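
These tests previously indexed results with [0] instead of .data: the marshmallow 2 MarshalResult/UnmarshalResult namedtuples were (data, errors) pairs, so position 0 was just another spelling of .data. With marshmallow 3 returning the payload directly, both spellings reduce to the bare result:

    # marshmallow 2: dump()/load() returned a (data, errors) namedtuple
    payload = schema.dump(obj)[0]   # same as schema.dump(obj).data
    # marshmallow 3: the payload is the return value itself
    payload = schema.dump(obj)
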
@@ -18,6 +18,7 @@
 import unittest

 from dateutil.parser import parse
+from marshmallow import ValidationError

 from airflow.api_connexion.schemas.dag_run_schema import (
     DAGRunCollection, dagrun_collection_schema, dagrun_schema,
@@ -55,7 +56,7 @@ class TestDAGRunSchema(TestDAGRunBase):
         deserialized_dagrun = dagrun_schema.dump(dagrun_model)

         self.assertEqual(
-            deserialized_dagrun[0],
+            deserialized_dagrun,
             {
                 'dag_id': None,
                 'dag_run_id': 'my-dag-run',
@@ -73,19 +74,15 @@ class TestDAGRunSchema(TestDAGRunBase):
         # and conf are loaded.
         # dag_run_id should be loaded as run_id
         serialized_dagrun = {
-            'dag_id': None,
             'dag_run_id': 'my-dag-run',
-            'end_date': None,
-            'state': 'failed',
             'execution_date': self.default_time,
             'external_trigger': True,
-            'start_date': self.default_time,
             'conf': '{"start": "stop"}'
         }

         result = dagrun_schema.load(serialized_dagrun)
         self.assertEqual(
-            result.data,
+            result,
             {
                 'run_id': 'my-dag-run',
                 'execution_date': parse(self.default_time),
@@ -95,27 +92,19 @@ class TestDAGRunSchema(TestDAGRunBase):
         )

     def test_deserialize_2(self):
-        # Invalid state field should return None
+        # loading dump_only field raises
         serialized_dagrun = {
             'dag_id': None,
             'dag_run_id': 'my-dag-run',
             'end_date': None,
-            'state': 'faileds',
+            'state': 'failed',
             'execution_date': self.default_time,
             'external_trigger': True,
             'start_date': self.default_time,
             'conf': {"start": "stop"}
         }

-        result = dagrun_schema.load(serialized_dagrun)
-        self.assertEqual(
-            result.data,
-            {
-                'run_id': 'my-dag-run',
-                'execution_date': parse(self.default_time),
-                'conf': {"start": "stop"}
-            }
-        )
+        with self.assertRaises(ValidationError):
+            dagrun_schema.load(serialized_dagrun)


 class TestDagRunCollection(TestDAGRunBase):
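
test_deserialize_2 flips from asserting a lenient result to asserting a failure: in marshmallow 3, dump_only fields count as unknown input during load, so feeding dag_id, start_date, etc. to dagrun_schema.load() now raises instead of being silently accepted. A sketch of the behaviour (illustrative schema):

    from marshmallow import Schema, fields, ValidationError

    class RunSchema(Schema):
        run_id = fields.String()
        dag_id = fields.String(dump_only=True)

    try:
        RunSchema().load({"run_id": "r1", "dag_id": "d1"})
    except ValidationError as err:
        print(err.messages)  # {'dag_id': ['Unknown field.']}
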
@@ -142,7 +131,7 @@ class TestDagRunCollection(TestDAGRunBase):
             total_entries=2)
         deserialized_dagruns = dagrun_collection_schema.dump(instance)
         self.assertEqual(
-            deserialized_dagruns.data,
+            deserialized_dagruns,
             {
                 'dag_runs': [
                     {
@@ -51,7 +51,7 @@ class TestDagSchema(unittest.TestCase):
                 "schedule_interval": {"__type": "CronExpression", "value": "5 4 * * *"},
                 "tags": [{"name": "tag-1"}, {"name": "tag-2"}],
             },
-            serialized_dag.data,
+            serialized_dag,
         )


@@ -89,7 +89,7 @@ class TestDAGCollectionSchema(unittest.TestCase):
                 ],
                 "total_entries": 2,
             },
-            schema.dump(instance).data,
+            schema.dump(instance),
         )


@@ -115,9 +115,10 @@ class TestDAGDetailSchema:
             'is_paused': None,
+            'is_subdag': False,
             'orientation': 'LR',
             'owners': [],
             'schedule_interval': {'__type': 'TimeDelta', 'days': 1, 'seconds': 0, 'microseconds': 0},
             'start_date': '2020-06-19T00:00:00+00:00',
             'tags': None,
             'timezone': "Timezone('UTC')"
         }
-        assert schema.dump(dag).data == expected
+        assert schema.dump(dag) == expected
@@ -44,7 +44,7 @@ class TestErrorSchema(TestErrorSchemaBase):
         )
         session.add(import_error)
         session.commit()
-        serialized_data = import_error_schema.dump(import_error).data
+        serialized_data = import_error_schema.dump(import_error)
         serialized_data["import_error_id"] = 1
         self.assertEqual(
             {
@@ -76,14 +76,12 @@ class TestErrorCollectionSchema(TestErrorSchemaBase):
         session.commit()
         query = session.query(ImportError)
         query_list = query.all()
-        serialized_data = (
-            import_error_collection_schema.dump(
-                ImportErrorCollection(import_errors=query_list, total_entries=2)
-            ).data,
+        serialized_data = import_error_collection_schema.dump(
+            ImportErrorCollection(import_errors=query_list, total_entries=2)
         )
         # To maintain consistency in the key sequence accross the db in tests
-        serialized_data[0]["import_errors"][0]["import_error_id"] = 1
-        serialized_data[0]["import_errors"][1]["import_error_id"] = 2
+        serialized_data["import_errors"][0]["import_error_id"] = 1
+        serialized_data["import_errors"][1]["import_error_id"] = 2
         self.assertEqual(
             {
                 "import_errors": [
@@ -102,5 +100,5 @@ class TestErrorCollectionSchema(TestErrorSchemaBase):
                 ],
                 "total_entries": 2,
             },
-            serialized_data[0],
+            serialized_data,
         )
@@ -60,7 +60,7 @@ class TestEventLogSchema(TestEventLogSchemaBase):
         log_model = session.query(Log).first()
         deserialized_log = event_log_schema.dump(log_model)
         self.assertEqual(
-            deserialized_log[0],
+            deserialized_log,
             {
                 "event_log_id": event_log_model.id,
                 "event": "TEST_EVENT",
@@ -95,7 +95,7 @@ class TestEventLogCollection(TestEventLogSchemaBase):
             total_entries=2)
         deserialized_event_logs = event_log_collection_schema.dump(instance)
         self.assertEqual(
-            deserialized_event_logs.data,
+            deserialized_event_logs,
             {
                 "event_logs": [
                     {
@@ -38,7 +38,7 @@ class TestPoolSchema(unittest.TestCase):
         pool_instance = session.query(Pool).filter(Pool.pool == pool_model.pool).first()
         serialized_pool = pool_schema.dump(pool_instance)
         self.assertEqual(
-            serialized_pool.data,
+            serialized_pool,
             {
                 "name": "test_pool",
                 "slots": 2,
@@ -53,7 +53,7 @@ class TestPoolSchema(unittest.TestCase):
     def test_desearialize(self, session):
         pool_dict = {"name": "test_pool", "slots": 3}
         deserialized_pool = pool_schema.load(pool_dict, session=session)
-        self.assertNotIsInstance(deserialized_pool.data, Pool)  # Checks if load_instance is set to True
+        self.assertNotIsInstance(deserialized_pool, Pool)  # Checks if load_instance is set to True


 class TestPoolCollectionSchema(unittest.TestCase):
@@ -89,5 +89,5 @@ class TestPoolCollectionSchema(unittest.TestCase):
                 ],
                 "total_entries": 2,
             },
-            pool_collection_schema.dump(instance).data,
+            pool_collection_schema.dump(instance),
         )
@@ -56,7 +56,7 @@ class TestTaskSchema:
             "wait_for_downstream": False,
             "weight_rule": "downstream",
         }
-        assert expected == result.data
+        assert expected == result


 class TestTaskCollectionSchema:
@@ -96,4 +96,4 @@ class TestTaskCollectionSchema:
             ],
             "total_entries": 1,
         }
-        assert expected == result.data
+        assert expected == result
@@ -31,7 +31,7 @@ class TestVersionInfoSchema(unittest.TestCase):
     ])
     def test_serialize(self, git_commit):
         version_info = VersionInfo("VERSION", git_commit)
-        current_data = version_info_schema.dump(version_info).data
+        current_data = version_info_schema.dump(version_info)

         expected_result = {'version': 'VERSION', 'git_version': git_commit}
         self.assertEqual(expected_result, current_data)
@@ -64,7 +64,7 @@ class TestXComCollectionItemSchema(TestXComSchemaBase):
         xcom_model = session.query(XCom).first()
         deserialized_xcom = xcom_collection_item_schema.dump(xcom_model)
         self.assertEqual(
-            deserialized_xcom[0],
+            deserialized_xcom,
             {
                 'key': 'test_key',
                 'timestamp': self.default_time,
@@ -84,7 +84,7 @@ class TestXComCollectionItemSchema(TestXComSchemaBase):
         }
         result = xcom_collection_item_schema.load(xcom_dump)
         self.assertEqual(
-            result[0],
+            result,
             {
                 'key': 'test_key',
                 'timestamp': self.default_time_parsed,
@@ -132,7 +132,7 @@ class TestXComCollectionSchema(TestXComSchemaBase):
             total_entries=xcom_models_query.count(),
         ))
         self.assertEqual(
-            deserialized_xcoms[0],
+            deserialized_xcoms,
             {
                 'xcom_entries': [
                     {
@@ -177,7 +177,7 @@ class TestXComSchema(TestXComSchemaBase):
         xcom_model = session.query(XCom).first()
         deserialized_xcom = xcom_schema.dump(xcom_model)
         self.assertEqual(
-            deserialized_xcom[0],
+            deserialized_xcom,
             {
                 'key': 'test_key',
                 'timestamp': self.default_time,
@@ -199,7 +199,7 @@ class TestXComSchema(TestXComSchemaBase):
         }
         result = xcom_schema.load(xcom_dump)
         self.assertEqual(
-            result[0],
+            result,
             {
                 'key': 'test_key',
                 'timestamp': self.default_time_parsed,