зеркало из
1
0
Форкнуть 0

Add encoder samples and update CHANGELOG.md (#35816)

This commit is contained in:
Yalin Li 2024-06-26 13:18:48 -07:00 коммит произвёл GitHub
Родитель ea090e9523
Коммит c9ac11bd8a
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: B5690EEEBB952194
17 изменённых файлов: 1293 добавлений и 551 удалений

Просмотреть файл

@ -1,14 +1,18 @@
# Release History
## 12.5.1 (Unreleased)
## 12.6.0b1 (Unreleased)
### Features Added
### Breaking Changes
* Added to support custom encoder in entity CRUD operations.
* Added to support custom Entity type.
* Added to support Entity property in Tuple and Enum types.
### Bugs Fixed
* Fixed a bug in encoder when Entity property has "@odata.type" provided.
* Fixed a bug in encoder that int32 and int64 are mapped to int32 when no "@odata.type" provided.
### Other Changes
* Removed value range validation for Entity property in int32 and int64.
## 12.5.0 (2024-01-10)

Просмотреть файл

@ -2,5 +2,5 @@
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "python",
"TagPrefix": "python/tables/azure-data-tables",
"Tag": "python/tables/azure-data-tables_7ddb8a1cfc"
"Tag": "python/tables/azure-data-tables_1fb1a4af1a"
}

Просмотреть файл

@ -17,3 +17,8 @@ STORAGE_OAUTH_SCOPE = "https://storage.azure.com/.default"
NEXT_TABLE_NAME = "x-ms-continuation-NextTableName"
NEXT_PARTITION_KEY = "x-ms-continuation-NextPartitionKey"
NEXT_ROW_KEY = "x-ms-continuation-NextRowKey"
# Edm.Int32 / Edm.Int64 boundary values. The entity encoder compares plain
# Python ints against these to decide whether a value fits int32 or must be
# sent as a string tagged with the Edm.Int64 odata type.
MAX_INT32 = (2**31) - 1  # 2147483647
MIN_INT32 = -(2**31)  # -2147483648
MAX_INT64 = (2**63) - 1  # 9223372036854775807
MIN_INT64 = -(2**63)  # -9223372036854775808

Просмотреть файл

@ -11,8 +11,8 @@ from datetime import datetime
from math import isnan
from ._entity import EdmType, TableEntity
from ._deserialize import _convert_to_entity
from ._common_conversion import _encode_base64, _to_utc_datetime
from ._constants import MAX_INT32, MIN_INT32, MAX_INT64, MIN_INT64
_ODATA_SUFFIX = "@odata.type"
T = TypeVar("T")
@ -50,7 +50,11 @@ class TableEntityEncoderABC(abc.ABC, Generic[T]):
if isinstance(value, str):
return None, value
if isinstance(value, int):
return None, value # TODO: Test what happens if the supplied value exceeds int32.
if MIN_INT32 <= value <= MAX_INT32:
return None, value
if MIN_INT64 <= value <= MAX_INT64:
return EdmType.INT64, str(value)
return None, value
if isinstance(value, float):
if isnan(value):
return EdmType.DOUBLE, "NaN"
@ -133,9 +137,6 @@ class TableEntityEncoderABC(abc.ABC, Generic[T]):
:rtype: dict
"""
@abc.abstractmethod
def decode_entity(self, entity: Dict[str, Union[str, int, float, bool]]) -> T: ...
class TableEntityEncoder(TableEntityEncoderABC[Union[TableEntity, Mapping[str, Any]]]):
def encode_entity(self, entity: Union[TableEntity, Mapping[str, Any]]) -> Dict[str, Union[str, int, float, bool]]:
@ -168,18 +169,16 @@ class TableEntityEncoder(TableEntityEncoderABC[Union[TableEntity, Mapping[str, A
for key, value in entity.items():
edm_type, value = self.prepare_value(key, value)
try:
if _ODATA_SUFFIX in key or key + _ODATA_SUFFIX in entity:
odata = f"{key}{_ODATA_SUFFIX}"
if _ODATA_SUFFIX in key or odata in entity:
encoded[key] = value
continue
# The edm type is decided by value
# For example, when value=EntityProperty(str(uuid.uuid4), "Edm.Guid"),
# the type is string instead of Guid after encoded
if edm_type:
encoded[key + _ODATA_SUFFIX] = edm_type.value if hasattr(edm_type, "value") else edm_type
encoded[odata] = edm_type.value if hasattr(edm_type, "value") else edm_type
except TypeError:
pass
encoded[key] = value
return encoded
def decode_entity(self, entity: Dict[str, Union[str, int, float, bool]]) -> TableEntity:
return _convert_to_entity(entity)

Просмотреть файл

@ -4,4 +4,4 @@
# license information.
# --------------------------------------------------------------------------
VERSION = "12.5.1"
VERSION = "12.6.0b1"

Просмотреть файл

@ -20,7 +20,6 @@ USAGE:
2) TABLES_STORAGE_ACCOUNT_NAME - the Tables storage account name
3) TABLES_PRIMARY_STORAGE_ACCOUNT_KEY - the Tables storage account access key
"""
import sys
import asyncio
import os
from datetime import datetime

Просмотреть файл

@ -22,7 +22,6 @@ USAGE:
4) STORAGE_ACCOUNT_NAME - the blob storage account name
5) STORAGE_ACCOUNT_KEY - the blob storage account key
"""
import sys
import asyncio
import json
import os

Просмотреть файл

@ -0,0 +1,199 @@
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: sample_custom_encoder_dataclass_async.py
DESCRIPTION:
These samples demonstrate the following: inserting entities into a table
and deleting entities from a table.
USAGE:
python sample_custom_encoder_dataclass_async.py
Set the environment variables with your own values before running the sample:
1) TABLES_STORAGE_ENDPOINT_SUFFIX - the Table service account URL suffix
2) TABLES_STORAGE_ACCOUNT_NAME - the name of the storage account
3) TABLES_PRIMARY_STORAGE_ACCOUNT_KEY - the storage account access key
"""
import os
import asyncio
from datetime import datetime, timezone
from uuid import uuid4, UUID
from dotenv import find_dotenv, load_dotenv
from dataclasses import dataclass, asdict
from typing import Dict, Union, Optional
from azure.data.tables import TableEntityEncoderABC, UpdateMode
from azure.data.tables.aio import TableClient
@dataclass
class Car:
    """Sample entity model with non-string property types (UUID, datetime, bytes).

    ``partition_key`` and ``row_key`` map to the table's PartitionKey/RowKey;
    ``row_key`` is a UUID that the custom encoder converts to a string.
    All non-key fields are optional so partial entities can be upserted/merged.
    """

    partition_key: str
    row_key: UUID
    price: Optional[float] = None
    last_updated: Optional[datetime] = None
    product_id: Optional[UUID] = None
    inventory_count: Optional[int] = None
    barcode: Optional[bytes] = None
    color: Optional[str] = None
    maker: Optional[str] = None
    model: Optional[str] = None
    production_date: Optional[datetime] = None
    mileage: Optional[int] = None  # sample uses 2**31, which exceeds the int32 range
    is_second_hand: Optional[bool] = None
class MyEncoder(TableEntityEncoderABC[Car]):
    """Custom encoder mapping a ``Car`` dataclass onto a flat table-entity dict."""

    def prepare_key(self, key: UUID) -> str:  # type: ignore[override]
        # Stringify the UUID before delegating to the base implementation.
        return super().prepare_key(str(key))

    def encode_entity(self, entity: Car) -> Dict[str, Union[str, int, float, bool]]:
        result: Dict[str, Union[str, int, float, bool]] = {}
        for name, raw in asdict(entity).items():
            if name == "partition_key":
                # this property should be "PartitionKey" in encoded result
                result["PartitionKey"] = raw
            elif name == "row_key":
                # this property should be "RowKey" in encoded result
                result["RowKey"] = str(raw)
            else:
                edm, prepared = self.prepare_value(name, raw)
                if edm:
                    result[f"{name}@odata.type"] = edm.value if hasattr(edm, "value") else edm
                result[name] = prepared
        return result
class InsertUpdateDeleteEntity(object):
    """Async sample driver: exercises create/get/delete and upsert/update of a
    ``Car`` entity through the custom ``MyEncoder``.
    """

    def __init__(self):
        # Credentials and endpoint come from environment variables (.env supported).
        load_dotenv(find_dotenv())
        self.access_key = os.environ["TABLES_PRIMARY_STORAGE_ACCOUNT_KEY"]
        self.endpoint_suffix = os.environ["TABLES_STORAGE_ENDPOINT_SUFFIX"]
        self.account_name = os.environ["TABLES_STORAGE_ACCOUNT_NAME"]
        self.endpoint = f"{self.account_name}.table.{self.endpoint_suffix}"
        self.connection_string = f"DefaultEndpointsProtocol=https;AccountName={self.account_name};AccountKey={self.access_key};EndpointSuffix={self.endpoint_suffix}"
        self.table_name = "CustomEncoderDataClassAsync"
        # Entity populating every field, including values that rely on the custom
        # encoder (UUID keys, bytes, tz-aware datetime, an int beyond int32 range).
        self.entity = Car(
            partition_key="PK",
            row_key=uuid4(),
            price=4.99,
            last_updated=datetime.today(),
            product_id=uuid4(),
            inventory_count=42,
            barcode=b"135aefg8oj0ld58",  # cspell:disable-line
            color="white",
            maker="maker",
            model="model",
            production_date=datetime(year=2014, month=4, day=1, hour=9, minute=30, second=45, tzinfo=timezone.utc),
            mileage=2**31,  # exceeds int32 range, so it is sent as Edm.Int64
            is_second_hand=True,
        )

    async def create_delete_entity(self):
        """Create a table, insert/fetch/delete one entity, then drop the table."""
        table_client = TableClient.from_connection_string(self.connection_string, self.table_name)
        async with table_client:
            await table_client.create_table()
            result = await table_client.create_entity(entity=self.entity, encoder=MyEncoder())
            print(f"Created entity: {result}")
            result = await table_client.get_entity(
                self.entity.partition_key,
                self.entity.row_key,  # type: ignore[arg-type] # intend to pass a non-string RowKey
                encoder=MyEncoder(),
            )
            print(f"Get entity result: {result}")
            await table_client.delete_entity(
                partition_key=self.entity.partition_key,
                row_key=self.entity.row_key,  # type: ignore[call-overload] # intend to pass a non-string RowKey
                encoder=MyEncoder(),
            )
            print("Successfully deleted!")
            await table_client.delete_table()
            print("Cleaned up")

    async def upsert_update_entities(self):
        """Demonstrate upsert (REPLACE then MERGE) and update with the custom encoder."""
        table_client = TableClient.from_connection_string(
            self.connection_string, table_name=f"{self.table_name}UpsertUpdate"
        )
        async with table_client:
            await table_client.create_table()
            # entity1 and entity2 share the same keys so they address one row.
            entity1 = Car(
                partition_key="PK",
                row_key=uuid4(),
                price=4.99,
                last_updated=datetime.today(),
                product_id=uuid4(),
                inventory_count=42,
                barcode=b"135aefg8oj0ld58",  # cspell:disable-line
            )
            entity2 = Car(
                partition_key=entity1.partition_key,
                row_key=entity1.row_key,
                color="red",
                maker="maker2",
                model="model2",
                production_date=datetime(year=2014, month=4, day=1, hour=9, minute=30, second=45, tzinfo=timezone.utc),
                mileage=2**31,  # exceeds int32 range, so it is sent as Edm.Int64
                is_second_hand=True,
            )
            # REPLACE upsert inserts entity2 (row does not exist yet).
            await table_client.upsert_entity(mode=UpdateMode.REPLACE, entity=entity2, encoder=MyEncoder())
            inserted_entity = await table_client.get_entity(
                entity2.partition_key,
                entity2.row_key,  # type: ignore[arg-type] # intend to pass a non-string RowKey
                encoder=MyEncoder(),
            )
            print(f"Inserted entity: {inserted_entity}")
            # MERGE upsert layers entity1's fields on top of the stored row.
            await table_client.upsert_entity(mode=UpdateMode.MERGE, entity=entity1, encoder=MyEncoder())
            merged_entity = await table_client.get_entity(
                entity1.partition_key,
                entity1.row_key,  # type: ignore[arg-type] # intend to pass a non-string RowKey
                encoder=MyEncoder(),
            )
            print(f"Merged entity: {merged_entity}")
            # REPLACE update overwrites the row with a sparsely-populated entity.
            entity3 = Car(
                partition_key=entity1.partition_key,
                row_key=entity2.row_key,
                color="white",
            )
            await table_client.update_entity(mode=UpdateMode.REPLACE, entity=entity3, encoder=MyEncoder())
            replaced_entity = await table_client.get_entity(
                entity3.partition_key,
                entity3.row_key,  # type: ignore[arg-type] # intend to pass a non-string RowKey
                encoder=MyEncoder(),
            )
            print(f"Replaced entity: {replaced_entity}")
            await table_client.update_entity(mode=UpdateMode.REPLACE, entity=entity2, encoder=MyEncoder())
            merged_entity = await table_client.get_entity(
                entity2.partition_key,
                entity2.row_key,  # type: ignore[arg-type] # intend to pass a non-string RowKey
                encoder=MyEncoder(),
            )
            print(f"Merged entity: {merged_entity}")
            await table_client.delete_table()
            print("Cleaned up")
async def main():
    """Run both sample scenarios sequentially."""
    ide = InsertUpdateDeleteEntity()
    await ide.create_delete_entity()
    await ide.upsert_update_entities()


if __name__ == "__main__":
    asyncio.run(main())

Просмотреть файл

@ -11,7 +11,7 @@ FILE: sample_insert_delete_entities_async.py
DESCRIPTION:
These samples demonstrate the following: inserting entities into a table
and deleting tables from a table.
and deleting entities from a table.
USAGE:
python sample_insert_delete_entities_async.py

Просмотреть файл

@ -20,7 +20,6 @@ USAGE:
2) TABLES_STORAGE_ACCOUNT_NAME - the Tables storage account name
3) TABLES_PRIMARY_STORAGE_ACCOUNT_KEY - the Tables storage account access key
"""
import sys
import os
from datetime import datetime
from dotenv import find_dotenv, load_dotenv

Просмотреть файл

@ -22,7 +22,6 @@ USAGE:
4) STORAGE_ACCOUNT_NAME - the blob storage account name
5) STORAGE_ACCOUNT_KEY - the blob storage account key
"""
import sys
import json
import os
from azure.storage.blob import BlobServiceClient

Просмотреть файл

@ -0,0 +1,193 @@
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: sample_custom_encoder_dataclass.py
DESCRIPTION:
These samples demonstrate the following: inserting entities into a table
and deleting entities from a table.
USAGE:
python sample_custom_encoder_dataclass.py
Set the environment variables with your own values before running the sample:
1) TABLES_STORAGE_ENDPOINT_SUFFIX - the Table service account URL suffix
2) TABLES_STORAGE_ACCOUNT_NAME - the name of the storage account
3) TABLES_PRIMARY_STORAGE_ACCOUNT_KEY - the storage account access key
"""
import os
from datetime import datetime, timezone
from uuid import uuid4, UUID
from dotenv import find_dotenv, load_dotenv
from dataclasses import dataclass, asdict
from typing import Dict, Union, Optional
from azure.data.tables import TableClient, TableEntityEncoderABC, UpdateMode
@dataclass
class Car:
    """Sample entity model with non-string property types (UUID, datetime, bytes).

    ``partition_key`` and ``row_key`` map to the table's PartitionKey/RowKey;
    ``row_key`` is a UUID that the custom encoder converts to a string.
    All non-key fields are optional so partial entities can be upserted/merged.
    """

    partition_key: str
    row_key: UUID
    price: Optional[float] = None
    last_updated: Optional[datetime] = None
    product_id: Optional[UUID] = None
    inventory_count: Optional[int] = None
    barcode: Optional[bytes] = None
    color: Optional[str] = None
    maker: Optional[str] = None
    model: Optional[str] = None
    production_date: Optional[datetime] = None
    mileage: Optional[int] = None  # sample uses 2**31, which exceeds the int32 range
    is_second_hand: Optional[bool] = None
class MyEncoder(TableEntityEncoderABC[Car]):
    """Custom encoder mapping a ``Car`` dataclass onto a flat table-entity dict."""

    def prepare_key(self, key: UUID) -> str:  # type: ignore[override]
        # Stringify the UUID before delegating to the base implementation.
        return super().prepare_key(str(key))

    def encode_entity(self, entity: Car) -> Dict[str, Union[str, int, float, bool]]:
        result: Dict[str, Union[str, int, float, bool]] = {}
        for name, raw in asdict(entity).items():
            if name == "partition_key":
                # this property should be "PartitionKey" in encoded result
                result["PartitionKey"] = raw
            elif name == "row_key":
                # this property should be "RowKey" in encoded result
                result["RowKey"] = str(raw)
            else:
                edm, prepared = self.prepare_value(name, raw)
                if edm:
                    result[f"{name}@odata.type"] = edm.value if hasattr(edm, "value") else edm
                result[name] = prepared
        return result
class InsertUpdateDeleteEntity(object):
    """Sync sample driver: exercises create/get/delete and upsert/update of a
    ``Car`` entity through the custom ``MyEncoder``.
    """

    def __init__(self):
        # Credentials and endpoint come from environment variables (.env supported).
        load_dotenv(find_dotenv())
        self.access_key = os.environ["TABLES_PRIMARY_STORAGE_ACCOUNT_KEY"]
        self.endpoint_suffix = os.environ["TABLES_STORAGE_ENDPOINT_SUFFIX"]
        self.account_name = os.environ["TABLES_STORAGE_ACCOUNT_NAME"]
        self.endpoint = f"{self.account_name}.table.{self.endpoint_suffix}"
        self.connection_string = f"DefaultEndpointsProtocol=https;AccountName={self.account_name};AccountKey={self.access_key};EndpointSuffix={self.endpoint_suffix}"
        self.table_name = "CustomEncoderDataClass"
        # Entity populating every field, including values that rely on the custom
        # encoder (UUID keys, bytes, tz-aware datetime, an int beyond int32 range).
        self.entity = Car(
            partition_key="PK",
            row_key=uuid4(),
            price=4.99,
            last_updated=datetime.today(),
            product_id=uuid4(),
            inventory_count=42,
            barcode=b"135aefg8oj0ld58",  # cspell:disable-line
            color="white",
            maker="maker",
            model="model",
            production_date=datetime(year=2014, month=4, day=1, hour=9, minute=30, second=45, tzinfo=timezone.utc),
            mileage=2**31,  # exceeds int32 range, so it is sent as Edm.Int64
            is_second_hand=True,
        )

    def create_delete_entity(self):
        """Create a table, insert/fetch/delete one entity, then drop the table."""
        table_client = TableClient.from_connection_string(self.connection_string, self.table_name)
        with table_client:
            table_client.create_table()
            result = table_client.create_entity(entity=self.entity, encoder=MyEncoder())
            print(f"Created entity: {result}")
            result = table_client.get_entity(
                self.entity.partition_key,
                self.entity.row_key,  # type: ignore[arg-type] # intend to pass a non-string RowKey
                encoder=MyEncoder(),
            )
            print(f"Get entity result: {result}")
            table_client.delete_entity(
                partition_key=self.entity.partition_key,
                row_key=self.entity.row_key,  # type: ignore[call-overload] # intend to pass a non-string RowKey
                encoder=MyEncoder(),
            )
            print("Successfully deleted!")
            table_client.delete_table()
            print("Cleaned up")

    def upsert_update_entities(self):
        """Demonstrate upsert (REPLACE then MERGE) and update with the custom encoder."""
        table_client = TableClient.from_connection_string(
            self.connection_string, table_name=f"{self.table_name}UpsertUpdate"
        )
        with table_client:
            table_client.create_table()
            # entity1 and entity2 share the same keys so they address one row.
            entity1 = Car(
                partition_key="PK",
                row_key=uuid4(),
                price=4.99,
                last_updated=datetime.today(),
                product_id=uuid4(),
                inventory_count=42,
                barcode=b"135aefg8oj0ld58",  # cspell:disable-line
            )
            entity2 = Car(
                partition_key=entity1.partition_key,
                row_key=entity1.row_key,
                color="red",
                maker="maker2",
                model="model2",
                production_date=datetime(year=2014, month=4, day=1, hour=9, minute=30, second=45, tzinfo=timezone.utc),
                mileage=2**31,  # exceeds int32 range, so it is sent as Edm.Int64
                is_second_hand=True,
            )
            # REPLACE upsert inserts entity2 (row does not exist yet).
            table_client.upsert_entity(mode=UpdateMode.REPLACE, entity=entity2, encoder=MyEncoder())
            inserted_entity = table_client.get_entity(
                entity2.partition_key,
                entity2.row_key,  # type: ignore[arg-type] # intend to pass a non-string RowKey
                encoder=MyEncoder(),
            )
            print(f"Inserted entity: {inserted_entity}")
            # MERGE upsert layers entity1's fields on top of the stored row.
            table_client.upsert_entity(mode=UpdateMode.MERGE, entity=entity1, encoder=MyEncoder())
            merged_entity = table_client.get_entity(
                entity1.partition_key,
                entity1.row_key,  # type: ignore[arg-type] # intend to pass a non-string RowKey
                encoder=MyEncoder(),
            )
            print(f"Merged entity: {merged_entity}")
            # REPLACE update overwrites the row with a sparsely-populated entity.
            entity3 = Car(
                partition_key=entity1.partition_key,
                row_key=entity2.row_key,
                color="white",
            )
            table_client.update_entity(mode=UpdateMode.REPLACE, entity=entity3, encoder=MyEncoder())
            replaced_entity = table_client.get_entity(
                entity3.partition_key,
                entity3.row_key,  # type: ignore[arg-type] # intend to pass a non-string RowKey
                encoder=MyEncoder(),
            )
            print(f"Replaced entity: {replaced_entity}")
            table_client.update_entity(mode=UpdateMode.REPLACE, entity=entity2, encoder=MyEncoder())
            merged_entity = table_client.get_entity(
                entity2.partition_key,
                entity2.row_key,  # type: ignore[arg-type] # intend to pass a non-string RowKey
                encoder=MyEncoder(),
            )
            print(f"Merged entity: {merged_entity}")
            table_client.delete_table()
            print("Cleaned up")
if __name__ == "__main__":
    # Run both sample scenarios sequentially.
    ide = InsertUpdateDeleteEntity()
    ide.create_delete_entity()
    ide.upsert_update_entities()

Просмотреть файл

@ -11,7 +11,7 @@ FILE: sample_insert_delete_entities.py
DESCRIPTION:
These samples demonstrate the following: inserting entities into a table
and deleting tables from a table.
and deleting entities from a table.
USAGE:
python sample_insert_delete_entities.py

Просмотреть файл

@ -592,10 +592,8 @@ class TestTableEncoder(AzureRecordedTestCase, TableTestCase):
assert list(resp.keys()) == ["date", "etag", "version"]
# Invalid int32 and int64 values
# TODO: Check whether other languages support big int32. Also Cosmos.
# TODO: This will likely change if we move to post-request validation.
max_int64 = 9223372036854775808
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int(max_int64 * 1000)}
max_int64 = 9223372036854775807
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int((max_int64 + 1) * 1000)}
expected_entity = test_entity
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
@ -609,28 +607,11 @@ class TestTableEncoder(AzureRecordedTestCase, TableTestCase):
)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 - 1, "Edm.Int64")}
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 + 1, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK2",
"RowKey": "RK2",
"Data": str(max_int64 - 1),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = client.create_entity(
test_entity,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data": str(max_int64 + 1),
"Data@odata.type": "Edm.Int64",
}
with pytest.raises(TypeError) as error:
@ -649,17 +630,56 @@ class TestTableEncoder(AzureRecordedTestCase, TableTestCase):
in str(error.value)
)
# Infinite float values
test_entity = {
# Valid int64 value with Edm
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = client.create_entity(
test_entity,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Valid int64 value without Edm
test_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": max_int64}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
response_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": EntityProperty(max_int64, EdmType.INT64)}
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
resp = client.create_entity(
test_entity,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Infinite float values
test_entity = {
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": float("nan"),
"Data2": float("inf"),
"Data3": float("-inf"),
}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": "NaN",
"Data1@odata.type": "Edm.Double",
"Data2": "Infinity",
@ -673,7 +693,7 @@ class TestTableEncoder(AzureRecordedTestCase, TableTestCase):
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
@ -1363,10 +1383,8 @@ class TestTableEncoder(AzureRecordedTestCase, TableTestCase):
assert list(resp.keys()) == ["date", "etag", "version"]
# Invalid int32 and int64 values
# TODO: Check with other languages whether they can support big int32. Also Cosmos.
# TODO: This will likely change if we move to post-request validation.
max_int64 = 9223372036854775808
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int(max_int64 * 1000)}
max_int64 = 9223372036854775807
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int((max_int64 + 1) * 1000)}
expected_entity = test_entity
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
@ -1396,44 +1414,11 @@ class TestTableEncoder(AzureRecordedTestCase, TableTestCase):
)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 - 1, "Edm.Int64")}
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 + 1, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK2",
"RowKey": "RK2",
"Data": str(max_int64 - 1),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = client.upsert_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data": str(max_int64 + 1),
"Data@odata.type": "Edm.Int64",
}
with pytest.raises(TypeError) as error:
@ -1444,7 +1429,7 @@ class TestTableEncoder(AzureRecordedTestCase, TableTestCase):
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
@ -1457,7 +1442,7 @@ class TestTableEncoder(AzureRecordedTestCase, TableTestCase):
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
@ -1466,17 +1451,88 @@ class TestTableEncoder(AzureRecordedTestCase, TableTestCase):
assert "An error occurred while processing this request." in str(error.value)
assert error.value.error_code == "InvalidInput"
# Infinite float values
test_entity = {
# Valid int64 value with Edm
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = client.upsert_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Valid int64 value without Edm
test_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": max_int64}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
response_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": EntityProperty(max_int64, EdmType.INT64)}
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
resp = client.upsert_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Infinite float values
test_entity = {
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": float("nan"),
"Data2": float("inf"),
"Data3": float("-inf"),
}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": "NaN",
"Data1@odata.type": "Edm.Double",
"Data2": "Infinity",
@ -1490,24 +1546,24 @@ class TestTableEncoder(AzureRecordedTestCase, TableTestCase):
test_entity,
mode=UpdateMode.MERGE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
@ -2220,10 +2276,8 @@ class TestTableEncoder(AzureRecordedTestCase, TableTestCase):
assert list(resp.keys()) == ["date", "etag", "version"]
# Invalid int32 and int64 values
# TODO: Check with other languages whether they can support big int32. Also Cosmos.
# TODO: This will likely change if we move to post-request validation.
max_int64 = 9223372036854775808
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int(max_int64 * 1000)}
max_int64 = 9223372036854775807
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int((max_int64 + 1) * 1000)}
expected_entity = test_entity
client.upsert_entity({"PartitionKey": "PK1", "RowKey": "RK1"})
with pytest.raises(TypeError) as error:
@ -2248,34 +2302,39 @@ class TestTableEncoder(AzureRecordedTestCase, TableTestCase):
)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 - 1, "Edm.Int64")}
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 + 1, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK2",
"RowKey": "RK2",
"Data": str(max_int64 - 1),
"Data": str(max_int64 + 1),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
client.upsert_entity({"PartitionKey": "PK2", "RowKey": "RK2"})
resp = client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
with pytest.raises(HttpResponseError) as error:
client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "An error occurred while processing this request" in str(error.value)
assert error.value.error_code == "InvalidInput"
with pytest.raises(HttpResponseError) as error:
client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "An error occurred while processing this request" in str(error.value)
assert error.value.error_code == "InvalidInput"
# Valid int64 value with Edm
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
@ -2283,42 +2342,70 @@ class TestTableEncoder(AzureRecordedTestCase, TableTestCase):
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
client.upsert_entity({"PartitionKey": "PK3", "RowKey": "RK3"})
resp = client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Valid int64 value without Edm
test_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": max_int64}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
response_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": EntityProperty(max_int64, EdmType.INT64)}
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
client.upsert_entity({"PartitionKey": "PK3", "RowKey": "RK3"})
with pytest.raises(HttpResponseError) as error:
client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "An error occurred while processing this request" in str(error.value)
assert error.value.error_code == "InvalidInput"
with pytest.raises(HttpResponseError) as error:
client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "An error occurred while processing this request" in str(error.value)
assert error.value.error_code == "InvalidInput"
client.upsert_entity({"PartitionKey": "PK4", "RowKey": "RK4"})
resp = client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Infinite float values
test_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": float("nan"),
"Data2": float("inf"),
"Data3": float("-inf"),
}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": "NaN",
"Data1@odata.type": "Edm.Double",
"Data2": "Infinity",
@ -2327,24 +2414,24 @@ class TestTableEncoder(AzureRecordedTestCase, TableTestCase):
"Data3@odata.type": "Edm.Double",
}
_check_backcompat(test_entity, expected_entity)
client.upsert_entity({"PartitionKey": "PK4", "RowKey": "RK4"})
client.upsert_entity({"PartitionKey": "PK5", "RowKey": "RK5"})
verification = json.dumps(expected_entity, sort_keys=True)
resp = client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]

Просмотреть файл

@ -559,10 +559,8 @@ class TestTableEncoderAsync(AzureRecordedTestCase, AsyncTableTestCase):
assert list(resp.keys()) == ["date", "etag", "version"]
# Invalid int32 and int64 values
# TODO: Check whether other languages support big int32. Also Cosmos.
# TODO: This will likely change if we move to post-request validation.
max_int64 = 9223372036854775808
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int(max_int64 * 1000)}
max_int64 = 9223372036854775807
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int((max_int64 + 1) * 1000)}
expected_entity = test_entity
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
@ -576,28 +574,11 @@ class TestTableEncoderAsync(AzureRecordedTestCase, AsyncTableTestCase):
)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 - 1, "Edm.Int64")}
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 + 1, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK2",
"RowKey": "RK2",
"Data": str(max_int64 - 1),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = await client.create_entity(
test_entity,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data": str(max_int64 + 1),
"Data@odata.type": "Edm.Int64",
}
with pytest.raises(TypeError) as error:
@ -616,17 +597,56 @@ class TestTableEncoderAsync(AzureRecordedTestCase, AsyncTableTestCase):
in str(error.value)
)
# Infinite float values
test_entity = {
# Valid int64 value with Edm
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = await client.create_entity(
test_entity,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Valid int64 value without Edm
test_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": max_int64}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
response_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": EntityProperty(max_int64, EdmType.INT64)}
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
resp = await client.create_entity(
test_entity,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Infinite float values
test_entity = {
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": float("nan"),
"Data2": float("inf"),
"Data3": float("-inf"),
}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": "NaN",
"Data1@odata.type": "Edm.Double",
"Data2": "Infinity",
@ -640,7 +660,7 @@ class TestTableEncoderAsync(AzureRecordedTestCase, AsyncTableTestCase):
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
@ -1332,10 +1352,8 @@ class TestTableEncoderAsync(AzureRecordedTestCase, AsyncTableTestCase):
assert list(resp.keys()) == ["date", "etag", "version"]
# Invalid int32 and int64 values
# TODO: Check with other languages whether they can support big int32. Also Cosmos.
# TODO: This will likely change if we move to post-request validation.
max_int64 = 9223372036854775808
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int(max_int64 * 1000)}
max_int64 = 9223372036854775807
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int((max_int64 + 1) * 1000)}
expected_entity = test_entity
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
@ -1365,44 +1383,11 @@ class TestTableEncoderAsync(AzureRecordedTestCase, AsyncTableTestCase):
)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 - 1, "Edm.Int64")}
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 + 1, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK2",
"RowKey": "RK2",
"Data": str(max_int64 - 1),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = await client.upsert_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data": str(max_int64 + 1),
"Data@odata.type": "Edm.Int64",
}
with pytest.raises(TypeError) as error:
@ -1413,7 +1398,7 @@ class TestTableEncoderAsync(AzureRecordedTestCase, AsyncTableTestCase):
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
@ -1426,7 +1411,7 @@ class TestTableEncoderAsync(AzureRecordedTestCase, AsyncTableTestCase):
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
@ -1435,17 +1420,88 @@ class TestTableEncoderAsync(AzureRecordedTestCase, AsyncTableTestCase):
assert "An error occurred while processing this request." in str(error.value)
assert error.value.error_code == "InvalidInput"
# Infinite float values
test_entity = {
# Valid int64 value with Edm
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = await client.upsert_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Valid int64 value without Edm
test_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": max_int64}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
response_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": EntityProperty(max_int64, EdmType.INT64)}
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
resp = await client.upsert_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Infinite float values
test_entity = {
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": float("nan"),
"Data2": float("inf"),
"Data3": float("-inf"),
}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": "NaN",
"Data1@odata.type": "Edm.Double",
"Data2": "Infinity",
@ -1459,24 +1515,24 @@ class TestTableEncoderAsync(AzureRecordedTestCase, AsyncTableTestCase):
test_entity,
mode=UpdateMode.MERGE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
@ -2191,10 +2247,8 @@ class TestTableEncoderAsync(AzureRecordedTestCase, AsyncTableTestCase):
assert list(resp.keys()) == ["date", "etag", "version"]
# Invalid int32 and int64 values
# TODO: Check with other languages whether they can support big int32. Also Cosmos.
# TODO: This will likely change if we move to post-request validation.
max_int64 = 9223372036854775808
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int(max_int64 * 1000)}
max_int64 = 9223372036854775807
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int((max_int64 + 1) * 1000)}
expected_entity = test_entity
await client.upsert_entity({"PartitionKey": "PK1", "RowKey": "RK1"})
with pytest.raises(TypeError) as error:
@ -2219,34 +2273,39 @@ class TestTableEncoderAsync(AzureRecordedTestCase, AsyncTableTestCase):
)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 - 1, "Edm.Int64")}
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 + 1, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK2",
"RowKey": "RK2",
"Data": str(max_int64 - 1),
"Data": str(max_int64 + 1),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
await client.upsert_entity({"PartitionKey": "PK2", "RowKey": "RK2"})
resp = await client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
with pytest.raises(HttpResponseError) as error:
await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "An error occurred while processing this request" in str(error.value)
assert error.value.error_code == "InvalidInput"
with pytest.raises(HttpResponseError) as error:
await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "An error occurred while processing this request" in str(error.value)
assert error.value.error_code == "InvalidInput"
# Valid int64 value with Edm
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
@ -2254,42 +2313,70 @@ class TestTableEncoderAsync(AzureRecordedTestCase, AsyncTableTestCase):
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
await client.upsert_entity({"PartitionKey": "PK3", "RowKey": "RK3"})
resp = await client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Valid int64 value without Edm
test_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": max_int64}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
response_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": EntityProperty(max_int64, EdmType.INT64)}
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
await client.upsert_entity({"PartitionKey": "PK3", "RowKey": "RK3"})
with pytest.raises(HttpResponseError) as error:
await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "An error occurred while processing this request" in str(error.value)
assert error.value.error_code == "InvalidInput"
with pytest.raises(HttpResponseError) as error:
await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "An error occurred while processing this request" in str(error.value)
assert error.value.error_code == "InvalidInput"
await client.upsert_entity({"PartitionKey": "PK4", "RowKey": "RK4"})
resp = await client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Infinite float values
test_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": float("nan"),
"Data2": float("inf"),
"Data3": float("-inf"),
}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": "NaN",
"Data1@odata.type": "Edm.Double",
"Data2": "Infinity",
@ -2298,24 +2385,24 @@ class TestTableEncoderAsync(AzureRecordedTestCase, AsyncTableTestCase):
"Data3@odata.type": "Edm.Double",
}
_check_backcompat(test_entity, expected_entity)
await client.upsert_entity({"PartitionKey": "PK4", "RowKey": "RK4"})
await client.upsert_entity({"PartitionKey": "PK5", "RowKey": "RK5"})
verification = json.dumps(expected_entity, sort_keys=True)
resp = await client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]

Просмотреть файл

@ -501,14 +501,11 @@ class TestTableEncoderCosmos(AzureRecordedTestCase, TableTestCase):
assert list(resp.keys()) == ["date", "etag", "version"]
# Invalid int32 and int64 values
# TODO: Check whether other languages support big int32. Also Cosmos.
# TODO: This will likely change if we move to post-request validation.
max_int64 = 9223372036854775808
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int(max_int64 * 1000)}
max_int64 = 9223372036854775807
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int((max_int64 + 1) * 1000)}
expected_entity = test_entity
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
with pytest.raises(HttpResponseError) as error:
resp = client.create_entity(
@ -521,28 +518,11 @@ class TestTableEncoderCosmos(AzureRecordedTestCase, TableTestCase):
assert "Operation returned an invalid status 'Bad Request'" in str(error.value)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 - 1, "Edm.Int64")}
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 + 1, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK2",
"RowKey": "RK2",
"Data": str(max_int64 - 1),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = client.create_entity(
test_entity,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data": str(max_int64 + 1),
"Data@odata.type": "Edm.Int64",
}
with pytest.raises(TypeError) as error:
@ -561,17 +541,56 @@ class TestTableEncoderCosmos(AzureRecordedTestCase, TableTestCase):
in str(error.value)
)
# Infinite float values
test_entity = {
# Valid int64 value with Edm
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = client.create_entity(
test_entity,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Valid int64 value without Edm
test_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": max_int64}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
response_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": EntityProperty(max_int64, EdmType.INT64)}
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
resp = client.create_entity(
test_entity,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Infinite float values
test_entity = {
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": float("nan"),
"Data2": float("inf"),
"Data3": float("-inf"),
}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": "NaN",
"Data1@odata.type": "Edm.Double",
"Data2": "Infinity",
@ -585,7 +604,7 @@ class TestTableEncoderCosmos(AzureRecordedTestCase, TableTestCase):
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
@ -1283,10 +1302,8 @@ class TestTableEncoderCosmos(AzureRecordedTestCase, TableTestCase):
assert list(resp.keys()) == ["date", "etag", "version"]
# Invalid int32 and int64 values
# TODO: Check with other languages whether they can support big int32. Also Cosmos.
# TODO: This will likely change if we move to post-request validation.
max_int64 = 9223372036854775808
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int(max_int64 * 1000)}
max_int64 = 9223372036854775807
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int((max_int64 + 1) * 1000)}
expected_entity = test_entity
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
@ -1324,44 +1341,11 @@ class TestTableEncoderCosmos(AzureRecordedTestCase, TableTestCase):
in str(error.value)
)
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 - 1, "Edm.Int64")}
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 + 1, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK2",
"RowKey": "RK2",
"Data": str(max_int64 - 1),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = client.upsert_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data": str(max_int64 + 1),
"Data@odata.type": "Edm.Int64",
}
with pytest.raises(TypeError) as error:
@ -1372,7 +1356,7 @@ class TestTableEncoderCosmos(AzureRecordedTestCase, TableTestCase):
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
@ -1385,7 +1369,7 @@ class TestTableEncoderCosmos(AzureRecordedTestCase, TableTestCase):
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
@ -1394,17 +1378,88 @@ class TestTableEncoderCosmos(AzureRecordedTestCase, TableTestCase):
assert "One of the input values is invalid." in str(error.value)
assert error.value.error_code == "InvalidInput"
# Infinite float values
test_entity = {
# Valid int64 value with Edm
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = client.upsert_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Valid int64 value without Edm
test_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": max_int64}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
response_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": EntityProperty(max_int64, EdmType.INT64)}
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
resp = client.upsert_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Infinite float values
test_entity = {
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": float("nan"),
"Data2": float("inf"),
"Data3": float("-inf"),
}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": "NaN",
"Data1@odata.type": "Edm.Double",
"Data2": "Infinity",
@ -1418,24 +1473,24 @@ class TestTableEncoderCosmos(AzureRecordedTestCase, TableTestCase):
test_entity,
mode=UpdateMode.MERGE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
@ -2159,10 +2214,8 @@ class TestTableEncoderCosmos(AzureRecordedTestCase, TableTestCase):
assert list(resp.keys()) == ["date", "etag", "version"]
# Invalid int32 and int64 values
# TODO: Check with other languages whether they can support big int32. Also Cosmos.
# TODO: This will likely change if we move to post-request validation.
max_int64 = 9223372036854775808
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int(max_int64 * 1000)}
max_int64 = 9223372036854775807
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int((max_int64 + 1) * 1000)}
expected_entity = test_entity
client.upsert_entity({"PartitionKey": "PK1", "RowKey": "RK1"})
with pytest.raises(TypeError) as error:
@ -2193,34 +2246,39 @@ class TestTableEncoderCosmos(AzureRecordedTestCase, TableTestCase):
in str(error.value)
)
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 - 1, "Edm.Int64")}
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 + 1, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK2",
"RowKey": "RK2",
"Data": str(max_int64 - 1),
"Data": str(max_int64 + 1),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
client.upsert_entity({"PartitionKey": "PK2", "RowKey": "RK2"})
resp = client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
with pytest.raises(HttpResponseError) as error:
client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "One of the input values is invalid." in str(error.value)
assert error.value.error_code == "InvalidInput"
with pytest.raises(HttpResponseError) as error:
client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "One of the input values is invalid." in str(error.value)
assert error.value.error_code == "InvalidInput"
# Valid int64 value with Edm
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
@ -2228,42 +2286,70 @@ class TestTableEncoderCosmos(AzureRecordedTestCase, TableTestCase):
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
client.upsert_entity({"PartitionKey": "PK3", "RowKey": "RK3"})
resp = client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Valid int64 value without Edm
test_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": max_int64}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
response_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": EntityProperty(max_int64, EdmType.INT64)}
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
client.upsert_entity({"PartitionKey": "PK3", "RowKey": "RK3"})
with pytest.raises(HttpResponseError) as error:
client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "One of the input values is invalid." in str(error.value)
assert error.value.error_code == "InvalidInput"
with pytest.raises(HttpResponseError) as error:
client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "One of the input values is invalid." in str(error.value)
assert error.value.error_code == "InvalidInput"
client.upsert_entity({"PartitionKey": "PK4", "RowKey": "RK4"})
resp = client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Infinite float values
test_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": float("nan"),
"Data2": float("inf"),
"Data3": float("-inf"),
}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": "NaN",
"Data1@odata.type": "Edm.Double",
"Data2": "Infinity",
@ -2272,24 +2358,24 @@ class TestTableEncoderCosmos(AzureRecordedTestCase, TableTestCase):
"Data3@odata.type": "Edm.Double",
}
_check_backcompat(test_entity, expected_entity)
client.upsert_entity({"PartitionKey": "PK4", "RowKey": "RK4"})
client.upsert_entity({"PartitionKey": "PK5", "RowKey": "RK5"})
verification = json.dumps(expected_entity, sort_keys=True)
resp = client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]

Просмотреть файл

@ -502,14 +502,11 @@ class TestTableEncoderCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
assert list(resp.keys()) == ["date", "etag", "version"]
# Invalid int32 and int64 values
# TODO: Check whether other languages support big int32. Also Cosmos.
# TODO: This will likely change if we move to post-request validation.
max_int64 = 9223372036854775808
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int(max_int64 * 1000)}
max_int64 = 9223372036854775807
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int((max_int64 + 1) * 1000)}
expected_entity = test_entity
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
with pytest.raises(HttpResponseError) as error:
resp = await client.create_entity(
@ -522,28 +519,11 @@ class TestTableEncoderCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
assert "Operation returned an invalid status 'Bad Request'" in str(error.value)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 - 1, "Edm.Int64")}
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 + 1, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK2",
"RowKey": "RK2",
"Data": str(max_int64 - 1),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = await client.create_entity(
test_entity,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data": str(max_int64 + 1),
"Data@odata.type": "Edm.Int64",
}
with pytest.raises(TypeError) as error:
@ -562,17 +542,56 @@ class TestTableEncoderCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
in str(error.value)
)
# Infinite float values
test_entity = {
# Valid int64 value with Edm
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = await client.create_entity(
test_entity,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Valid int64 value without Edm
test_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": max_int64}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
response_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": EntityProperty(max_int64, EdmType.INT64)}
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
resp = await client.create_entity(
test_entity,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Infinite float values
test_entity = {
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": float("nan"),
"Data2": float("inf"),
"Data3": float("-inf"),
}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": "NaN",
"Data1@odata.type": "Edm.Double",
"Data2": "Infinity",
@ -586,7 +605,7 @@ class TestTableEncoderCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}",
verify_headers={"Content-Type": "application/json;odata=nometadata"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
@ -1288,10 +1307,8 @@ class TestTableEncoderCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
assert list(resp.keys()) == ["date", "etag", "version"]
# Invalid int32 and int64 values
# TODO: Check with other languages whether they can support big int32. Also Cosmos.
# TODO: This will likely change if we move to post-request validation.
max_int64 = 9223372036854775808
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int(max_int64 * 1000)}
max_int64 = 9223372036854775807
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int((max_int64 + 1) * 1000)}
expected_entity = test_entity
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
@ -1329,44 +1346,11 @@ class TestTableEncoderCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
in str(error.value)
)
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 - 1, "Edm.Int64")}
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 + 1, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK2",
"RowKey": "RK2",
"Data": str(max_int64 - 1),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = await client.upsert_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data": str(max_int64 + 1),
"Data@odata.type": "Edm.Int64",
}
with pytest.raises(TypeError) as error:
@ -1377,7 +1361,7 @@ class TestTableEncoderCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
@ -1390,7 +1374,7 @@ class TestTableEncoderCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
@ -1399,17 +1383,88 @@ class TestTableEncoderCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
assert "One of the input values is invalid." in str(error.value)
assert error.value.error_code == "InvalidInput"
# Infinite float values
test_entity = {
# Valid int64 value with Edm
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
"RowKey": "RK3",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
resp = await client.upsert_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Valid int64 value without Edm
test_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": max_int64}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
response_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": EntityProperty(max_int64, EdmType.INT64)}
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
resp = await client.upsert_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Infinite float values
test_entity = {
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": float("nan"),
"Data2": float("inf"),
"Data3": float("-inf"),
}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": "NaN",
"Data1@odata.type": "Edm.Double",
"Data2": "Infinity",
@ -1423,24 +1478,24 @@ class TestTableEncoderCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
test_entity,
mode=UpdateMode.MERGE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.upsert_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={
"Content-Type": "application/json",
"Accept": "application/json",
},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
@ -2168,10 +2223,8 @@ class TestTableEncoderCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
assert list(resp.keys()) == ["date", "etag", "version"]
# Invalid int32 and int64 values
# TODO: Check with other languages whether they can support big int32. Also Cosmos.
# TODO: This will likely change if we move to post-request validation.
max_int64 = 9223372036854775808
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int(max_int64 * 1000)}
max_int64 = 9223372036854775807
test_entity = {"PartitionKey": "PK1", "RowKey": "RK1", "Data": int((max_int64 + 1) * 1000)}
expected_entity = test_entity
await client.upsert_entity({"PartitionKey": "PK1", "RowKey": "RK1"})
with pytest.raises(TypeError) as error:
@ -2202,34 +2255,39 @@ class TestTableEncoderCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
in str(error.value)
)
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 - 1, "Edm.Int64")}
test_entity = {"PartitionKey": "PK2", "RowKey": "RK2", "Data": (max_int64 + 1, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK2",
"RowKey": "RK2",
"Data": str(max_int64 - 1),
"Data": str(max_int64 + 1),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
await client.upsert_entity({"PartitionKey": "PK2", "RowKey": "RK2"})
resp = await client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK2", "RK2"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
with pytest.raises(HttpResponseError) as error:
await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "One of the input values is invalid." in str(error.value)
assert error.value.error_code == "InvalidInput"
with pytest.raises(HttpResponseError) as error:
await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK2',RowKey='RK2')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "One of the input values is invalid." in str(error.value)
assert error.value.error_code == "InvalidInput"
# Valid int64 value with Edm
test_entity = {"PartitionKey": "PK3", "RowKey": "RK3", "Data": (max_int64, "Edm.Int64")}
expected_entity = {
"PartitionKey": "PK3",
@ -2237,42 +2295,70 @@ class TestTableEncoderCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
_check_backcompat(test_entity, expected_entity)
await client.upsert_entity({"PartitionKey": "PK3", "RowKey": "RK3"})
resp = await client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK3", "RK3"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Valid int64 value without Edm
test_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": max_int64}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"Data": str(max_int64),
"Data@odata.type": "Edm.Int64",
}
response_entity = {"PartitionKey": "PK4", "RowKey": "RK4", "Data": EntityProperty(max_int64, EdmType.INT64)}
with pytest.raises(TypeError) as error:
_check_backcompat(test_entity, expected_entity)
assert "is too large to be cast to" in str(error.value)
await client.upsert_entity({"PartitionKey": "PK3", "RowKey": "RK3"})
with pytest.raises(HttpResponseError) as error:
await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "One of the input values is invalid." in str(error.value)
assert error.value.error_code == "InvalidInput"
with pytest.raises(HttpResponseError) as error:
await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK3',RowKey='RK3')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
)
assert "One of the input values is invalid." in str(error.value)
assert error.value.error_code == "InvalidInput"
await client.upsert_entity({"PartitionKey": "PK4", "RowKey": "RK4"})
resp = await client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=json.dumps(expected_entity, sort_keys=True),
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), response_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
# Infinite float values
test_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": float("nan"),
"Data2": float("inf"),
"Data3": float("-inf"),
}
expected_entity = {
"PartitionKey": "PK4",
"RowKey": "RK4",
"PartitionKey": "PK5",
"RowKey": "RK5",
"Data1": "NaN",
"Data1@odata.type": "Edm.Double",
"Data2": "Infinity",
@ -2281,24 +2367,24 @@ class TestTableEncoderCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
"Data3@odata.type": "Edm.Double",
}
_check_backcompat(test_entity, expected_entity)
await client.upsert_entity({"PartitionKey": "PK4", "RowKey": "RK4"})
await client.upsert_entity({"PartitionKey": "PK5", "RowKey": "RK5"})
verification = json.dumps(expected_entity, sort_keys=True)
resp = await client.update_entity(
test_entity,
mode=UpdateMode.MERGE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]
resp = await client.update_entity(
test_entity,
mode=UpdateMode.REPLACE,
verify_payload=verification,
verify_url=f"/{table_name}(PartitionKey='PK4',RowKey='RK4')",
verify_url=f"/{table_name}(PartitionKey='PK5',RowKey='RK5')",
verify_headers={"Content-Type": "application/json", "Accept": "application/json", "If-Match": "*"},
verify_response=(lambda: client.get_entity("PK4", "RK4"), test_entity),
verify_response=(lambda: client.get_entity("PK5", "RK5"), test_entity),
)
assert list(resp.keys()) == ["date", "etag", "version"]