This commit is contained in:
Yalin Li 2023-07-13 11:21:22 -07:00 committed by GitHub
Parent 48d428e6c1
Commit 621c9845ce
No known key found for this signature
GPG key ID: 4AEE18F83AFDEB23
27 changed files with 735 additions and 597 deletions

View file

@ -38,7 +38,11 @@ class AzureSigningError(ClientAuthenticationError):
class _HttpChallenge(object): # pylint:disable=too-few-public-methods
"""Represents a parsed HTTP WWW-Authentication Bearer challenge from a server."""
"""Represents a parsed HTTP WWW-Authentication Bearer challenge from a server.
:param challenge: The WWW-Authenticate header of the challenge response.
:type challenge: str
"""
def __init__(self, challenge):
if not challenge:
@ -78,7 +82,6 @@ class _HttpChallenge(object): # pylint:disable=too-few-public-methods
self.resource = self._parameters.get("resource") or self._parameters.get("resource_id") or ""
# pylint: disable=no-self-use
class SharedKeyCredentialPolicy(SansIOHTTPPolicy):
def __init__(self, credential, is_emulated=False):
self._credential = credential
@ -139,7 +142,7 @@ class SharedKeyCredentialPolicy(SansIOHTTPPolicy):
except Exception as ex:
# Wrap any error that occurred as signing error
# Doing so will clarify/locate the source of problem
raise _wrap_exception(ex, AzureSigningError)
raise _wrap_exception(ex, AzureSigningError) from ex
def on_request(self, request: PipelineRequest) -> None:
self.sign_request(request)
@ -198,6 +201,7 @@ class BearerTokenChallengePolicy(BearerTokenCredentialPolicy):
:param ~azure.core.pipeline.PipelineRequest request: the request which elicited an authentication challenge
:param ~azure.core.pipeline.PipelineResponse response: the resource provider's response
:returns: a bool indicating whether the policy should send the request
:rtype: bool
"""
if not self._discover_tenant and not self._discover_scopes:
# We can't discover the tenant or use a different scope; the request will fail because it hasn't changed
@ -268,5 +272,5 @@ def _configure_credential(
if isinstance(credential, AzureNamedKeyCredential):
return SharedKeyCredentialPolicy(credential)
if credential is not None:
raise TypeError("Unsupported credential: {}".format(credential))
raise TypeError(f"Unsupported credential: {credential}")
return None
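
A recurring pattern in this change set is replacing bare re-raises with explicit exception chaining (raise ... from ex), as in the AzureSigningError wrap above. A minimal standalone sketch of the pattern, using hypothetical names rather than the SDK types:

class SigningError(Exception):
    """Hypothetical stand-in for AzureSigningError."""

def sign_payload(payload):
    try:
        return payload.encode("utf-8")  # raises AttributeError for non-str input
    except Exception as ex:
        # Chaining keeps the original traceback attached as __cause__, so logs
        # show the real source of the failure instead of a secondary error.
        raise SigningError("Failed to sign request payload.") from ex

try:
    sign_payload(None)
except SigningError as err:
    print(type(err.__cause__))  # <class 'AttributeError'>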

View file

@ -64,9 +64,7 @@ def get_api_version(kwargs: Dict[str, Any], default: str) -> str:
if api_version and api_version not in _SUPPORTED_API_VERSIONS:
versions = "\n".join(_SUPPORTED_API_VERSIONS)
raise ValueError(
"Unsupported API version '{}'. Please select from:\n{}".format(
api_version, versions
)
f"Unsupported API version '{api_version}'. Please select from:\n{versions}"
)
return api_version or default
@ -81,11 +79,11 @@ class AccountHostsMixin(object): # pylint: disable=too-many-instance-attributes
try:
if not account_url.lower().startswith("http"):
account_url = "https://" + account_url
except AttributeError:
raise ValueError("Account URL must be a string.")
except AttributeError as exc:
raise ValueError("Account URL must be a string.") from exc
parsed_url = urlparse(account_url.rstrip("/"))
if not parsed_url.netloc:
raise ValueError("Invalid URL: {}".format(account_url))
raise ValueError(f"Invalid URL: {account_url}")
_, sas_token = parse_query(parsed_url.query)
if not sas_token and not credential:
@ -118,10 +116,8 @@ class AccountHostsMixin(object): # pylint: disable=too-many-instance-attributes
raise ValueError("Token credential is only supported with HTTPS.")
if hasattr(self.credential, "named_key"):
self.account_name = self.credential.named_key.name # type: ignore
secondary_hostname = "{}-secondary.table.{}".format(
self.credential.named_key.name, # type: ignore
os.getenv("TABLES_STORAGE_ENDPOINT_SUFFIX", DEFAULT_STORAGE_ENDPOINT_SUFFIX)
)
endpoint_suffix = os.getenv("TABLES_STORAGE_ENDPOINT_SUFFIX", DEFAULT_STORAGE_ENDPOINT_SUFFIX)
secondary_hostname = f"{self.account_name}-secondary.table.{endpoint_suffix}"
if not self._hosts:
if len(account) > 1:
@ -143,19 +139,23 @@ class AccountHostsMixin(object): # pylint: disable=too-many-instance-attributes
self._policies.insert(0, CosmosPatchTransformPolicy())
@property
def url(self):
def url(self) -> str:
"""The full endpoint URL to this entity, including SAS token if used.
This could be either the primary endpoint,
or the secondary endpoint depending on the current :func:`location_mode`.
:return: The full endpoint URL including SAS token if used.
:rtype: str
"""
return self._format_url(self._hosts[self._location_mode])
return self._format_url(self._hosts[self._location_mode]) # type: ignore
@property
def _primary_endpoint(self):
"""The full primary endpoint URL.
:type: str
:return: The full primary endpoint URL.
:rtype: str
"""
return self._format_url(self._hosts[LocationMode.PRIMARY])
@ -163,6 +163,7 @@ class AccountHostsMixin(object): # pylint: disable=too-many-instance-attributes
def _primary_hostname(self):
"""The hostname of the primary endpoint.
:return: The hostname of the primary endpoint.
:type: str
"""
return self._hosts[LocationMode.PRIMARY]
@ -174,8 +175,9 @@ class AccountHostsMixin(object): # pylint: disable=too-many-instance-attributes
If not available a ValueError will be raised. To explicitly specify a secondary hostname, use the optional
`secondary_hostname` keyword argument on instantiation.
:return: The full secondary endpoint URL.
:type: str
:raise ValueError:
:raise ValueError: If the secondary endpoint URL is not configured.
"""
if not self._hosts[LocationMode.SECONDARY]:
raise ValueError("No secondary host configured.")
@ -188,34 +190,28 @@ class AccountHostsMixin(object): # pylint: disable=too-many-instance-attributes
If not available this will be None. To explicitly specify a secondary hostname, use the optional
`secondary_hostname` keyword argument on instantiation.
:return: The hostname of the secondary endpoint.
:type: str or None
"""
return self._hosts[LocationMode.SECONDARY]
@property
def api_version(self):
def api_version(self) -> str:
"""The version of the Storage API used for requests.
:return: The Storage API version.
:type: str
"""
return self._client._config.version # pylint: disable=protected-access
return self._client._config.version # type: ignore # pylint: disable=protected-access
class TablesBaseClient(AccountHostsMixin):
"""Base class for TableClient
:param str endpoint: A URL to an Azure Tables account.
:keyword credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token. The value can be one of AzureNamedKeyCredential (azure-core),
AzureSasCredential (azure-core), or TokenCredentials from azure-identity.
:paramtype credential:
:class:`~azure.core.credentials.AzureNamedKeyCredential` or
:class:`~azure.core.credentials.AzureSasCredential` or
:class:`~azure.core.credentials.TokenCredential`
:keyword api_version: Specifies the version of the operation to use for this request. Default value
is "2019-02-02". Note that overriding this default value may result in unsupported behavior.
:paramtype api_version: str
:ivar str account_name: The name of the Tables account.
:ivar str scheme: The scheme component in the full URL to the Tables account.
:ivar str url: The storage endpoint.
:ivar str api_version: The service API version.
"""
def __init__( # pylint: disable=missing-client-constructor-parameter-credential
@ -225,6 +221,21 @@ class TablesBaseClient(AccountHostsMixin):
credential: Optional[Union[AzureSasCredential, AzureNamedKeyCredential, TokenCredential]] = None,
**kwargs
) -> None:
"""Create TablesBaseClient from a Credential.
:param str endpoint: A URL to an Azure Tables account.
:keyword credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token. The value can be one of AzureNamedKeyCredential (azure-core),
AzureSasCredential (azure-core), or a TokenCredential implementation from azure-identity.
:paramtype credential:
~azure.core.credentials.AzureNamedKeyCredential or
~azure.core.credentials.AzureSasCredential or
~azure.core.credentials.TokenCredential or None
:keyword api_version: Specifies the version of the operation to use for this request. Default value
is "2019-02-02".
:paramtype api_version: str
"""
super(TablesBaseClient, self).__init__(endpoint, credential=credential, **kwargs) # type: ignore
self._client = AzureTable(
self.url,
@ -259,16 +270,25 @@ class TablesBaseClient(AccountHostsMixin):
]
def _batch_send(self, table_name: str, *reqs: HttpRequest, **kwargs) -> List[Mapping[str, Any]]:
"""Given a series of request, do a Storage batch call."""
# pylint:disable=docstring-should-be-keyword
"""Given a series of request, do a Storage batch call.
:param table_name: The table name.
:type table_name: str
:param reqs: The HTTP requests.
:type reqs: ~azure.core.pipeline.transport.HttpRequest
:return: A list of batch part metadata in response.
:rtype: list[Mapping[str, Any]]
"""
# Pop it here, so requests doesn't feel bad about additional kwarg
policies = [StorageHeadersPolicy()]
changeset = HttpRequest("POST", None) # type: ignore
changeset.set_multipart_mixed(
*reqs, policies=policies, boundary="changeset_{}".format(uuid4()) # type: ignore
*reqs, policies=policies, boundary=f"changeset_{uuid4()}" # type: ignore
)
request = self._client._client.post( # pylint: disable=protected-access
url="{}://{}/$batch".format(self.scheme, self._primary_hostname),
url=f"{self.scheme}://{self._primary_hostname}/$batch",
headers={
"x-ms-version": self.api_version,
"DataServiceVersion": "3.0",
@ -281,7 +301,7 @@ class TablesBaseClient(AccountHostsMixin):
changeset,
policies=policies,
enforce_https=False,
boundary="batch_{}".format(uuid4()),
boundary=f"batch_{uuid4()}",
)
pipeline_response = self._client._client._pipeline.run(request, **kwargs) # pylint: disable=protected-access
response = pipeline_response.http_response
@ -322,6 +342,9 @@ class TransportWrapper(HttpTransport):
"""Wrapper class that ensures that an inner client created
by a `get_client` method does not close the outer transport for the parent
when used in a context manager.
:param transport: The Http Transport instance
:type transport: ~azure.core.pipeline.transport.HttpTransport
"""
def __init__(self, transport):
self._transport = transport
@ -352,10 +375,10 @@ def parse_connection_str(conn_str, credential, keyword_args):
if not credential:
try:
credential = AzureNamedKeyCredential(name=conn_settings["accountname"], key=conn_settings["accountkey"])
except KeyError:
except KeyError as exc:
credential = conn_settings.get("sharedaccesssignature", None)
if not credential:
raise ValueError("Connection string missing required connection details.")
raise ValueError("Connection string missing required connection details.") from exc
credential = AzureSasCredential(credential)
primary = conn_settings.get("tableendpoint")
secondary = conn_settings.get("tablesecondaryendpoint")
@ -363,14 +386,8 @@ def parse_connection_str(conn_str, credential, keyword_args):
if secondary:
raise ValueError("Connection string specifies only secondary endpoint.")
try:
primary = "{}://{}.table.{}".format(
conn_settings["defaultendpointsprotocol"],
conn_settings["accountname"],
conn_settings["endpointsuffix"],
)
secondary = "{}-secondary.table.{}".format(
conn_settings["accountname"], conn_settings["endpointsuffix"]
)
primary = f"{conn_settings['defaultendpointsprotocol']}://{conn_settings['accountname']}.table.{conn_settings['endpointsuffix']}" # pylint: disable=line-too-long
secondary = f"{conn_settings['accountname']}-secondary.table.{conn_settings['endpointsuffix']}"
except KeyError:
pass
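
For reference, a standalone sketch (not the SDK helper itself) of how the primary and secondary table endpoints above are assembled from a typical connection string; the account name and key are placeholders:

conn_str = (
    "DefaultEndpointsProtocol=https;AccountName=mytables;"
    "AccountKey=<key>;EndpointSuffix=core.windows.net"
)
# Lower-case the keys, the same normalization parse_connection_str applies.
settings = {
    k.lower(): v
    for k, v in (part.split("=", 1) for part in conn_str.split(";") if part)
}
primary = (
    f"{settings['defaultendpointsprotocol']}://"
    f"{settings['accountname']}.table.{settings['endpointsuffix']}"
)
secondary = f"{settings['accountname']}-secondary.table.{settings['endpointsuffix']}"
print(primary)    # https://mytables.table.core.windows.net
print(secondary)  # mytables-secondary.table.core.windows.net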
@ -380,12 +397,9 @@ def parse_connection_str(conn_str, credential, keyword_args):
else:
endpoint_suffix = os.getenv("TABLES_STORAGE_ENDPOINT_SUFFIX", DEFAULT_STORAGE_ENDPOINT_SUFFIX)
try:
primary = "https://{}.table.{}".format(
conn_settings["accountname"],
conn_settings.get("endpointsuffix", endpoint_suffix),
)
except KeyError:
raise ValueError("Connection string missing required connection details.")
primary = f"https://{conn_settings['accountname']}.table.{conn_settings.get('endpointsuffix', endpoint_suffix)}" # pylint: disable=line-too-long
except KeyError as exc:
raise ValueError("Connection string missing required connection details.") from exc
if "secondary_hostname" not in keyword_args:
keyword_args["secondary_hostname"] = secondary
@ -416,7 +430,7 @@ def parse_query(query_str):
sas_values = QueryStringConstants.to_list()
parsed_query = {k: v[0] for k, v in parse_qs(query_str).items()}
sas_params = [
"{}={}".format(k, quote(v, safe=""))
f"{k}={quote(v, safe='')}"
for k, v in parsed_query.items()
if k in sas_values
]
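
A rough sketch of what parse_query does with a SAS-bearing query string, assuming a simplified stand-in for QueryStringConstants.to_list():

from urllib.parse import parse_qs, quote

query_str = "sv=2019-02-02&sig=abc%2F123&tn=mytable&other=ignored"
sas_values = ["sv", "sig", "tn"]  # assumed subset of the real SAS parameter names

parsed_query = {k: v[0] for k, v in parse_qs(query_str).items()}
sas_params = [f"{k}={quote(v, safe='')}" for k, v in parsed_query.items() if k in sas_values]
sas_token = "&".join(sas_params)
print(sas_token)  # sv=2019-02-02&sig=abc%2F123&tn=mytable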

View file

@ -5,31 +5,8 @@
# --------------------------------------------------------------------------
import base64
import hashlib
import datetime
import hmac
class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
"""UTF offset for UTC is 0."""
return datetime.timedelta(0)
def tzname(self, dt):
"""Timestamp representation."""
return "Z"
def dst(self, dt):
"""No daylight saving for UTC."""
return datetime.timedelta(hours=1)
try:
from datetime import timezone
TZ_UTC = timezone.utc # type: ignore
except ImportError:
TZ_UTC = UTC() # type: ignore
from datetime import timezone
def _to_str(value):
@ -38,7 +15,7 @@ def _to_str(value):
def _to_utc_datetime(value):
try:
value = value.astimezone(TZ_UTC)
value = value.astimezone(timezone.utc)
except ValueError:
# Before Python 3.8, this raised for a naive datetime.
pass
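
The hand-rolled UTC tzinfo class is dropped in favor of the standard library's datetime.timezone.utc, which behaves the same way for this use. A quick illustration of the conversion the helper performs:

from datetime import datetime, timedelta, timezone

local = datetime(2023, 7, 13, 11, 21, 22, tzinfo=timezone(timedelta(hours=-7)))
print(local.astimezone(timezone.utc))  # 2023-07-13 18:21:22+00:00

# On recent Python versions a naive datetime no longer raises here; it is
# interpreted as local time (hence the except ValueError fallback above).
naive = datetime(2023, 7, 13, 11, 21, 22)
print(naive.astimezone(timezone.utc))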

View file

@ -7,10 +7,10 @@ from typing import Union, Dict, Any, Optional
from uuid import UUID
import logging
import datetime
from datetime import datetime, timezone
from ._entity import EntityProperty, EdmType, TableEntity
from ._common_conversion import _decode_base64_to_bytes, TZ_UTC
from ._common_conversion import _decode_base64_to_bytes
_LOGGER = logging.getLogger(__name__)
@ -21,7 +21,7 @@ except ImportError:
from urllib2 import quote # type: ignore
class TablesEntityDatetime(datetime.datetime):
class TablesEntityDatetime(datetime):
@property
def tables_service_value(self):
@ -61,13 +61,13 @@ def _from_entity_datetime(value):
cleaned_value = clean_up_dotnet_timestamps(value)
try:
dt_obj = TablesEntityDatetime.strptime(cleaned_value, "%Y-%m-%dT%H:%M:%S.%fZ").replace(
tzinfo=TZ_UTC
tzinfo=timezone.utc
)
except ValueError:
dt_obj = TablesEntityDatetime.strptime(cleaned_value, "%Y-%m-%dT%H:%M:%SZ").replace(
tzinfo=TZ_UTC
tzinfo=timezone.utc
)
dt_obj._service_value = value # pylint:disable=protected-access
dt_obj._service_value = value # pylint:disable=protected-access,assigning-non-slot
return dt_obj
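
The service returns timestamps such as 2023-07-13T18:21:22.1234567Z; once clean_up_dotnet_timestamps trims the seven-digit fraction, the parse above reduces to this sketch (using plain datetime rather than the TablesEntityDatetime subclass):

from datetime import datetime, timezone

raw = "2023-07-13T18:21:22.123456Z"  # already trimmed to six fractional digits
try:
    dt = datetime.strptime(raw, "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=timezone.utc)
except ValueError:
    # Timestamps without a fractional part match the second format.
    dt = datetime.strptime(raw, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=timezone.utc)
print(dt.isoformat())  # 2023-07-13T18:21:22.123456+00:00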
@ -141,6 +141,11 @@ def _convert_to_entity(entry_element):
"PartitionKey":"my_partition_key",
"RowKey":"my_row_key"
}
:param entry_element: The entity in response.
:type entry_element: Mapping[str, Any]
:return: An entity dict with additional metadata.
:rtype: dict[str, Any]
"""
entity = TableEntity()
@ -213,7 +218,13 @@ def _convert_to_entity(entry_element):
def _extract_etag(response):
""" Extracts the etag from the response headers. """
""" Extracts the etag from the response headers.
:param response: The PipelineResponse object.
:type response: ~azure.core.pipeline.PipelineResponse
:return: The etag from the response headers
:rtype: str or None
"""
if response and response.headers:
return response.headers.get("etag")
@ -231,8 +242,8 @@ def _extract_continuation_token(continuation_token):
return None, None
try:
return continuation_token.get("PartitionKey"), continuation_token.get("RowKey")
except AttributeError:
raise ValueError("Invalid continuation token format.")
except AttributeError as exc:
raise ValueError("Invalid continuation token format.") from exc
def _normalize_headers(headers):

View file

@ -17,9 +17,10 @@ class TableEntity(dict):
@property
def metadata(self) -> Dict[str, Any]:
"""Resets metadata to be a part of the entity
"""Resets metadata to be a part of the entity.
:return: A dict of entity metadata.
:rtype: Dict[str, Any]
:rtype: dict[str, Any]
"""
return self._metadata
@ -69,5 +70,5 @@ the below with both create STRING typed properties on the entity::
:param value:
:type value: Any
:param edm_type: Type of the value
:type edm_type: str or :class:`~azure.data.tables.EdmType`
:type edm_type: str or ~azure.data.tables.EdmType
"""

View file

@ -168,11 +168,11 @@ def _decode_error(response, error_message=None, error_type=None, **kwargs): # p
error_type = HttpResponseError
try:
error_message += "\nErrorCode:{}".format(error_code.value)
error_message += f"\nErrorCode:{error_code.value}"
except AttributeError:
error_message += "\nErrorCode:{}".format(error_code)
error_message += f"\nErrorCode:{error_code}"
for name, info in additional_data.items():
error_message += "\n{}:{}".format(name, info)
error_message += f"\n{name}:{info}"
error = error_type(message=error_message, response=response, **kwargs)
error.error_code = error_code
@ -184,16 +184,16 @@ def _reraise_error(decoded_error):
_, _, exc_traceback = sys.exc_info()
try:
raise decoded_error.with_traceback(exc_traceback)
except AttributeError:
except AttributeError as exc:
decoded_error.__traceback__ = exc_traceback
raise decoded_error
raise decoded_error from exc
def _process_table_error(storage_error, table_name=None):
try:
decoded_error = _decode_error(storage_error.response, storage_error.message)
except AttributeError:
raise storage_error
except AttributeError as exc:
raise storage_error from exc
if table_name:
_validate_tablename_error(decoded_error, table_name)
_reraise_error(decoded_error)
@ -227,8 +227,8 @@ def _reprocess_error(decoded_error, identifiers=None):
"Too many access policies provided. The server does not support setting more than 5 access policies"\
"on a single resource."
)
except AttributeError:
raise decoded_error
except AttributeError as exc:
raise decoded_error from exc
class TableTransactionError(HttpResponseError):

View file

@ -74,9 +74,7 @@ class TableAccessPolicy(GenAccessPolicy):
self.permission = kwargs.get('permission')
def __repr__(self) -> str:
return "TableAccessPolicy(start={}, expiry={}, permission={})".format(
self.start, self.expiry, self.permission
)[1024:]
return f"TableAccessPolicy(start={self.start}, expiry={self.expiry}, permission={self.permission})"[1024:]
class TableAnalyticsLogging(GeneratedLogging):
@ -93,7 +91,7 @@ class TableAnalyticsLogging(GeneratedLogging):
"""
def __init__(self, **kwargs) -> None: # pylint: disable=super-init-not-called
self.version = kwargs.get("version", u"1.0")
self.version = kwargs.get("version", "1.0")
self.delete = kwargs.get("delete", False)
self.read = kwargs.get("read", False)
self.write = kwargs.get("write", False)
@ -114,9 +112,8 @@ class TableAnalyticsLogging(GeneratedLogging):
)
def __repr__(self) -> str:
return "TableAnalyticsLogging(version={}, delete={}, read={}, write={}, retention_policy={})".format(
self.version, self.delete, self.read, self.write, self.retention_policy
)[1024:]
return f"TableAnalyticsLogging(version={self.version}, delete={self.delete}, read={self.read}, \
write={self.write}, retention_policy={self.retention_policy})"[1024:]
class TableMetrics(GeneratedMetrics):
@ -133,7 +130,7 @@ class TableMetrics(GeneratedMetrics):
"""
def __init__(self, **kwargs) -> None: # pylint: disable=super-init-not-called
self.version = kwargs.get("version", u"1.0")
self.version = kwargs.get("version", "1.0")
self.enabled = kwargs.get("enabled", False)
self.include_apis = kwargs.get("include_apis")
self.retention_policy = kwargs.get("retention_policy") or TableRetentionPolicy()
@ -142,7 +139,10 @@ class TableMetrics(GeneratedMetrics):
def _from_generated(cls, generated) -> 'TableMetrics':
"""A summary of request statistics grouped by API in hour or minute aggregates.
:param TableMetrics generated: generated Metrics
:param generated: Generated Metrics.
:type generated: ~azure.data.tables._generated.models.Metrics
:return: A TableMetrics object.
:rtype: ~azure.data.tables.TableMetrics
"""
if not generated:
return cls()
@ -156,9 +156,8 @@ class TableMetrics(GeneratedMetrics):
)
def __repr__(self) -> str:
return "TableMetrics(version={}, enabled={}, include_apis={}, retention_policy={})".format(
self.version, self.enabled, self.include_apis, self.retention_policy
)[1024:]
return f"TableMetrics(version={self.version}, enabled={self.enabled}, include_apis={self.include_apis}, \
retention_policy={self.retention_policy})"[1024:]
class TableRetentionPolicy(GeneratedRetentionPolicy):
@ -186,7 +185,10 @@ class TableRetentionPolicy(GeneratedRetentionPolicy):
All required parameters must be populated in order to send to Azure.
:param TableRetentionPolicy generated: Generated Retention Policy
:param generated: Generated Retention Policy.
:type generated: ~azure.data.tables._generated.models.RetentionPolicy
:return: A TableRetentionPolicy object.
:rtype: ~azure.data.tables.TableRetentionPolicy
"""
if not generated:
@ -196,10 +198,10 @@ class TableRetentionPolicy(GeneratedRetentionPolicy):
days=generated.days,
)
def __repr__(self) -> str:
return "TableRetentionPolicy(enabled={}, days={})".format(self.enabled, self.days)[1024:]
return f"TableRetentionPolicy(enabled={self.enabled}, days={self.days})"[1024:]
class TableCorsRule(object):
class TableCorsRule:
"""CORS is an HTTP feature that enables a web application running under one
domain to access resources in another domain. Web browsers implement a
security restriction known as same-origin policy that prevents a web page
@ -208,22 +210,22 @@ class TableCorsRule(object):
All required parameters must be populated in order to send to Azure.
:param List[str] allowed_origins:
:param list[str] allowed_origins:
A list of origin domains that will be allowed via CORS, or "*" to allow
all domains. The list must contain at least one entry. Limited to 64
origin domains. Each allowed origin can have up to 256 characters.
:param List[str] allowed_methods:
:param list[str] allowed_methods:
A list of HTTP methods that are allowed to be executed by the origin.
The list must contain at least one entry. For Azure Storage,
permitted methods are DELETE, GET, HEAD, MERGE, POST, OPTIONS or PUT.
:keyword int max_age_in_seconds:
The number of seconds that the client/browser should cache a
pre-flight response.
:keyword List[str] exposed_headers:
:keyword list[str] exposed_headers:
Defaults to an empty list. A list of response headers to expose to CORS
clients. Limited to 64 defined headers and two prefixed headers. Each
header can be up to 256 characters.
:keyword List[str] allowed_headers:
:keyword list[str] allowed_headers:
Defaults to an empty list. A list of headers allowed to be part of
the cross-origin request. Limited to 64 defined headers and 2 prefixed
headers. Each header can be up to 256 characters.
@ -258,9 +260,9 @@ class TableCorsRule(object):
)
def __repr__(self) -> str:
return "TableCorsRules(allowed_origins={}, allowed_methods={}, allowed_headers={}, exposed_headers={}, max_age_in_seconds={})".format( # pylint: disable=line-too-long
self.allowed_origins, self.allowed_methods, self.allowed_headers, self.exposed_headers, self.max_age_in_seconds # pylint: disable=line-too-long
)[1024:]
return f"TableCorsRules(allowed_origins={self.allowed_origins}, allowed_methods={self.allowed_methods}, \
allowed_headers={self.allowed_headers}, exposed_headers={self.exposed_headers}, \
max_age_in_seconds={self.max_age_in_seconds})"[1024:]
class TablePropertiesPaged(PageIterator):
@ -285,7 +287,7 @@ class TablePropertiesPaged(PageIterator):
self.filter = kwargs.get("filter")
self._location_mode = None
def _get_next_cb(self, continuation_token, **kwargs):
def _get_next_cb(self, continuation_token, **kwargs): # pylint: disable=inconsistent-return-statements
try:
return self._command(
top=self.results_per_page,
@ -331,7 +333,7 @@ class TableEntityPropertiesPaged(PageIterator):
self.select = kwargs.get("select")
self._location_mode = None
def _get_next_cb(self, continuation_token, **kwargs):
def _get_next_cb(self, continuation_token, **kwargs): # pylint: disable=inconsistent-return-statements
next_partition_key, next_row_key = _extract_continuation_token(
continuation_token
)
@ -361,7 +363,7 @@ class TableEntityPropertiesPaged(PageIterator):
return next_entity or None, props_list
class TableSasPermissions(object):
class TableSasPermissions:
def __init__(self, **kwargs) -> None:
"""
:keyword bool read:
@ -381,15 +383,19 @@ class TableSasPermissions(object):
def __or__(self, other: 'TableSasPermissions') -> 'TableSasPermissions':
"""
:param other:
:type other: :class:`~azure.data.tables.TableSasPermissions`
:param other: An TableSasPermissions object to add in logic "or".
:type other: ~azure.data.tables.TableSasPermissions
:return: An TableSasPermissions object
:rtype: ~azure.data.tables.TableSasPermissions
"""
return TableSasPermissions(_str=str(self) + str(other))
def __add__(self, other: 'TableSasPermissions') -> 'TableSasPermissions':
"""
:param other:
:type other: :class:`~azure.data.tables.TableSasPermissions`
:param other: An TableSasPermissions object to add in logic "add".
:type other: ~azure.data.tables.TableSasPermissions
:return: An TableSasPermissions object
:rtype: ~azure.data.tables.TableSasPermissions
"""
return TableSasPermissions(_str=str(self) + str(other))
@ -402,9 +408,8 @@ class TableSasPermissions(object):
)
def __repr__(self) -> str:
return "TableSasPermissions(read={}, add={}, update={}, delete={})".format(
self.read, self.add, self.update, self.delete
)[1024:]
return f"TableSasPermissions(read={self.read}, add={self.add}, update={self.update}, \
delete={self.delete})"[1024:]
@classmethod
def from_string(cls, permission: str, **kwargs) -> 'TableSasPermissions':
@ -417,7 +422,7 @@ class TableSasPermissions(object):
:param str permission: Specify permissions in
the string with the first letter of the word.
:return: A TableSasPermissions object
:rtype: :class:`~azure.data.tables.TableSasPermissions`
:rtype: ~azure.data.tables.TableSasPermissions
"""
p_read = "r" in permission
p_add = "a" in permission
@ -432,7 +437,13 @@ class TableSasPermissions(object):
def service_stats_deserialize(generated: GenTableServiceStats) -> Dict[str, Any]:
"""Deserialize a ServiceStats objects into a dict."""
"""Deserialize a ServiceStats objects into a dict.
:param generated: The generated TableServiceStats.
:type generated: ~azure.data.tables._generated.models.TableServiceStats
:return: The deserialized TableServiceStats.
:rtype: dict
"""
return {
"geo_replication": {
"status": generated.geo_replication.status, # type: ignore
@ -442,7 +453,13 @@ def service_stats_deserialize(generated: GenTableServiceStats) -> Dict[str, Any]
def service_properties_deserialize(generated: GenTableServiceProperties) -> Dict[str, Any]:
"""Deserialize a ServiceProperties objects into a dict."""
"""Deserialize a ServiceProperties objects into a dict.
:param generated: The generated TableServiceProperties
:type generated: ~azure.data.tables._generated.models.TableServiceProperties
:return: The deserialized TableServiceProperties.
:rtype: dict
"""
return {
"analytics_logging": TableAnalyticsLogging._from_generated(generated.logging), # pylint: disable=protected-access
"hour_metrics": TableMetrics._from_generated( # pylint: disable=protected-access
@ -458,12 +475,12 @@ def service_properties_deserialize(generated: GenTableServiceProperties) -> Dict
}
class TableItem(object):
class TableItem:
"""
Represents an Azure TableItem.
Returned by TableServiceClient.list_tables and TableServiceClient.query_tables.
:ivar str name: The name of the table.
:param str name: The name of the table.
"""
def __init__(self, name: str) -> None:
@ -478,7 +495,7 @@ class TableItem(object):
return cls(generated.table_name) # type: ignore
def __repr__(self) -> str:
return "TableItem(name={})".format(self.name)[1024:]
return f"TableItem(name={self.name})"[1024:]
class TablePayloadFormat(object):
@ -560,7 +577,7 @@ class ResourceTypes(object):
:param str string: Specify service, container, or object in
in the string with the first letter of the word.
:return: A ResourceTypes object
:rtype: :class:`~azure.data.tables.ResourceTypes`
:rtype: ~azure.data.tables.ResourceTypes
"""
res_service = "s" in string
res_object = "o" in string
@ -631,7 +648,7 @@ class AccountSasPermissions(object):
:param permission: Specify permissions in the string with the first letter of the word.
:type permission: str
:return: An AccountSasPermissions object
:rtype: :class:`~azure.data.tables.AccountSasPermissions`
:rtype: ~azure.data.tables.AccountSasPermissions
"""
p_read = "r" in permission
p_write = "w" in permission

View file

@ -27,6 +27,12 @@ from ._models import LocationMode
def set_next_host_location(settings: Dict[str, Any], request: PipelineRequest) -> None:
"""
A function which sets the next host location on the request, if applicable.
:param settings: The current retry context settings.
:type settings: dict[str, Any]
:param request: The outgoing request.
:type request: ~azure.core.pipeline.PipelineRequest
:return: None
"""
if request.http_request.method not in ['GET', 'HEAD']:
return
@ -77,7 +83,7 @@ class StorageHosts(SansIOHTTPPolicy):
request.context.options["retry_to_secondary"] = False
if use_location not in self.hosts:
raise ValueError(
"Attempting to use undefined host location {}".format(use_location)
f"Attempting to use undefined host location {use_location}"
)
if use_location != location_mode:
# Update request URL to use the specified location
@ -93,37 +99,48 @@ class TablesRetryPolicy(RetryPolicy):
The retry policy in the pipeline can be configured directly, or tweaked on a per-call basis.
:keyword bool retry_to_secondary: Whether to allow retrying to the secondary fail-over host
location. Default value is False.
:keyword int retry_total: Total number of retries to allow. Takes precedence over other counts.
Default value is 10.
:keyword int retry_connect: How many connection-related errors to retry on.
These are errors raised before the request is sent to the remote server,
which we assume has not triggered the server to process the request. Default value is 3.
:keyword int retry_read: How many times to retry on read errors.
These errors are raised after the request was sent to the server, so the
request may have side-effects. Default value is 3.
:keyword int retry_status: How many times to retry on bad status codes. Default value is 3.
:keyword float retry_backoff_factor: A backoff factor to apply between attempts after the second try
(most errors are resolved immediately by a second try without a delay).
In fixed mode, retry policy will always sleep for {backoff factor}.
In 'exponential' mode, retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))`
seconds. If the backoff_factor is 0.1, then the retry will sleep
for [0.0s, 0.2s, 0.4s, ...] between retries. The default value is 0.8.
:keyword int retry_backoff_max: The maximum back off time. Default value is 120 seconds (2 minutes).
:keyword RetryMode retry_mode: Fixed or exponential delay between attemps, default is exponential.
:keyword int timeout: Timeout setting for the operation in seconds, default is 604800s (7 days).
:ivar int total_retries: Total number of retries to allow. Takes precedence over other counts.
Default value is 10.
:ivar int connect_retries: How many connection-related errors to retry on.
These are errors raised before the request is sent to the remote server,
which we assume has not triggered the server to process the request. Default value is 3.
:ivar int read_retries: How many times to retry on read errors.
These errors are raised after the request was sent to the server, so the
request may have side-effects. Default value is 3.
:ivar int status_retries: How many times to retry on bad status codes. Default value is 3.
:ivar float backoff_factor: A backoff factor to apply between attempts after the second try
(most errors are resolved immediately by a second try without a delay).
In fixed mode, retry policy will always sleep for {backoff factor}. In 'exponential' mode,
retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))` seconds.
If the backoff_factor is 0.1, then the retry will sleep for [0.0s, 0.2s, 0.4s, ...] between retries.
The default value is 0.8.
:ivar int backoff_max: The maximum back off time. Default value is 120 seconds (2 minutes).
"""
def __init__(self, **kwargs):
"""
:keyword bool retry_to_secondary: Whether to allow retrying to the secondary fail-over host
location. Default value is False.
:keyword int retry_total: Total number of retries to allow. Takes precedence over other counts.
Default value is 10.
:keyword int retry_connect: How many connection-related errors to retry on.
These are errors raised before the request is sent to the remote server,
which we assume has not triggered the server to process the request. Default value is 3.
:keyword int retry_read: How many times to retry on read errors.
These errors are raised after the request was sent to the server, so the
request may have side-effects. Default value is 3.
:keyword int retry_status: How many times to retry on bad status codes. Default value is 3.
:keyword float retry_backoff_factor: A backoff factor to apply between attempts after the second try
(most errors are resolved immediately by a second try without a delay).
In fixed mode, retry policy will always sleep for {backoff factor}. In 'exponential' mode,
retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))` seconds.
If the retry_backoff_factor is 0.1, then the retry will sleep for [0.0s, 0.2s, 0.4s, ...] between retries.
The default value is 0.8.
:keyword int retry_backoff_max: The maximum back off time. Default value is 120 seconds (2 minutes).
:keyword retry_mode: Fixed or exponential delay between attempts, default is exponential.
:type retry_mode: ~azure.core.pipeline.policies.RetryMode
:keyword int timeout: Timeout setting for the operation in seconds, default is 604800s (7 days).
"""
super(TablesRetryPolicy, self).__init__(**kwargs)
self.retry_to_secondary = kwargs.get('retry_to_secondary', False)
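
These keyword arguments flow in from the client constructor, so a caller would typically tune the policy indirectly. A hedged sketch, with a placeholder account name and key:

from azure.core.credentials import AzureNamedKeyCredential
from azure.data.tables import TableServiceClient

credential = AzureNamedKeyCredential("mytables", "<account-key>")
service = TableServiceClient(
    endpoint="https://mytables.table.core.windows.net",
    credential=credential,
    retry_total=5,             # overall cap; takes precedence over the other counts
    retry_backoff_factor=0.5,  # base for the exponential backoff described above
    retry_to_secondary=True,   # allow GET/HEAD retries against the secondary host
)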
@ -133,6 +150,12 @@ class TablesRetryPolicy(RetryPolicy):
respect the Retry-After header, whether this header is present, and
whether the returned status code is on the list of status codes to
be retried upon on the presence of the aforementioned header)
:param dict settings: The retry settings.
:param response: The PipelineResponse object
:type response: ~azure.core.pipeline.PipelineResponse
:return: True if method/status code is retryable. False if not retryable.
:rtype: bool
"""
should_retry = super(TablesRetryPolicy, self).is_retry(settings, response)
status = response.http_response.status_code
@ -145,8 +168,9 @@ class TablesRetryPolicy(RetryPolicy):
"""Configures the retry settings.
:param options: keyword arguments from context.
:type options: dict
:return: A dict containing settings and history for retries.
:rtype: Dict
:rtype: dict
"""
config = super(TablesRetryPolicy, self).configure_retries(options)
config["retry_secondary"] = options.pop("retry_to_secondary", self.retry_to_secondary)
@ -165,11 +189,14 @@ class TablesRetryPolicy(RetryPolicy):
super(TablesRetryPolicy, self).update_context(context, retry_settings)
context['location_mode'] = retry_settings['mode']
def update_request(self, request, retry_settings): # pylint:disable=no-self-use
def update_request(self, request, retry_settings):
"""Updates the pipeline request before attempting to retry.
:param PipelineRequest request: The outgoing request.
:param Dict(str, Any) retry_settings: The current retry context settings.
:param request: The outgoing request.
:type request: ~azure.core.pipeline.PipelineRequest
:param retry_settings: The current retry context settings.
:type retry_settings: dict[str, Any]
:return: None
"""
set_next_host_location(retry_settings, request)
@ -179,7 +206,7 @@ class TablesRetryPolicy(RetryPolicy):
:param request: The PipelineRequest object
:type request: ~azure.core.pipeline.PipelineRequest
:return: Returns the PipelineResponse or raises error if maximum retries exceeded.
:rtype: :class:`~azure.core.pipeline.PipelineResponse`
:rtype: ~azure.core.pipeline.PipelineResponse
:raises: ~azure.core.exceptions.AzureError if maximum retries exceeded.
:raises: ~azure.core.exceptions.ClientAuthenticationError if authentication fails.
"""
@ -190,8 +217,8 @@ class TablesRetryPolicy(RetryPolicy):
is_response_error = True
while retry_active:
start_time = time.time()
try:
start_time = time.time()
self._configure_timeout(request, absolute_timeout, is_response_error)
response = self.next.send(request)
if self.is_retry(retry_settings, response):

View file

@ -8,4 +8,4 @@
from ._version import VERSION
SDK_MONIKER = "data-tables/{}".format(VERSION)
SDK_MONIKER = f"data-tables/{VERSION}"

View file

@ -4,14 +4,13 @@
# license information.
# --------------------------------------------------------------------------
from binascii import hexlify
from typing import Dict
from typing import Dict, Optional, Union
from uuid import UUID
from datetime import datetime
from math import isnan
from enum import Enum
from azure.core import MatchConditions
from azure.core.exceptions import raise_with_traceback
from ._entity import EdmType
from ._common_conversion import _encode_base64, _to_utc_datetime
@ -27,26 +26,36 @@ def _get_match_headers(etag, match_condition):
if etag:
raise ValueError("Etag is not supported for an Unconditional operation.")
return "*"
raise ValueError("Unsupported match condition: {}".format(match_condition))
raise ValueError(f"Unsupported match condition: {match_condition}")
def _prepare_key(keyvalue):
"""Duplicate the single quote char to escape."""
def _prepare_key(keyvalue: str) -> str:
"""Duplicate the single quote char to escape.
:param keyvalue: A key value in table entity.
:type keyvalue: str
:return: The key value with any single quotes escaped.
:rtype: str
"""
try:
return keyvalue.replace("'", "''")
except AttributeError:
raise TypeError('PartitionKey or RowKey must be of type string.')
except AttributeError as exc:
raise TypeError('PartitionKey or RowKey must be of type string.') from exc
def _parameter_filter_substitution(parameters: Dict[str, str], query_filter: str) -> str:
"""Replace user defined parameter in filter
"""Replace user defined parameters in filter.
:param parameters: User defined parameters
:type parameters: dict[str, str]
:param str query_filter: Filter for querying
:return: A query filter replaced by user defined parameters.
:rtype: str
"""
if parameters:
filter_strings = query_filter.split(' ')
for index, word in enumerate(filter_strings):
if word[0] == u'@':
if word[0] == '@':
val = parameters[word[1:]]
if val in [True, False]:
filter_strings[index] = str(val).lower()
@ -56,18 +65,18 @@ def _parameter_filter_substitution(parameters: Dict[str, str], query_filter: str
if val.bit_length() <= 32:
filter_strings[index] = str(val)
else:
filter_strings[index] = "{}L".format(str(val))
filter_strings[index] = f"{str(val)}L"
elif isinstance(val, datetime):
filter_strings[index] = "datetime'{}'".format(_to_utc_datetime(val))
filter_strings[index] = f"datetime'{_to_utc_datetime(val)}'"
elif isinstance(val, UUID):
filter_strings[index] = "guid'{}'".format(str(val))
filter_strings[index] = f"guid'{str(val)}'"
elif isinstance(val, bytes):
v = str(hexlify(val))
if v[0] == 'b': # Python 3 adds a 'b' and quotations, python 2.7 does neither
v = v[2:-1]
filter_strings[index] = "X'{}'".format(v)
filter_strings[index] = f"X'{v}'"
else:
filter_strings[index] = "'{}'".format(_prepare_key(val))
filter_strings[index] = f"'{_prepare_key(val)}'"
return ' '.join(filter_strings)
return query_filter
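
This substitution is what backs the parameters argument of the public query APIs; a hedged usage sketch with a hypothetical table and connection string:

from datetime import datetime, timezone
from azure.data.tables import TableClient

table = TableClient.from_connection_string("<connection-string>", table_name="mytable")
name_filter = "PartitionKey eq @pk and Timestamp gt @cutoff"
entities = table.query_entities(
    name_filter,
    parameters={"pk": "sensors", "cutoff": datetime(2023, 7, 1, tzinfo=timezone.utc)},
)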
@ -197,6 +206,11 @@ def _add_entity_properties(source):
"PartitionKey":"my_partition_key",
"RowKey":"my_row_key"
}
:param source: A table entity.
:type source: ~azure.data.tables.TableEntity or Mapping[str, Any]
:return: An entity with its properties' metadata in JSON format.
:rtype: Mapping[str, Any]
"""
properties = {}
@ -238,12 +252,15 @@ def _add_entity_properties(source):
return properties
def serialize_iso(attr):
def serialize_iso(attr: Optional[Union[str, datetime]]) -> Optional[str]:
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises ValueError: If format is invalid.
:param attr: Object to be serialized.
:type attr: str or ~datetime.datetime or None
:return: An ISO-8601 formatted string, or None
:rtype: str or None
:raises ValueError: When unable to serialize the input object.
:raises TypeError: When the object to serialize is not a valid datetime object.
"""
if not attr:
return None
@ -255,14 +272,9 @@ def serialize_iso(attr):
if utc.tm_year > 9999 or utc.tm_year < 1:
raise OverflowError("Hit max or min date")
date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
# cspell:disable-next-line
utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec
)
date = f"{utc.tm_year:04}-{utc.tm_mon:02}-{utc.tm_mday:02}T{utc.tm_hour:02}:{utc.tm_min:02}:{utc.tm_sec:02}"
return date + "Z"
except (ValueError, OverflowError) as err:
msg = "Unable to serialize datetime object."
raise_with_traceback(ValueError, msg, err)
raise ValueError("Unable to serialize datetime object.") from err
except AttributeError as err:
msg = "ISO-8601 object must be valid Datetime object."
raise_with_traceback(TypeError, msg, err)
raise TypeError("ISO-8601 object must be valid datetime object.") from err
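
A simplified mirror of the serialization path for timezone-aware datetimes (a sketch, not the SDK helper; it skips the string pass-through and range checks):

from datetime import datetime, timedelta, timezone

def simple_serialize_iso(value):
    # utctimetuple() normalizes an aware datetime to UTC before formatting.
    utc = value.utctimetuple()
    return (f"{utc.tm_year:04}-{utc.tm_mon:02}-{utc.tm_mday:02}"
            f"T{utc.tm_hour:02}:{utc.tm_min:02}:{utc.tm_sec:02}Z")

local = datetime(2023, 7, 13, 11, 21, 22, tzinfo=timezone(timedelta(hours=-7)))
print(simple_serialize_iso(local))  # 2023-07-13T18:21:22Z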

View file

@ -26,14 +26,17 @@ class SharedAccessSignature(object):
signature tokens with an account name and account key. Users can either
use the factory or can construct the appropriate service and use the
generate_*_shared_access_signature method directly.
:ivar str account_name: The name of the Tables account.
:ivar str account_key: The key of the Tables account.
:ivar str x_ms_version: The service version used to generate the shared access signatures.
"""
def __init__(self, credential, x_ms_version=DEFAULT_X_MS_VERSION):
"""
:param credential: The credential used for authenticating requests
:type credential: :class:`~azure.core.credentials.NamedKeyCredential`
:param str x_ms_version:
The service version used to generate the shared access signatures.
:type credential: ~azure.core.credentials.AzureNamedKeyCredential
:param str x_ms_version: The service version used to generate the shared access signatures.
"""
self.account_name = credential.named_key.name
self.account_key = credential.named_key.key
@ -43,7 +46,7 @@ class SharedAccessSignature(object):
self,
services: str,
resource_types: ResourceTypes,
permission: Union[AccountSasPermissions, str],
permission: AccountSasPermissions,
expiry: Union[datetime, str],
start: Optional[Union[datetime, str]] = None,
ip_address_or_range: Optional[str] = None,
@ -61,7 +64,7 @@ class SharedAccessSignature(object):
Specifies the resource types that are accessible with the account
SAS. You can combine values to provide access to more than one
resource type.
:param AccountSasPermissions permission:
:param ~azure.data.tables.AccountSasPermissions permission:
The permissions associated with the shared access signature. The
user is restricted to operations allowed by the permissions.
Required unless an id is given referencing a stored access policy
@ -75,23 +78,27 @@ class SharedAccessSignature(object):
been specified in an associated stored access policy. Azure will always
convert values to UTC. If a date is passed in without timezone info, it
is assumed to be UTC.
:type expiry: datetime or str
:type expiry: ~datetime.datetime or str
:param start:
The time at which the shared access signature becomes valid. If
omitted, start time for this call is assumed to be the time when the
storage service receives the request. Azure will always convert values
to UTC. If a date is passed in without timezone info, it is assumed to
be UTC.
:type start: datetime or str
:param str ip_address_or_range:
:type start: ~datetime.datetime or str or None
:param ip_address_or_range:
Specifies an IP address or a range of IP addresses from which to accept requests.
If the IP address from which the request originates does not match the IP address
or address range specified on the SAS token, the request is not authenticated.
For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS
restricts the request to those IP addresses.
:param Union[str, SASProtocol] protocol:
Specifies the protocol permitted for a request made. The default value
is https,http. See :class:`~azure.cosmosdb.table.common.models.Protocol` for possible values.
:type ip_address_or_range: str or None
:param protocol:
Specifies the protocol permitted for a request made.
See :class:`~azure.data.tables.SASProtocol` for possible values.
:type protocol: str or ~azure.data.tables.SASProtocol or None
:return: A shared access signature for the account.
:rtype: str
"""
sas = _SharedAccessHelper()
sas.add_base(
@ -268,7 +275,7 @@ class _SharedAccessHelper(object):
def get_token(self) -> str:
return "&".join(
[
"{0}={1}".format(n, url_quote(v))
f"{n}={url_quote(v)}"
for n, v in self.query_dict.items()
if v is not None
]
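
In application code the account-level SAS described above is usually produced through the package's public helper rather than by using SharedAccessSignature directly; a hedged example with placeholder credentials:

from datetime import datetime, timedelta, timezone
from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential
from azure.data.tables import (
    AccountSasPermissions,
    ResourceTypes,
    TableServiceClient,
    generate_account_sas,
)

credential = AzureNamedKeyCredential("mytables", "<account-key>")
sas_token = generate_account_sas(
    credential,
    resource_types=ResourceTypes.from_string("so"),      # service + object
    permission=AccountSasPermissions.from_string("rl"),  # read + list
    expiry=datetime.now(timezone.utc) + timedelta(hours=1),
)
service = TableServiceClient(
    endpoint="https://mytables.table.core.windows.net",
    credential=AzureSasCredential(sas_token),
)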

View file

@ -40,6 +40,9 @@ class TableBatchOperations(object):
supported within a single transaction. The batch can include at most 100
entities, and its total payload may be no more than 4 MB in size.
:ivar str table_name: The name of the table.
:ivar requests: A list of :class:`~azure.core.pipeline.transport.HttpRequest` in a batch.
:vartype requests: list[~azure.core.pipeline.transport.HttpRequest]
"""
def __init__(
@ -51,22 +54,21 @@ class TableBatchOperations(object):
table_name: str,
is_cosmos_endpoint: bool = False,
**kwargs
):
) -> None:
"""Create TableClient from a Credential.
:param client: an AzureTable object
:type client: AzureTable
:param serializer: serializer object for request serialization
:param client: An AzureTable object.
:type client: ~azure.data.tables._generated.AzureTable
:param serializer: A Serializer object for request serialization.
:type serializer: ~azure.data.tables._generated._serialization.Serializer
:param deserializer: deserializer object for request serialization
:param deserializer: A Deserializer object for request deserialization.
:type deserializer: ~azure.data.tables._generated._serialization.Deserializer
:param config: Azure Table Configuration object
:type config: AzureTableConfiguration
:param table_name: name of the Table to perform operations on
:param config: An AzureTableConfiguration object.
:type config: ~azure.data.tables._generated._configuration.AzureTableConfiguration
:param table_name: The name of the Table to perform operations on.
:type table_name: str
:param table_client: TableClient object to perform operations on
:type table_client: TableClient
:param is_cosmos_endpoint: True if the client endpoint is for Tables Cosmos. False if not. Default is False.
:type is_cosmos_endpoint: bool
:returns: None
"""
self._client = client
@ -89,23 +91,30 @@ class TableBatchOperations(object):
raise ValueError("Partition Keys must all be the same")
def add_operation(self, operation: TransactionOperationType) -> None:
"""Add a single operation to a batch."""
"""Add a single operation to a batch.
:param operation: An operation comprising the operation type and entity, optionally with kwargs.
:type operation: A tuple of ~azure.data.tables.TransactionOperation or str, and
~azure.data.tables.TableEntity or Mapping[str, Any]. Or a tuple of
~azure.data.tables.TransactionOperation or str, and
~azure.data.tables.TableEntity or Mapping[str, Any], and Mapping[str, Any]
:return: None
"""
try:
operation_type, entity, kwargs = operation # type: ignore
except ValueError:
operation_type, entity, kwargs = operation[0], operation[1], {} # type: ignore
try:
getattr(self, operation_type.lower())(entity, **kwargs)
except AttributeError:
raise ValueError("Unrecognized operation: {}".format(operation))
except AttributeError as exc:
raise ValueError(f"Unrecognized operation: {operation}") from exc
def create(self, entity: EntityType, **kwargs) -> None:
"""Adds an insert operation to the current batch.
:param entity: The properties for the table entity.
:type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
:type entity: ~azure.data.tables.TableEntity or dict[str, Any]
:return: None
:rtype: None
:raises ValueError:
.. admonition:: Example:
@ -149,18 +158,19 @@ class TableBatchOperations(object):
:param entity:
The entity to insert. Can be a dict or an entity object.
Must contain a PartitionKey and a RowKey.
:type: entity: Dict or :class:`~azure.data.tables.models.Entity`
:type entity: dict[str, Any] or ~azure.data.tables.models.Entity
:param timeout: The timeout parameter is expressed in seconds.
:type timeout: int
:type timeout: int or None
:param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
limit that is recorded in the analytics logs when analytics logging is enabled.
:type request_id_parameter: str
:type request_id_parameter: str or None
:param response_preference: Specifies the return format. Default is return without content.
:type response_preference: str or ~azure.data.tables.models.ResponseFormat
:param format: Specifies the media type for the response. Known values are:
"application/json;odata=nometadata", "application/json;odata=minimalmetadata", and
"application/json;odata=fullmetadata".
:type format: str or ~azure.data.tables.models.OdataMetadataFormat
:type format: str or ~azure.data.tables.models.OdataMetadataFormat or None
:return: None
"""
data_service_version = "3.0"
content_type = kwargs.pop("content_type", "application/json;odata=nometadata")
@ -227,14 +237,13 @@ class TableBatchOperations(object):
"""Adds an update operation to the current batch.
:param entity: The properties for the table entity.
:type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
:type entity: ~azure.data.tables.TableEntity or dict[str, Any]
:param mode: Merge or Replace entity
:type mode: :class:`~azure.data.tables.UpdateMode`
:type mode: ~azure.data.tables.UpdateMode
:keyword str etag: Etag of the entity
:keyword match_condition: MatchCondition
:paramtype match_condition: ~azure.core.MatchCondition
:return: None
:rtype: None
:raises ValueError:
.. admonition:: Example:
@ -284,7 +293,7 @@ class TableBatchOperations(object):
**kwargs
)
else:
raise ValueError("Mode type '{}' is not supported.".format(mode))
raise ValueError(f"Mode type '{mode}' is not supported.")
def _batch_update_entity(
self,
@ -307,23 +316,22 @@ class TableBatchOperations(object):
:param row_key: The row key of the entity.
:type row_key: str
:param timeout: The timeout parameter is expressed in seconds.
:type timeout: int
:type timeout: int or None
:param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
limit that is recorded in the analytics logs when analytics logging is enabled.
:type request_id_parameter: str
:type request_id_parameter: str or None
:param if_match: Match condition for an entity to be updated. If specified and a matching
entity is not found, an error will be raised. To force an unconditional update, set to the
wildcard character (*). If not specified, an insert will be performed when no existing entity
is found to update and a replace will be performed if an existing entity is found.
:type if_match: str
:type if_match: str or None
:param table_entity_properties: The properties for the table entity.
:type table_entity_properties: Dict[str, object]
:type table_entity_properties: dict[str, object] or None
:param format: Specifies the media type for the response. Known values are:
"application/json;odata=nometadata", "application/json;odata=minimalmetadata", and
"application/json;odata=fullmetadata".
:type format: str or ~azure.data.tables.models.OdataMetadataFormat
:type format: str or ~azure.data.tables.models.OdataMetadataFormat or None
:return: None
:rtype: None
"""
data_service_version = "3.0"
content_type = kwargs.pop("content_type", "application/json")
@ -411,23 +419,22 @@ class TableBatchOperations(object):
:param row_key: The row key of the entity.
:type row_key: str
:param timeout: The timeout parameter is expressed in seconds.
:type timeout: int
:type timeout: int or None
:param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
limit that is recorded in the analytics logs when analytics logging is enabled.
:type request_id_parameter: str
:type request_id_parameter: str or None
:param if_match: Match condition for an entity to be updated. If specified and a matching
entity is not found, an error will be raised. To force an unconditional update, set to the
wildcard character (*). If not specified, an insert will be performed when no existing entity
is found to update and a merge will be performed if an existing entity is found.
:type if_match: str
:type if_match: str or None
:param table_entity_properties: The properties for the table entity.
:type table_entity_properties: Dict[str, object]
:type table_entity_properties: dict[str, object] or None
:param format: Specifies the media type for the response. Known values are:
"application/json;odata=nometadata", "application/json;odata=minimalmetadata", and
"application/json;odata=fullmetadata".
:type format: str or ~azure.data.tables.models.OdataMetadataFormat
:type format: str or ~azure.data.tables.models.OdataMetadataFormat or None
:return: None
:rtype: None
"""
data_service_version = "3.0"
content_type = kwargs.pop("content_type", "application/json")
@ -498,14 +505,13 @@ class TableBatchOperations(object):
def delete(self, entity: EntityType, **kwargs) -> None:
"""Adds a delete operation to the current branch.
:param partition_key: The partition key of the entity.
:type partition_key: str
:param row_key: The row key of the entity.
:type row_key: str
:param entity: The properties for the table entity.
:type entity: ~azure.data.tables.TableEntity or dict[str, Any]
:keyword str etag: Etag of the entity
:keyword match_condition: MatchCondition
:paramtype match_condition: ~azure.core.MatchCondition
:raises ValueError:
:return: None
:raises: ValueError
.. admonition:: Example:
@ -564,16 +570,15 @@ class TableBatchOperations(object):
wildcard character (*).
:type if_match: str
:param timeout: The timeout parameter is expressed in seconds.
:type timeout: int
:type timeout: int or None
:param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
limit that is recorded in the analytics logs when analytics logging is enabled.
:type request_id_parameter: str
:type request_id_parameter: str or None
:param format: Specifies the media type for the response. Known values are:
"application/json;odata=nometadata", "application/json;odata=minimalmetadata", and
"application/json;odata=fullmetadata".
:type format: str or ~azure.data.tables.models.OdataMetadataFormat
:type format: str or ~azure.data.tables.models.OdataMetadataFormat or None
:return: None
:rtype: None
"""
data_service_version = "3.0"
accept = "application/json;odata=minimalmetadata"
@ -633,10 +638,11 @@ class TableBatchOperations(object):
"""Adds an upsert (update/merge) operation to the batch.
:param entity: The properties for the table entity.
:type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
:type entity: ~azure.data.tables.TableEntity or dict[str, Any]
:param mode: Merge or Replace entity
:type mode: :class:`~azure.data.tables.UpdateMode`
:raises ValueError:
:type mode: ~azure.data.tables.UpdateMode
:return: None
:raises: ValueError
.. admonition:: Example:
@ -671,4 +677,4 @@ class TableBatchOperations(object):
**kwargs
)
else:
raise ValueError("Mode type '{}' is not supported.".format(mode))
raise ValueError(f"Mode type '{mode}' is not supported.")

View file

@ -36,7 +36,9 @@ class TableClient(TablesBaseClient):
:ivar str account_name: The name of the Tables account.
:ivar str table_name: The name of the table.
:ivar str url: The full URL to the Tables account.
:ivar str scheme: The scheme component in the full URL to the Tables account.
:ivar str url: The storage endpoint.
:ivar str api_version: The service API version.
"""
def __init__( # pylint: disable=missing-client-constructor-parameter-credential
@ -54,15 +56,14 @@ class TableClient(TablesBaseClient):
:keyword credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token. The value can be one of AzureNamedKeyCredential (azure-core),
AzureSasCredential (azure-core), or TokenCredentials from azure-identity.
AzureSasCredential (azure-core), or a TokenCredential implementation from azure-identity.
:paramtype credential:
:class:`~azure.core.credentials.AzureNamedKeyCredential` or
:class:`~azure.core.credentials.AzureSasCredential` or
:class:`~azure.core.credentials.TokenCredential`
~azure.core.credentials.AzureNamedKeyCredential or
~azure.core.credentials.AzureSasCredential or
~azure.core.credentials.TokenCredential or None
:keyword api_version: Specifies the version of the operation to use for this request. Default value
is "2019-02-02". Note that overriding this default value may result in unsupported behavior.
is "2019-02-02".
:paramtype api_version: str
:returns: None
"""
if not table_name:
@ -73,8 +74,12 @@ class TableClient(TablesBaseClient):
def _format_url(self, hostname):
"""Format the endpoint URL according to the current location
mode hostname.
:param str hostname: The current location mode hostname.
:returns: The full URL to the Tables account.
:rtype: str
"""
return "{}://{}{}".format(self.scheme, hostname, self._query_str)
return f"{self.scheme}://{hostname}{self._query_str}"
@classmethod
def from_connection_string(cls, conn_str: str, table_name: str, **kwargs) -> "TableClient":
@ -83,7 +88,7 @@ class TableClient(TablesBaseClient):
:param str conn_str: A connection string to an Azure Tables account.
:param str table_name: The table name.
:returns: A table client.
:rtype: :class:`~azure.data.tables.TableClient`
:rtype: ~azure.data.tables.TableClient
.. admonition:: Example:
@ -110,34 +115,29 @@ class TableClient(TablesBaseClient):
:param str table_url: The full URI to the table, including SAS token if used.
:keyword credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token. The value can be one of AzureNamedKeyCredential
or AzureSasCredential from azure-core.
account URL already has a SAS token. The value can be one of AzureNamedKeyCredential (azure-core),
AzureSasCredential (azure-core), or a TokenCredential implementation from azure-identity.
:paramtype credential:
:class:`~azure.core.credentials.AzureNamedKeyCredential` or
:class:`~azure.core.credentials.AzureSasCredential`
~azure.core.credentials.AzureNamedKeyCredential or
~azure.core.credentials.AzureSasCredential or None
:returns: A table client.
:rtype: :class:`~azure.data.tables.TableClient`
:rtype: ~azure.data.tables.TableClient
"""
try:
if not table_url.lower().startswith("http"):
table_url = "https://" + table_url
except AttributeError:
raise ValueError("Table URL must be a string.")
except AttributeError as exc:
raise ValueError("Table URL must be a string.") from exc
parsed_url = urlparse(table_url.rstrip("/"))
if not parsed_url.netloc:
raise ValueError("Invalid URL: {}".format(table_url))
raise ValueError(f"Invalid URL: {table_url}")
table_path = parsed_url.path.lstrip("/").split("/")
account_path = ""
if len(table_path) > 1:
account_path = "/" + "/".join(table_path[:-1])
endpoint = "{}://{}{}?{}".format(
parsed_url.scheme,
parsed_url.netloc.rstrip("/"),
account_path,
parsed_url.query,
)
endpoint = f"{parsed_url.scheme}://{parsed_url.netloc.rstrip('/')}{account_path}?{parsed_url.query}"
table_name = unquote(table_path[-1])
if table_name.lower().startswith("tables('"):
table_name = table_name[8:-2]
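For reference, a minimal sketch of calling from_table_url as parsed above (the account name, key, and table name are placeholders):

from azure.core.credentials import AzureNamedKeyCredential
from azure.data.tables import TableClient

credential = AzureNamedKeyCredential("myaccount", "<account-key>")
client = TableClient.from_table_url(
    "https://myaccount.table.core.windows.net/mytable",
    credential=credential,
)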
@ -151,7 +151,7 @@ class TableClient(TablesBaseClient):
used with Shared Access Signatures.
:return: Dictionary of SignedIdentifiers
:rtype: Dict[str, Optional[:class:`~azure.data.tables.TableAccessPolicy`]]
:rtype: dict[str, ~azure.data.tables.TableAccessPolicy] or dict[str, None]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
"""
timeout = kwargs.pop("timeout", None)
@ -183,9 +183,8 @@ class TableClient(TablesBaseClient):
"""Sets stored access policies for the table that may be used with Shared Access Signatures.
:param signed_identifiers: Access policies to set for the table
:type signed_identifiers: Dict[str, Optional[:class:`~azure.data.tables.TableAccessPolicy`]]
:type signed_identifiers: dict[str, ~azure.data.tables.TableAccessPolicy] or dict[str, None]
:return: None
:rtype: None
:raises: :class:`~azure.core.exceptions.HttpResponseError`
"""
identifiers = []
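A minimal sketch of setting a stored access policy as described above (assumes an existing TableClient named table_client; the policy name and times are placeholders):

from datetime import datetime, timedelta, timezone
from azure.data.tables import TableAccessPolicy

read_only = TableAccessPolicy(
    permission="r",
    start=datetime.now(timezone.utc),
    expiry=datetime.now(timezone.utc) + timedelta(hours=1),
)
table_client.set_table_access_policy(signed_identifiers={"read-only": read_only})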
@ -212,7 +211,7 @@ class TableClient(TablesBaseClient):
"""Creates a new table under the current account.
:return: A TableItem representing the created table.
:rtype: :class:`~azure.data.tables.TableItem`
:rtype: ~azure.data.tables.TableItem
:raises: :class:`~azure.core.exceptions.ResourceExistsError` If the entity already exists
.. admonition:: Example:
@ -241,7 +240,6 @@ class TableClient(TablesBaseClient):
if the table does not exist
:return: None
:rtype: None
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -287,7 +285,6 @@ class TableClient(TablesBaseClient):
The default value is Unconditionally.
:paramtype match_condition: ~azure.core.MatchConditions
:return: None
:rtype: None
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -339,13 +336,13 @@ class TableClient(TablesBaseClient):
_process_table_error(error, table_name=self.table_name)
@distributed_trace
def create_entity(self, entity: EntityType, **kwargs) -> Dict[str, str]:
def create_entity(self, entity: EntityType, **kwargs) -> Dict[str, Any]:
"""Insert entity in a table.
:param entity: The properties for the table entity.
:type entity: Union[TableEntity, Mapping[str, Any]]
:return: Dictionary mapping operation metadata returned from the service
:rtype: Dict[str,str]
:rtype: dict[str, Any]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -369,28 +366,28 @@ class TableClient(TablesBaseClient):
decoded = _decode_error(error.response, error.message)
if decoded.error_code == "PropertiesNeedValue":
if entity.get("PartitionKey") is None:
raise ValueError("PartitionKey must be present in an entity")
raise ValueError("PartitionKey must be present in an entity") from error
if entity.get("RowKey") is None:
raise ValueError("RowKey must be present in an entity")
raise ValueError("RowKey must be present in an entity") from error
_validate_tablename_error(decoded, self.table_name)
_reraise_error(error)
return _trim_service_metadata(metadata, content=content) # type: ignore
@distributed_trace
def update_entity(self, entity: EntityType, mode: UpdateMode = UpdateMode.MERGE, **kwargs) -> Dict[str, str]:
def update_entity(self, entity: EntityType, mode: UpdateMode = UpdateMode.MERGE, **kwargs) -> Dict[str, Any]:
"""Update entity in a table.
:param entity: The properties for the table entity.
:type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
:type entity: ~azure.data.tables.TableEntity or dict[str, Any]
:param mode: Merge or Replace entity
:type mode: :class:`~azure.data.tables.UpdateMode`
:type mode: ~azure.data.tables.UpdateMode
:keyword str etag: Etag of the entity
:keyword match_condition: The condition under which to perform the operation.
Supported values include: MatchConditions.IfNotModified, MatchConditions.Unconditionally.
The default value is Unconditionally.
:paramtype match_condition: ~azure.core.MatchConditions
:return: Dictionary mapping operation metadata returned from the service
:rtype: Dict[str,str]
:rtype: dict[str, Any]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -440,7 +437,7 @@ class TableClient(TablesBaseClient):
**kwargs
)
else:
raise ValueError("Mode type '{}' is not supported.".format(mode))
raise ValueError(f"Mode type '{mode}' is not supported.")
except HttpResponseError as error:
_process_table_error(error, table_name=self.table_name)
return _trim_service_metadata(metadata, content=content) # type: ignore
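A minimal sketch of the etag/match_condition flow described above (assumes an existing TableClient named table_client; keys and values are placeholders):

from azure.core import MatchConditions
from azure.data.tables import UpdateMode

entity = {"PartitionKey": "pk1", "RowKey": "rk1", "Value": 100}
metadata = table_client.upsert_entity(entity)

# Replace the entity only if it has not been modified since the upsert above;
# otherwise the service responds with 412 Precondition Failed.
entity["Value"] = 200
table_client.update_entity(
    entity,
    mode=UpdateMode.REPLACE,
    etag=metadata["etag"],
    match_condition=MatchConditions.IfNotModified,
)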
@ -451,9 +448,9 @@ class TableClient(TablesBaseClient):
:keyword int results_per_page: Number of entities returned per service request.
:keyword select: Specify desired properties of an entity to return.
:paramtype select: str or List[str]
:return: ItemPaged[:class:`~azure.data.tables.TableEntity`]
:rtype: ~azure.core.paging.ItemPaged
:paramtype select: str or list[str]
:return: An iterator of :class:`~azure.data.tables.TableEntity`
:rtype: ~azure.core.paging.ItemPaged[~azure.data.tables.TableEntity]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -488,11 +485,11 @@ class TableClient(TablesBaseClient):
on filter formatting, see the `samples documentation <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/tables/azure-data-tables/samples#writing-filters>`_.
:keyword int results_per_page: Number of entities returned per service request.
:keyword select: Specify desired properties of an entity to return.
:paramtype select: str or List[str]
:paramtype select: str or list[str]
:keyword parameters: Dictionary for formatting query with additional, user defined parameters
:paramtype parameters: Dict[str, Any]
:return: ItemPaged[:class:`~azure.data.tables.TableEntity`]
:rtype: ~azure.core.paging.ItemPaged
:paramtype parameters: dict[str, Any]
:return: An iterator of :class:`~azure.data.tables.TableEntity`
:rtype: ~azure.core.paging.ItemPaged[~azure.data.tables.TableEntity]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -530,9 +527,9 @@ class TableClient(TablesBaseClient):
:param row_key: The row key of the entity.
:type row_key: str
:keyword select: Specify desired properties of an entity to return.
:paramtype select: str or List[str]
:paramtype select: str or list[str]
:return: The matching entity from the table.
:rtype: :class:`~azure.data.tables.TableEntity`
:rtype: ~azure.data.tables.TableEntity
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -560,15 +557,15 @@ class TableClient(TablesBaseClient):
return _convert_to_entity(entity)
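A minimal sketch of the lookup described above (assumes an existing TableClient named table_client; keys and property names are placeholders):

entity = table_client.get_entity(
    partition_key="pk1",
    row_key="rk1",
    select=["Value"],  # optionally restrict which properties are returned
)
print(entity["Value"])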
@distributed_trace
def upsert_entity(self, entity: EntityType, mode: UpdateMode = UpdateMode.MERGE, **kwargs) -> Dict[str, str]:
def upsert_entity(self, entity: EntityType, mode: UpdateMode = UpdateMode.MERGE, **kwargs) -> Dict[str, Any]:
"""Update/Merge or Insert entity into table.
:param entity: The properties for the table entity.
:type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
:type entity: ~azure.data.tables.TableEntity or dict[str, Any]
:param mode: Merge or Replace entity
:type mode: :class:`~azure.data.tables.UpdateMode`
:type mode: ~azure.data.tables.UpdateMode
:return: Dictionary mapping operation metadata returned from the service
:rtype: Dict[str,str]
:rtype: dict[str, Any]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -606,15 +603,13 @@ class TableClient(TablesBaseClient):
)
else:
raise ValueError(
"""Update mode {} is not supported.
For a list of supported modes see the UpdateMode enum""".format(
mode
)
f"Update mode {mode} is not supported. For a list of supported modes see the UpdateMode enum."
)
except HttpResponseError as error:
_process_table_error(error, table_name=self.table_name)
return _trim_service_metadata(metadata, content=content) # type: ignore
@distributed_trace
def submit_transaction(
self, operations: Iterable[TransactionOperationType], **kwargs
) -> List[Mapping[str, Any]]:
@ -631,7 +626,7 @@ class TableClient(TablesBaseClient):
:type operations: Iterable[Tuple[str, TableEntity, Mapping[str, Any]]]
:return: A list of mappings with response metadata for each operation in the transaction.
:rtype: List[Mapping[str, Any]]
:rtype: list[Mapping[str, Any]]
:raises: :class:`~azure.data.tables.TableTransactionError`
.. admonition:: Example:
@ -655,9 +650,9 @@ class TableClient(TablesBaseClient):
try:
for operation in operations:
batched_requests.add_operation(operation)
except TypeError:
except TypeError as exc:
raise TypeError(
"The value of 'operations' must be an iterator "
"of Tuples. Please check documentation for correct Tuple format."
)
) from exc
return self._batch_send(self.table_name, *batched_requests.requests, **kwargs) # type: ignore
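A minimal sketch of the tuple format expected by submit_transaction (assumes an existing TableClient named table_client; all operations in a single transaction must share the same PartitionKey):

from azure.data.tables import UpdateMode

operations = [
    ("create", {"PartitionKey": "pk1", "RowKey": "rk1"}),
    ("upsert", {"PartitionKey": "pk1", "RowKey": "rk2"}, {"mode": UpdateMode.REPLACE}),
    ("delete", {"PartitionKey": "pk1", "RowKey": "rk3"}),
]
results = table_client.submit_transaction(operations)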


@ -42,37 +42,41 @@ class TableServiceClient(TablesBaseClient):
:keyword credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token. The value can be one of AzureNamedKeyCredential (azure-core),
AzureSasCredential (azure-core), or TokenCredentials from azure-identity.
AzureSasCredential (azure-core), or a TokenCredential implementation from azure-identity.
:paramtype credential:
:class:`~azure.core.credentials.AzureNamedKeyCredential` or
:class:`~azure.core.credentials.AzureSasCredential` or
:class:`~azure.core.credentials.TokenCredential`
~azure.core.credentials.AzureNamedKeyCredential or
~azure.core.credentials.AzureSasCredential or
~azure.core.credentials.TokenCredential or None
:keyword str api_version:
The Storage API version to use for requests. Default value is '2019-02-02'.
Setting to an older version may result in reduced feature compatibility.
.. admonition:: Example:
.. admonition:: Example:
.. literalinclude:: ../samples/sample_authentication.py
:start-after: [START auth_from_sas]
:end-before: [END auth_from_sas]
:language: python
:dedent: 8
:caption: Authenticating a TableServiceClient from a Shared Access Key
.. literalinclude:: ../samples/sample_authentication.py
:start-after: [START auth_from_sas]
:end-before: [END auth_from_sas]
:language: python
:dedent: 8
:caption: Authenticating a TableServiceClient from a Shared Access Key
.. literalinclude:: ../samples/sample_authentication.py
:start-after: [START auth_from_shared_key]
:end-before: [END auth_from_shared_key]
:language: python
:dedent: 8
:caption: Authenticating a TableServiceClient from a Shared Account Key
"""
.. literalinclude:: ../samples/sample_authentication.py
:start-after: [START auth_from_shared_key]
:end-before: [END auth_from_shared_key]
:language: python
:dedent: 8
:caption: Authenticating a TableServiceClient from a Shared Account Key
"""
def _format_url(self, hostname: str) -> str:
"""Format the endpoint URL according to the current location
mode hostname.
:param str hostname: The current location mode hostname.
:returns: The full URL to the Tables account.
:rtype: str
"""
return "{}://{}{}".format(self.scheme, hostname, self._query_str)
return f"{self.scheme}://{hostname}{self._query_str}"
@classmethod
def from_connection_string(cls, conn_str: str, **kwargs) -> 'TableServiceClient':
@ -80,7 +84,7 @@ class TableServiceClient(TablesBaseClient):
:param str conn_str: A connection string to an Azure Storage or Cosmos account.
:returns: A Table service client.
:rtype: :class:`~azure.data.tables.TableServiceClient`
:rtype: ~azure.data.tables.TableServiceClient
.. admonition:: Example:
@ -102,7 +106,7 @@ class TableServiceClient(TablesBaseClient):
location endpoint when read-access geo-redundant replication is enabled for the account.
:return: Dictionary of service stats
:rtype: Dict[str, object]
:rtype: dict[str, object]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
"""
try:
@ -120,7 +124,7 @@ class TableServiceClient(TablesBaseClient):
including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules.
:return: Dictionary of service properties
:rtype: Dict[str, object]
:rtype: dict[str, object]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
"""
timeout = kwargs.pop("timeout", None)
@ -146,9 +150,8 @@ class TableServiceClient(TablesBaseClient):
:keyword minute_metrics: Minute level metrics
:paramtype minute_metrics: ~azure.data.tables.TableMetrics
:keyword cors: Cross-origin resource sharing rules
:paramtype cors: List[~azure.data.tables.TableCorsRule]
:paramtype cors: list[~azure.data.tables.TableCorsRule]
:return: None
:rtype: None
:raises: :class:`~azure.core.exceptions.HttpResponseError`
"""
cors = kwargs.pop('cors', None)
@ -176,7 +179,7 @@ class TableServiceClient(TablesBaseClient):
:param table_name: The Table name.
:type table_name: str
:return: TableClient
:rtype: :class:`~azure.data.tables.TableClient`
:rtype: ~azure.data.tables.TableClient
:raises: :class:`~azure.core.exceptions.ResourceExistsError`
.. admonition:: Example:
@ -201,7 +204,7 @@ class TableServiceClient(TablesBaseClient):
:param table_name: The Table name.
:type table_name: str
:return: TableClient
:rtype: :class:`~azure.data.tables.TableClient`
:rtype: ~azure.data.tables.TableClient
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -228,7 +231,6 @@ class TableServiceClient(TablesBaseClient):
:param table_name: The Table name.
:type table_name: str
:return: None
:rtype: None
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -250,9 +252,9 @@ class TableServiceClient(TablesBaseClient):
:param str query_filter: Specify a filter to return certain tables.
:keyword int results_per_page: Number of tables per page in return ItemPaged
:keyword parameters: Dictionary for formatting query with additional, user defined parameters
:paramtype parameters: Dict[str, Any]
:return: ItemPaged[:class:`~azure.data.tables.TableItem`]
:rtype: ~azure.core.paging.ItemPaged
:paramtype parameters: dict[str, Any]
:return: An iterator of :class:`~azure.data.tables.TableItem`
:rtype: ~azure.core.paging.ItemPaged[~azure.data.tables.TableItem]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -283,8 +285,8 @@ class TableServiceClient(TablesBaseClient):
"""Queries tables under the given account.
:keyword int results_per_page: Number of tables per page in returned ItemPaged
:return: ItemPaged[:class:`~azure.data.tables.TableItem`]
:rtype: ~azure.core.paging.ItemPaged
:return: An iterator of :class:`~azure.data.tables.TableItem`
:rtype: ~azure.core.paging.ItemPaged[~azure.data.tables.TableItem]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -312,7 +314,7 @@ class TableServiceClient(TablesBaseClient):
:param str table_name: The table name
:returns: A :class:`~azure.data.tables.TableClient` object.
:rtype: :class:`~azure.data.tables.TableClient`
:rtype: ~azure.data.tables.TableClient
"""
pipeline = Pipeline( # type: ignore


@ -4,10 +4,10 @@
# license information.
# --------------------------------------------------------------------------
from datetime import datetime
from typing import Union
from typing import Optional, Union
from azure.core.credentials import AzureNamedKeyCredential
from ._models import AccountSasPermissions, ResourceTypes
from ._models import AccountSasPermissions, TableSasPermissions, ResourceTypes, SASProtocol
from ._common_conversion import _sign_string
from ._error import _validate_not_none
from ._constants import X_MS_VERSION
@ -30,7 +30,7 @@ def generate_account_sas(
Use the returned signature with the sas_token parameter of TableService.
:param credential: Credential for the Azure account
:type credential: :class:`~azure.core.credentials.AzureNamedKeyCredential`
:type credential: ~azure.core.credentials.AzureNamedKeyCredential
:param resource_types:
Specifies the resource types that are accessible with the account SAS.
:type resource_types: ResourceTypes
@ -48,14 +48,14 @@ def generate_account_sas(
been specified in an associated stored access policy. Azure will always
convert values to UTC. If a date is passed in without timezone info, it
is assumed to be UTC.
:type expiry: datetime or str
:type expiry: ~datetime.datetime or str
:keyword start:
The time at which the shared access signature becomes valid. If
omitted, start time for this call is assumed to be the time when the
storage service receives the request. Azure will always convert values
to UTC. If a date is passed in without timezone info, it is assumed to
be UTC.
:paramtype start: datetime or str
:paramtype start: ~datetime.datetime or str
:keyword str ip_address_or_range:
Specifies an IP address or a range of IP addresses from which to accept requests.
If the IP address from which the request originates does not match the IP address
@ -64,19 +64,19 @@ def generate_account_sas(
restricts the request to those IP addresses.
:keyword protocol:
Specifies the protocol permitted for a request made.
:paramtype protocol: str or SASProtocol
:paramtype protocol: str or ~azure.data.tables.SASProtocol
:return: A Shared Access Signature (sas) token.
:rtype: str
"""
_validate_not_none("account_name", credential.named_key.name)
_validate_not_none("account_key", credential.named_key.key)
if permission is str:
permission = AccountSasPermissions.from_string(permission=permission) # type: ignore
permission = AccountSasPermissions.from_string(permission=permission) # type: ignore[arg-type]
sas = TableSharedAccessSignature(credential)
return sas.generate_account(
"t",
resource_types,
permission,
permission, # type: ignore[arg-type]
expiry,
start=kwargs.pop("start", None),
ip_address_or_range=kwargs.pop("ip_address_or_range", None),
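A minimal sketch of generating and using an account SAS as documented above (account name and key are placeholders; the exact ResourceTypes/AccountSasPermissions flags depend on what the token should allow):

from datetime import datetime, timedelta, timezone
from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential
from azure.data.tables import (
    AccountSasPermissions,
    ResourceTypes,
    TableServiceClient,
    generate_account_sas,
)

credential = AzureNamedKeyCredential("myaccount", "<account-key>")
sas_token = generate_account_sas(
    credential,
    resource_types=ResourceTypes(object=True),
    permission=AccountSasPermissions(read=True, write=True),
    expiry=datetime.now(timezone.utc) + timedelta(hours=1),
)
service = TableServiceClient(
    endpoint="https://myaccount.table.core.windows.net",
    credential=AzureSasCredential(sas_token),
)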
@ -91,10 +91,10 @@ def generate_table_sas(credential: AzureNamedKeyCredential, table_name: str, **k
:param credential: Credential used for creating Shared Access Signature
:type credential: :class:`~azure.core.credentials.AzureNamedKeyCredential`
:type credential: ~azure.core.credentials.AzureNamedKeyCredential
:param table_name: Table name
:type table_name: str
:keyword TableSasPermissions permission:
:keyword ~azure.data.tables.TableSasPermissions permission:
The permissions associated with the shared access signature. The
user is restricted to operations allowed by the permissions.
Required unless an id is given referencing a stored access policy
@ -107,14 +107,14 @@ def generate_table_sas(credential: AzureNamedKeyCredential, table_name: str, **k
been specified in an associated stored access policy. Azure will always
convert values to UTC. If a date is passed in without timezone info, it
is assumed to be UTC.
:paramtype expiry: datetime or str
:paramtype expiry: ~datetime.datetime or str
:keyword start:
The time at which the shared access signature becomes valid. If
omitted, start time for this call is assumed to be the time when the
storage service receives the request. Azure will always convert values
to UTC. If a date is passed in without timezone info, it is assumed to
be UTC.
:paramtype start: datetime or str
:paramtype start: ~datetime.datetime or str
:keyword str ip_address_or_range:
Specifies an IP address or a range of IP addresses from which to accept requests.
If the IP address from which the request originates does not match the IP address
@ -124,11 +124,11 @@ def generate_table_sas(credential: AzureNamedKeyCredential, table_name: str, **k
:keyword str policy_id: Access policy ID.
:keyword protocol:
Specifies the protocol permitted for a request made.
:paramtype protocol: str or SASProtocol
:keyword str end_rk: End row key
:keyword str end_pk: End partition key
:paramtype protocol: str or ~azure.data.tables.SASProtocol
:keyword str start_rk: Starting row key
:keyword str start_pk: Starting partition key
:keyword str end_rk: End row key
:keyword str end_pk: End partition key
:return: A Shared Access Signature (sas) token.
:rtype: str
"""
@ -156,13 +156,12 @@ class TableSharedAccessSignature(SharedAccessSignature):
signature tokens with a common account name and account key. Users can either
use the factory or can construct the appropriate service and use the
generate_*_shared_access_signature method directly.
:param credential: The credential used for authenticating requests.
:type credential: ~azure.core.credentials.AzureNamedKeyCredential
"""
def __init__(self, credential):
"""
:param credential: The credential used for authenticating requests
:type credential: :class:`~azure.core.credentials.NamedKeyCredential`
"""
def __init__(self, credential: AzureNamedKeyCredential):
super(TableSharedAccessSignature, self).__init__(
credential, x_ms_version=X_MS_VERSION
)
@ -170,30 +169,31 @@ class TableSharedAccessSignature(SharedAccessSignature):
def generate_table(
self,
table_name,
permission=None,
expiry=None,
start=None,
policy_id=None,
ip_address_or_range=None,
protocol=None,
start_pk=None,
start_rk=None,
end_pk=None,
end_rk=None,
permission: Optional[TableSasPermissions] = None,
expiry: Optional[Union[datetime, str]] = None,
start: Optional[Union[datetime, str]] = None,
policy_id: Optional[str] = None,
ip_address_or_range: Optional[str] = None,
protocol: Optional[Union[str, SASProtocol]] = None,
start_pk: Optional[str] = None,
start_rk: Optional[str] = None,
end_pk: Optional[str] = None,
end_rk: Optional[str] = None,
**kwargs # pylint: disable=unused-argument
):
) -> str:
"""
Generates a shared access signature for the table.
Use the returned signature with the sas_token parameter of TableService.
:param str table_name:
Name of table.
:param TablePermissions permission:
:param permission:
The permissions associated with the shared access signature. The
user is restricted to operations allowed by the permissions.
Required unless an id is given referencing a stored access policy
which contains this field. This field must be omitted if it has been
specified in an associated stored access policy.
:type permission: ~azure.data.tables.TableSasPermissions or None
:param expiry:
The time at which the shared access signature becomes invalid.
Required unless an id is given referencing a stored access policy
@ -201,46 +201,55 @@ class TableSharedAccessSignature(SharedAccessSignature):
been specified in an associated stored access policy. Azure will always
convert values to UTC. If a date is passed in without timezone info, it
is assumed to be UTC.
:type expiry: datetime or str
:type expiry: ~datetime.datetime or str or None
:param start:
The time at which the shared access signature becomes valid. If
omitted, start time for this call is assumed to be the time when the
storage service receives the request. Azure will always convert values
to UTC. If a date is passed in without timezone info, it is assumed to
be UTC.
:type start: datetime or str
:param str policy_id:
:type start: ~datetime.datetime or str or None
:param policy_id:
A unique value up to 64 characters in length that correlates to a
stored access policy. To create a stored access policy, use
set_table_service_properties.
:param str ip_address_or_range:
:type policy_id: str or None
:param ip_address_or_range:
Specifies an IP address or a range of IP addresses from which to accept requests.
If the IP address from which the request originates does not match the IP address
or address range specified on the SAS token, the request is not authenticated.
For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS
restricts the request to those IP addresses.
:param str protocol:
Specifies the protocol permitted for a request made. The default value
is https,http. See :class:`~azure.cosmosdb.table.common.models.Protocol` for possible values.
:param str start_pk:
:type ip_address_or_range: str or None
:param protocol:
Specifies the protocol permitted for a request made.
See :class:`~azure.data.tables.SASProtocol` for possible values.
:type protocol: str or ~azure.data.tables.SASProtocol or None
:param start_pk:
The minimum partition key accessible with this shared access
signature. start_pk must accompany start_rk. Key values are inclusive.
If omitted, there is no lower bound on the table entities that can
be accessed.
:param str start_rk:
:type start_pk: str or None
:param start_rk:
The minimum row key accessible with this shared access signature.
start_pk must accompany start_rk. Key values are inclusive. If
omitted, there is no lower bound on the table entities that can be
accessed.
:param str end_pk:
:type start_rk: str or None
:param end_pk:
The maximum partition key accessible with this shared access
signature. end_pk must accompany end_rk. Key values are inclusive. If
omitted, there is no upper bound on the table entities that can be
accessed.
:param str end_rk:
:type end_pk: str or None
:param end_rk:
The maximum row key accessible with this shared access signature.
end_pk must accompany end_rk. Key values are inclusive. If omitted,
there is no upper bound on the table entities that can be accessed.
:type end_rk: str or None
:return: A shared access signature for the table.
:rtype: str
"""
sas = _TableSharedAccessHelper()
sas.add_base(


@ -21,12 +21,11 @@ class AsyncBearerTokenChallengePolicy(AsyncBearerTokenCredentialPolicy):
authentication challenges.
:param credential: The credential.
:type credential: ~azure.core.AsyncTokenCredential
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param str scopes: Lets you specify the type of access needed.
:keyword bool discover_tenant: Determines if tenant discovery should be enabled. Defaults to True.
:keyword bool discover_scopes: Determines if scopes from authentication challenges should be provided to token
requests, instead of the scopes given to the policy's constructor, if any are present. Defaults to True.
:raises: :class:`~azure.core.exceptions.ServiceRequestError`
"""
def __init__(
@ -49,6 +48,7 @@ class AsyncBearerTokenChallengePolicy(AsyncBearerTokenCredentialPolicy):
:param ~azure.core.pipeline.PipelineRequest request: the request which elicited an authentication challenge
:param ~azure.core.pipeline.PipelineResponse response: the resource provider's response
:returns: a bool indicating whether the policy should send the request
:rtype: bool
"""
if not self._discover_tenant and not self._discover_scopes:
# We can't discover the tenant or use a different scope; the request will fail because it hasn't changed
@ -119,5 +119,5 @@ def _configure_credential(
if isinstance(credential, AzureNamedKeyCredential):
return SharedKeyCredentialPolicy(credential)
if credential is not None:
raise TypeError("Unsupported credential: {}".format(credential))
raise TypeError(f"Unsupported credential: {credential}")
return None

Просмотреть файл

@ -41,18 +41,10 @@ from ._policies_async import AsyncTablesRetryPolicy
class AsyncTablesBaseClient(AccountHostsMixin):
"""Base class for TableClient
:param str endpoint: A URL to an Azure Tables account.
:keyword credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token. The value can be one of AzureNamedKeyCredential (azure-core),
AzureSasCredential (azure-core), or AsyncTokenCredential from azure-identity.
:paramtype credential:
:class:`~azure.core.credentials.AzureNamedKeyCredential` or
:class:`~azure.core.credentials.AzureSasCredential` or
:class:`~azure.core.credentials.AsyncTokenCredential`
:keyword api_version: Specifies the version of the operation to use for this request. Default value
is "2019-02-02". Note that overriding this default value may result in unsupported behavior.
:paramtype api_version: str
:ivar str account_name: The name of the Tables account.
:ivar str scheme: The scheme component in the full URL to the Tables account.
:ivar str url: The storage endpoint.
:ivar str api_version: The service API version.
"""
def __init__( # pylint: disable=missing-client-constructor-parameter-credential
@ -62,6 +54,21 @@ class AsyncTablesBaseClient(AccountHostsMixin):
credential: Optional[Union[AzureSasCredential, AzureNamedKeyCredential, AsyncTokenCredential]] = None,
**kwargs
) -> None:
"""Create TablesBaseClient from a Credential.
:param str endpoint: A URL to an Azure Tables account.
:keyword credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token. The value can be one of AzureNamedKeyCredential (azure-core),
AzureSasCredential (azure-core), or an AsyncTokenCredential implementation from azure-identity.
:paramtype credential:
~azure.core.credentials.AzureNamedKeyCredential or
~azure.core.credentials.AzureSasCredential or
~azure.core.credentials_async.AsyncTokenCredential or None
:keyword api_version: Specifies the version of the operation to use for this request. Default value
is "2019-02-02".
:paramtype api_version: str
"""
super(AsyncTablesBaseClient, self).__init__(endpoint, credential=credential, **kwargs) # type: ignore
self._client = AzureTable(
self.url,
@ -102,16 +109,25 @@ class AsyncTablesBaseClient(AccountHostsMixin):
]
async def _batch_send(self, table_name: str, *reqs: HttpRequest, **kwargs) -> List[Mapping[str, Any]]:
"""Given a series of request, do a Storage batch call."""
# pylint:disable=docstring-should-be-keyword
"""Given a series of request, do a Storage batch call.
:param table_name: The table name.
:type table_name: str
:param reqs: The HTTP requests.
:type reqs: ~azure.core.pipeline.transport.HttpRequest
:return: A list of batch part metadata in response.
:rtype: list[Mapping[str, Any]]
"""
# Pop it here, so requests doesn't feel bad about additional kwarg
policies = [StorageHeadersPolicy()]
changeset = HttpRequest("POST", None) # type: ignore
changeset.set_multipart_mixed(
*reqs, policies=policies, boundary="changeset_{}".format(uuid4())
*reqs, policies=policies, boundary=f"changeset_{uuid4()}"
)
request = self._client._client.post( # pylint: disable=protected-access
url="{}://{}/$batch".format(self.scheme, self._primary_hostname),
url=f"{self.scheme}://{self._primary_hostname}/$batch",
headers={
"x-ms-version": self.api_version,
"DataServiceVersion": "3.0",
@ -124,7 +140,7 @@ class AsyncTablesBaseClient(AccountHostsMixin):
changeset,
policies=policies,
enforce_https=False,
boundary="batch_{}".format(uuid4()),
boundary=f"batch_{uuid4()}",
)
pipeline_response = await self._client._client._pipeline.run(request, **kwargs) # pylint: disable=protected-access
@ -164,6 +180,9 @@ class AsyncTransportWrapper(AsyncHttpTransport):
"""Wrapper class that ensures that an inner client created
by a `get_client` method does not close the outer transport for the parent
when used in a context manager.
:param async_transport: The async Http Transport instance.
:type async_transport: ~azure.core.pipeline.transport.AsyncHttpTransport
"""
def __init__(self, async_transport):
self._transport = async_transport


@ -21,37 +21,48 @@ class AsyncTablesRetryPolicy(AsyncRetryPolicy):
The retry policy in the pipeline can be configured directly, or tweaked on a per-call basis.
:keyword bool retry_to_secondary: Whether to allow retrying to the secondary fail-over host
location. Default value is False.
:keyword int retry_total: Total number of retries to allow. Takes precedence over other counts.
Default value is 10.
:keyword int retry_connect: How many connection-related errors to retry on.
These are errors raised before the request is sent to the remote server,
which we assume has not triggered the server to process the request. Default value is 3.
:keyword int retry_read: How many times to retry on read errors.
These errors are raised after the request was sent to the server, so the
request may have side-effects. Default value is 3.
:keyword int retry_status: How many times to retry on bad status codes. Default value is 3.
:keyword float retry_backoff_factor: A backoff factor to apply between attempts after the second try
(most errors are resolved immediately by a second try without a delay).
In fixed mode, retry policy will always sleep for {backoff factor}.
In 'exponential' mode, retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))`
seconds. If the backoff_factor is 0.1, then the retry will sleep
for [0.0s, 0.2s, 0.4s, ...] between retries. The default value is 0.8.
:keyword int retry_backoff_max: The maximum back off time. Default value is 120 seconds (2 minutes).
:keyword RetryMode retry_mode: Fixed or exponential delay between attempts, default is exponential.
:keyword int timeout: Timeout setting for the operation in seconds, default is 604800s (7 days).
:ivar int total_retries: Total number of retries to allow. Takes precedence over other counts.
Default value is 10.
:ivar int connect_retries: How many connection-related errors to retry on.
These are errors raised before the request is sent to the remote server,
which we assume has not triggered the server to process the request. Default value is 3.
:ivar int read_retries: How many times to retry on read errors.
These errors are raised after the request was sent to the server, so the
request may have side-effects. Default value is 3.
:ivar int status_retries: How many times to retry on bad status codes. Default value is 3.
:ivar float backoff_factor: A backoff factor to apply between attempts after the second try
(most errors are resolved immediately by a second try without a delay).
In fixed mode, retry policy will always sleep for {backoff factor}. In 'exponential' mode,
retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))` seconds.
If the backoff_factor is 0.1, then the retry will sleep for [0.0s, 0.2s, 0.4s, ...] between retries.
The default value is 0.8.
:ivar int backoff_max: The maximum back off time. Default value is 120 seconds (2 minutes).
:ivar retry_mode: Fixed or exponential delay between attempts, default is exponential.
:vartype retry_mode: ~azure.core.pipeline.policies.RetryMode
:ivar int timeout: Timeout setting for the operation in seconds, default is 604800s (7 days).
"""
def __init__(self, **kwargs):
"""
:keyword bool retry_to_secondary: Whether to allow retrying to the secondary fail-over host
location. Default value is False.
:keyword int retry_total: Total number of retries to allow. Takes precedence over other counts.
Default value is 10.
:keyword int retry_connect: How many connection-related errors to retry on.
These are errors raised before the request is sent to the remote server,
which we assume has not triggered the server to process the request. Default value is 3.
:keyword int retry_read: How many times to retry on read errors.
These errors are raised after the request was sent to the server, so the
request may have side-effects. Default value is 3.
:keyword int retry_status: How many times to retry on bad status codes. Default value is 3.
:keyword float retry_backoff_factor: A backoff factor to apply between attempts after the second try
(most errors are resolved immediately by a second try without a delay).
In fixed mode, retry policy will always sleep for {backoff factor}. In 'exponential' mode,
retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))` seconds.
If the retry_backoff_factor is 0.1, then the retry will sleep for [0.0s, 0.2s, 0.4s, ...] between retries.
The default value is 0.8.
:keyword int retry_backoff_max: The maximum back off time. Default value is 120 seconds (2 minutes).
"""
super(AsyncTablesRetryPolicy, self).__init__(**kwargs)
self.retry_to_secondary = kwargs.get('retry_to_secondary', False)
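To make the back-off formula above concrete, here is a small sketch that reproduces the documented delay sequence (an illustration only, not the policy's internal azure-core implementation; it assumes a 1-based retry counter with no delay before the first retry):

def retry_delay(retry_number: int, backoff_factor: float = 0.8, backoff_max: int = 120) -> float:
    # The first retry happens immediately; later retries back off exponentially,
    # capped at backoff_max: {backoff factor} * (2 ** ({retry number} - 1)).
    if retry_number <= 1:
        return 0.0
    return min(backoff_max, backoff_factor * (2 ** (retry_number - 1)))

# With backoff_factor=0.1 this yields 0.0s, 0.2s, 0.4s, 0.8s, ...
delays = [retry_delay(n, backoff_factor=0.1) for n in range(1, 6)]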
@ -61,6 +72,13 @@ class AsyncTablesRetryPolicy(AsyncRetryPolicy):
respect the Retry-After header, whether this header is present, and
whether the returned status code is on the list of status codes to
be retried upon on the presence of the aforementioned header)
:param settings: The retry settings.
:type settings: dict[str, Any]
:param response: The PipelineResponse object
:type response: ~azure.core.pipeline.PipelineResponse
:return: True if method/status code is retryable. False if not retryable.
:rtype: bool
"""
should_retry = super(AsyncTablesRetryPolicy, self).is_retry(settings, response)
status = response.http_response.status_code
@ -73,8 +91,9 @@ class AsyncTablesRetryPolicy(AsyncRetryPolicy):
"""Configures the retry settings.
:param options: keyword arguments from context.
:type options: dict[str, Any]
:return: A dict containing settings and history for retries.
:rtype: Dict
:rtype: dict[str, Any]
"""
config = super(AsyncTablesRetryPolicy, self).configure_retries(options)
config["retry_secondary"] = options.pop("retry_to_secondary", self.retry_to_secondary)
@ -93,11 +112,13 @@ class AsyncTablesRetryPolicy(AsyncRetryPolicy):
super(AsyncTablesRetryPolicy, self).update_context(context, retry_settings)
context['location_mode'] = retry_settings['mode']
def update_request(self, request, retry_settings): # pylint: disable=no-self-use
def update_request(self, request, retry_settings):
"""Updates the pipeline request before attempting to retry.
:param PipelineRequest request: The outgoing request.
:param Dict(str, Any) retry_settings: The current retry context settings.
:param request: The outgoing request.
:type request: ~azure.core.pipeline.PipelineRequest
:param retry_settings: The current retry context settings.
:type retry_settings: dict[str, Any]
"""
set_next_host_location(retry_settings, request)
@ -118,8 +139,8 @@ class AsyncTablesRetryPolicy(AsyncRetryPolicy):
is_response_error = True
while retry_active:
start_time = time.time()
try:
start_time = time.time()
self._configure_timeout(request, absolute_timeout, is_response_error)
response = await self.next.send(request)
if self.is_retry(retry_settings, response):


@ -31,6 +31,9 @@ class TableBatchOperations(object):
supported within a single transaction. The batch can include at most 100
entities, and its total payload may be no more than 4 MB in size.
:ivar str table_name: The name of the table.
:ivar requests: A list of :class:`~azure.core.pipeline.transport.HttpRequest` in a batch.
:vartype requests: list[~azure.core.pipeline.transport.HttpRequest]
"""
def __init__(
@ -43,6 +46,22 @@ class TableBatchOperations(object):
is_cosmos_endpoint: bool = False,
**kwargs
) -> None:
"""Create TableClient from a Credential.
:param client: An AzureTable object.
:type client: ~azure.data.tables._generated.aio.AzureTable
:param serializer: A Serializer object for request serialization.
:type serializer: ~azure.data.tables._generated._serialization.Serializer
:param deserializer: A Deserializer object for request deserialization.
:type deserializer: ~azure.data.tables._generated._serialization.Deserializer
:param config: An AzureTableConfiguration object.
:type config: ~azure.data.tables._generated.aio._configuration.AzureTableConfiguration
:param table_name: The name of the Table to perform operations on.
:type table_name: str
:param is_cosmos_endpoint: True if the client endpoint is for Tables Cosmos. False if not. Default is False.
:type is_cosmos_endpoint: bool
:return: None
"""
self._client = client
self._serialize = serializer
self._deserialize = deserializer
@ -65,15 +84,23 @@ class TableBatchOperations(object):
raise ValueError("Partition Keys must all be the same")
def add_operation(self, operation: TransactionOperationType) -> None:
"""Add a single operation to a batch."""
"""Add a single operation to a batch.
:param operation: An operation, consisting of an operation type and an entity, optionally with keyword arguments.
:type operation: A tuple of ~azure.data.tables.TransactionOperation or str, and
~azure.data.tables.TableEntity or Mapping[str, Any]. Or a tuple of
~azure.data.tables.TransactionOperation or str, and
~azure.data.tables.TableEntity or Mapping[str, Any], and Mapping[str, Any]
:return: None
"""
try:
operation_type, entity, kwargs = operation # type: ignore
except ValueError:
operation_type, entity, kwargs = *operation, {} # type: ignore
try:
getattr(self, operation_type.lower())(entity, **kwargs)
except AttributeError:
raise ValueError("Unrecognized operation: {}".format(operation))
except AttributeError as exc:
raise ValueError(f"Unrecognized operation: {operation}") from exc
def create(
self,
@ -83,10 +110,9 @@ class TableBatchOperations(object):
"""Insert entity in a table.
:param entity: The properties for the table entity.
:type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
:type entity: ~azure.data.tables.TableEntity or dict[str, Any]
:return: None
:rtype: None
:raises ValueError:
:raises ValueError: If PartitionKey and/or RowKey were not provided in entity.
.. admonition:: Example:
@ -129,18 +155,19 @@ class TableBatchOperations(object):
:param entity:
The entity to insert. Can be a dict or an entity object.
Must contain a PartitionKey and a RowKey.
:type: entity: Dict or :class:`~azure.data.tables.models.Entity`
:type entity: dict[str, Any] or ~azure.data.tables.models.Entity
:param timeout: The timeout parameter is expressed in seconds.
:type timeout: int
:type timeout: int or None
:param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
limit that is recorded in the analytics logs when analytics logging is enabled.
:type request_id_parameter: str
limit that is recorded in the analytics logs when analytics logging is enabled.
:type request_id_parameter: str or None
:param response_preference: Specifies the return format. Default is return without content.
:type response_preference: str or ~azure.data.tables.models.ResponseFormat
:param format: Specifies the media type for the response. Known values are:
"application/json;odata=nometadata", "application/json;odata=minimalmetadata", and
"application/json;odata=fullmetadata".
:type format: str or ~azure.data.tables.models.OdataMetadataFormat
"application/json;odata=nometadata", "application/json;odata=minimalmetadata", and
"application/json;odata=fullmetadata".
:type format: str or ~azure.data.tables.models.OdataMetadataFormat or None
:return: None
"""
data_service_version = "3.0"
content_type = kwargs.pop("content_type", "application/json;odata=nometadata")
@ -212,14 +239,13 @@ class TableBatchOperations(object):
"""Adds an update operation to the current batch.
:param entity: The properties for the table entity.
:type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
:type entity: ~azure.data.tables.TableEntity or dict[str, Any]
:param mode: Merge or Replace entity
:type mode: :class:`~azure.data.tables.UpdateMode`
:type mode: ~azure.data.tables.UpdateMode
:keyword str etag: Etag of the entity
:keyword match_condition: MatchCondition
:paramtype match_condition: ~azure.core.MatchConditions
:return: None
:rtype: None
:raises ValueError:
.. admonition:: Example:
@ -268,7 +294,7 @@ class TableBatchOperations(object):
**kwargs
)
else:
raise ValueError("Mode type '{}' is not supported.".format(mode))
raise ValueError(f"Mode type '{mode}' is not supported.")
def _batch_update_entity(
self,
@ -291,21 +317,22 @@ class TableBatchOperations(object):
:param row_key: The row key of the entity.
:type row_key: str
:param timeout: The timeout parameter is expressed in seconds.
:type timeout: int
:type timeout: int or None
:param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
limit that is recorded in the analytics logs when analytics logging is enabled.
:type request_id_parameter: str
limit that is recorded in the analytics logs when analytics logging is enabled.
:type request_id_parameter: str or None
:param if_match: Match condition for an entity to be updated. If specified and a matching
entity is not found, an error will be raised. To force an unconditional update, set to the
wildcard character (*). If not specified, an insert will be performed when no existing entity
is found to update and a replace will be performed if an existing entity is found.
:type if_match: str
entity is not found, an error will be raised. To force an unconditional update, set to the
wildcard character (*). If not specified, an insert will be performed when no existing entity
is found to update and a replace will be performed if an existing entity is found.
:type if_match: str or None
:param table_entity_properties: The properties for the table entity.
:type table_entity_properties: Dict[str, object]
:type table_entity_properties: dict[str, object] or None
:param format: Specifies the media type for the response. Known values are:
"application/json;odata=nometadata", "application/json;odata=minimalmetadata", and
"application/json;odata=fullmetadata".
:type format: str or ~azure.data.tables.models.OdataMetadataFormat
"application/json;odata=nometadata", "application/json;odata=minimalmetadata", and
"application/json;odata=fullmetadata".
:type format: str or ~azure.data.tables.models.OdataMetadataFormat or None
:return: None
"""
data_service_version = "3.0"
content_type = kwargs.pop("content_type", "application/json")
@ -393,21 +420,22 @@ class TableBatchOperations(object):
:param row_key: The row key of the entity.
:type row_key: str
:param timeout: The timeout parameter is expressed in seconds.
:type timeout: int
:type timeout: int or None
:param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
limit that is recorded in the analytics logs when analytics logging is enabled.
:type request_id_parameter: str
limit that is recorded in the analytics logs when analytics logging is enabled.
:type request_id_parameter: str or None
:param if_match: Match condition for an entity to be updated. If specified and a matching
entity is not found, an error will be raised. To force an unconditional update, set to the
wildcard character (*). If not specified, an insert will be performed when no existing entity
is found to update and a merge will be performed if an existing entity is found.
:type if_match: str
entity is not found, an error will be raised. To force an unconditional update, set to the
wildcard character (*). If not specified, an insert will be performed when no existing entity
is found to update and a merge will be performed if an existing entity is found.
:type if_match: str or None
:param table_entity_properties: The properties for the table entity.
:type table_entity_properties: Dict[str, object]
:type table_entity_properties: dict[str, object] or None
:param format: Specifies the media type for the response. Known values are:
"application/json;odata=nometadata", "application/json;odata=minimalmetadata", and
"application/json;odata=fullmetadata".
:type format: str or ~azure.data.tables.models.OdataMetadataFormat
"application/json;odata=nometadata", "application/json;odata=minimalmetadata", and
"application/json;odata=fullmetadata".
:type format: str or ~azure.data.tables.models.OdataMetadataFormat or None
:return: None
"""
data_service_version = "3.0"
content_type = kwargs.pop("content_type", "application/json")
@ -483,13 +511,12 @@ class TableBatchOperations(object):
) -> None:
"""Deletes the specified entity in a table.
:param partition_key: The partition key of the entity.
:type partition_key: str
:param row_key: The row key of the entity.
:type row_key: str
:param entity: The properties for the table entity.
:type entity: ~azure.data.tables.TableEntity or dict[str, Any]
:keyword str etag: Etag of the entity
:keyword match_condition: MatchCondition
:paramtype match_condition: ~azure.core.MatchConditions
:return: None
.. admonition:: Example:
@ -548,14 +575,15 @@ class TableBatchOperations(object):
wildcard character (*).
:type if_match: str
:param timeout: The timeout parameter is expressed in seconds.
:type timeout: int
:type timeout: int or None
:param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
limit that is recorded in the analytics logs when analytics logging is enabled.
:type request_id_parameter: str
:type request_id_parameter: str or None
:param format: Specifies the media type for the response. Known values are:
"application/json;odata=nometadata", "application/json;odata=minimalmetadata", and
"application/json;odata=fullmetadata".
:type format: str or ~azure.data.tables.models.OdataMetadataFormat
:type format: str or ~azure.data.tables.models.OdataMetadataFormat or None
:return: None
"""
data_service_version = "3.0"
accept = "application/json;odata=minimalmetadata"
@ -620,12 +648,11 @@ class TableBatchOperations(object):
"""Update/Merge or Insert entity into table.
:param entity: The properties for the table entity.
:type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
:type entity: ~azure.data.tables.TableEntity or dict[str, Any]
:param mode: Merge or Replace entity
:type mode: :class:`~azure.data.tables.UpdateMode`
:type mode: ~azure.data.tables.UpdateMode
:return: None
:rtype: None
:raises ValueError:
:raises ValueError: If mode type is not supported.
.. admonition:: Example:
@ -660,4 +687,4 @@ class TableBatchOperations(object):
**kwargs
)
else:
raise ValueError("Mode type '{}' is not supported.".format(mode))
raise ValueError(f"Mode type '{mode}' is not supported.")


@ -47,7 +47,9 @@ class TableClient(AsyncTablesBaseClient):
:ivar str account_name: The name of the Tables account.
:ivar str table_name: The name of the table.
:ivar str url: The full URL to the Tables account.
:ivar str scheme: The scheme component in the full URL to the Tables account.
:ivar str url: The storage endpoint.
:ivar str api_version: The service API version.
"""
def __init__( # pylint: disable=missing-client-constructor-parameter-credential
@ -65,15 +67,14 @@ class TableClient(AsyncTablesBaseClient):
:keyword credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token. The value can be one of AzureNamedKeyCredential (azure-core),
AzureSasCredential (azure-core), or AsyncTokenCredential from azure-identity.
AzureSasCredential (azure-core), or an AsyncTokenCredential implementation from azure-identity.
:paramtype credential:
:class:`~azure.core.credentials.AzureNamedKeyCredential` or
:class:`~azure.core.credentials.AzureSasCredential` or
:class:`~azure.core.credentials.AsyncTokenCredential`
~azure.core.credentials.AzureNamedKeyCredential or
~azure.core.credentials.AzureSasCredential or
~azure.core.credentials_async.AsyncTokenCredential or None
:keyword api_version: Specifies the version of the operation to use for this request. Default value
is "2019-02-02". Note that overriding this default value may result in unsupported behavior.
:paramtype api_version: str
:returns: None
"""
if not table_name:
@ -84,8 +85,12 @@ class TableClient(AsyncTablesBaseClient):
def _format_url(self, hostname):
"""Format the endpoint URL according to the current location
mode hostname.
:param str hostname: The current location mode hostname.
:returns: The full URL to the Tables account.
:rtype: str
"""
return "{}://{}{}".format(self.scheme, hostname, self._query_str)
return f"{self.scheme}://{hostname}{self._query_str}"
@classmethod
def from_connection_string(
@ -99,7 +104,7 @@ class TableClient(AsyncTablesBaseClient):
:param str conn_str: A connection string to an Azure Tables account.
:param str table_name: The table name.
:returns: A table client.
:rtype: :class:`~azure.data.tables.TableClient`
:rtype: ~azure.data.tables.TableClient
.. admonition:: Example:
@ -126,34 +131,28 @@ class TableClient(AsyncTablesBaseClient):
:param str table_url: The full URI to the table, including SAS token if used.
:keyword credential:
The credentials with which to authenticate. This is optional if the
table URL already has a SAS token. The value can be one of AzureNamedKeyCredential
or AzureSasCredential from azure-core.
table URL already has a SAS token.
:paramtype credential:
:class:`~azure.core.credentials.AzureNamedKeyCredential` or
:class:`~azure.core.credentials.AzureSasCredential`
~azure.core.credentials.AzureNamedKeyCredential or
~azure.core.credentials.AzureSasCredential or None
:returns: A table client.
:rtype: :class:`~azure.data.tables.TableClient`
:rtype: ~azure.data.tables.TableClient
"""
try:
if not table_url.lower().startswith("http"):
table_url = "https://" + table_url
except AttributeError:
raise ValueError("Table URL must be a string.")
except AttributeError as exc:
raise ValueError("Table URL must be a string.") from exc
parsed_url = urlparse(table_url.rstrip("/"))
if not parsed_url.netloc:
raise ValueError("Invalid URL: {}".format(table_url))
raise ValueError(f"Invalid URL: {table_url}")
table_path = parsed_url.path.lstrip("/").split("/")
account_path = ""
if len(table_path) > 1:
account_path = "/" + "/".join(table_path[:-1])
endpoint = "{}://{}{}?{}".format(
parsed_url.scheme,
parsed_url.netloc.rstrip("/"),
account_path,
parsed_url.query,
)
endpoint = f"{parsed_url.scheme}://{parsed_url.netloc.rstrip('/')}{account_path}?{parsed_url.query}"
table_name = unquote(table_path[-1])
if table_name.lower().startswith("tables('"):
table_name = table_name[8:-2]
@ -170,7 +169,7 @@ class TableClient(AsyncTablesBaseClient):
used with Shared Access Signatures.
:return: Dictionary of SignedIdentifiers
:rtype: Dict[str, Optional[:class:`~azure.data.tables.TableAccessPolicy`]]
:rtype: dict[str, ~azure.data.tables.TableAccessPolicy] or dict[str, None]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
"""
timeout = kwargs.pop("timeout", None)
@ -204,9 +203,8 @@ class TableClient(AsyncTablesBaseClient):
"""Sets stored access policies for the table that may be used with Shared Access Signatures.
:param signed_identifiers: Access policies to set for the table
:type signed_identifiers: Dict[str, :class:`~azure.data.tables.TableAccessPolicy`]
:type signed_identifiers: dict[str, ~azure.data.tables.TableAccessPolicy]
:return: None
:rtype: None
:raises: :class:`~azure.core.exceptions.HttpResponseError`
"""
identifiers = []
@ -235,7 +233,7 @@ class TableClient(AsyncTablesBaseClient):
"""Creates a new table under the given account.
:return: A TableItem representing the created table.
:rtype: :class:`~azure.data.tables.TableItem`
:rtype: ~azure.data.tables.TableItem
:raises: :class:`~azure.core.exceptions.ResourceExistsError` If the entity already exists
.. admonition:: Example:
@ -264,7 +262,6 @@ class TableClient(AsyncTablesBaseClient):
the given table name is not found.
:return: None
:rtype: None
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -310,7 +307,6 @@ class TableClient(AsyncTablesBaseClient):
The default value is Unconditionally.
:paramtype match_condition: ~azure.core.MatchConditions
:return: None
:rtype: None
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
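A sketch of a conditional delete using the etag/match_condition keywords described above (keys are placeholders):

import asyncio
import os

from azure.core import MatchConditions
from azure.data.tables.aio import TableClient

async def main():
    client = TableClient.from_connection_string(
        os.environ["TABLES_CONNECTION_STRING"], table_name="mytable")
    async with client:
        entity = await client.get_entity(partition_key="pk0001", row_key="rk0001")
        # Delete only if the entity has not changed since it was read.
        await client.delete_entity(
            partition_key="pk0001",
            row_key="rk0001",
            etag=entity.metadata["etag"],
            match_condition=MatchConditions.IfNotModified,
        )

asyncio.run(main())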
@ -370,9 +366,9 @@ class TableClient(AsyncTablesBaseClient):
"""Insert entity in a table.
:param entity: The properties for the table entity.
:type entity: Union[TableEntity, Mapping[str, Any]]
:type entity: ~azure.data.tables.TableEntity or Mapping
:return: Dictionary mapping operation metadata returned from the service
:rtype: Dict[str,str]
:rtype: dict[str, Any]
:raises: :class:`~azure.core.exceptions.ResourceExistsError` If the entity already exists
@ -397,9 +393,9 @@ class TableClient(AsyncTablesBaseClient):
decoded = _decode_error(error.response, error.message)
if decoded.error_code == "PropertiesNeedValue":
if entity.get("PartitionKey") is None:
raise ValueError("PartitionKey must be present in an entity")
raise ValueError("PartitionKey must be present in an entity") from error
if entity.get("RowKey") is None:
raise ValueError("RowKey must be present in an entity")
raise ValueError("RowKey must be present in an entity") from error
_validate_tablename_error(decoded, self.table_name)
_reraise_error(error)
return _trim_service_metadata(metadata, content=content) # type: ignore
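For context, a sketch of create_entity together with the failure modes the hunk above maps (ResourceExistsError for duplicates, ValueError for missing keys); entity values are placeholders:

import asyncio
import os

from azure.core.exceptions import ResourceExistsError
from azure.data.tables.aio import TableClient

async def main():
    client = TableClient.from_connection_string(
        os.environ["TABLES_CONNECTION_STRING"], table_name="mytable")
    async with client:
        entity = {"PartitionKey": "pk0001", "RowKey": "rk0001", "Value": 42}
        try:
            metadata = await client.create_entity(entity=entity)
            print(metadata)  # operation metadata such as the etag
        except ResourceExistsError:
            print("entity already exists")
        # Omitting PartitionKey or RowKey raises ValueError, per the mapping above.

asyncio.run(main())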
@ -415,16 +411,16 @@ class TableClient(AsyncTablesBaseClient):
"""Update entity in a table.
:param entity: The properties for the table entity.
:type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
:type entity: ~azure.data.tables.TableEntity or dict[str, Any]
:param mode: Merge or Replace entity
:type mode: :class:`~azure.data.tables.UpdateMode`
:type mode: ~azure.data.tables.UpdateMode
:keyword str etag: Etag of the entity
:keyword match_condition: The condition under which to perform the operation.
Supported values include: MatchConditions.IfNotModified, MatchConditions.Unconditionally.
The default value is Unconditionally.
:paramtype match_condition: ~azure.core.MatchConditions
:return: Dictionary of operation metadata returned from service
:rtype: Dict[str,str]
:rtype: dict[str, Any]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -475,7 +471,7 @@ class TableClient(AsyncTablesBaseClient):
**kwargs
)
else:
raise ValueError("Mode type '{}' is not supported.".format(mode))
raise ValueError(f"Mode type '{mode}' is not supported.")
except HttpResponseError as error:
_process_table_error(error, table_name=self.table_name)
return _trim_service_metadata(metadata, content=content) # type: ignore
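A sketch of an optimistic-concurrency update with the etag/match_condition keywords documented above; property names are placeholders:

import asyncio
import os

from azure.core import MatchConditions
from azure.data.tables import UpdateMode
from azure.data.tables.aio import TableClient

async def main():
    client = TableClient.from_connection_string(
        os.environ["TABLES_CONNECTION_STRING"], table_name="mytable")
    async with client:
        entity = await client.get_entity(partition_key="pk0001", row_key="rk0001")
        entity["Value"] = 100
        # REPLACE overwrites the stored entity; MERGE would only update the listed properties.
        metadata = await client.update_entity(
            entity=entity,
            mode=UpdateMode.REPLACE,
            etag=entity.metadata["etag"],
            match_condition=MatchConditions.IfNotModified,
        )
        print(metadata)

asyncio.run(main())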
@ -486,9 +482,9 @@ class TableClient(AsyncTablesBaseClient):
:keyword int results_per_page: Number of entities returned per service request.
:keyword select: Specify desired properties of an entity to return.
:paramtype select: str or List[str]
:return: AsyncItemPaged[:class:`~azure.data.tables.TableEntity`]
:rtype: ~azure.core.async_paging.AsyncItemPaged[TableEntity]
:paramtype select: str or list[str]
:return: An async iterator of :class:`~azure.data.tables.TableEntity`
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.data.tables.TableEntity]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -526,11 +522,11 @@ class TableClient(AsyncTablesBaseClient):
on filter formatting, see the `samples documentation <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/tables/azure-data-tables/samples#writing-filters>`_.
:keyword int results_per_page: Number of entities returned per service request.
:keyword select: Specify desired properties of an entity to return.
:paramtype select: str or List[str]
:paramtype select: str or list[str]
:keyword parameters: Dictionary for formatting query with additional, user defined parameters
:paramtype parameters: Dict[str, Any]
:return: AsyncItemPaged[:class:`~azure.data.tables.TableEntity`]
:rtype: ~azure.core.async_paging.AsyncItemPaged[TableEntity]
:paramtype parameters: dict[str, Any]
:return: An async iterator of :class:`~azure.data.tables.TableEntity`
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.data.tables.TableEntity]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
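The referenced sample is elided here; a rough sketch of a parameterized query (property names and values are placeholders):

import asyncio
import os

from azure.data.tables.aio import TableClient

async def main():
    client = TableClient.from_connection_string(
        os.environ["TABLES_CONNECTION_STRING"], table_name="mytable")
    async with client:
        # @pk and @value are substituted from `parameters` and escaped by the SDK.
        entities = client.query_entities(
            query_filter="PartitionKey eq @pk and Value gt @value",
            parameters={"pk": "pk0001", "value": 25},
            select=["RowKey", "Value"],
            results_per_page=50,
        )
        async for entity in entities:
            print(entity["RowKey"], entity["Value"])

asyncio.run(main())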
@ -575,9 +571,9 @@ class TableClient(AsyncTablesBaseClient):
:param row_key: The row key of the entity.
:type row_key: str
:keyword select: Specify desired properties of an entity to return.
:paramtype select: str or List[str]
:paramtype select: str or list[str]
:return: The matching entity returned from the service
:rtype: :class:`~azure.data.tables.TableEntity`
:rtype: ~azure.data.tables.TableEntity
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -615,11 +611,11 @@ class TableClient(AsyncTablesBaseClient):
"""Update/Merge or Insert entity into table.
:param entity: The properties for the table entity.
:type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
:type entity: ~azure.data.tables.TableEntity or dict[str, Any]
:param mode: Merge or Replace entity
:type mode: :class:`~azure.data.tables.UpdateMode`
:type mode: ~azure.data.tables.UpdateMode
:return: Dictionary mapping operation metadata returned from the service
:rtype: Dict[str,str]
:rtype: dict[str, Any]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -659,10 +655,7 @@ class TableClient(AsyncTablesBaseClient):
)
else:
raise ValueError(
"""Update mode {} is not supported.
For a list of supported modes see the UpdateMode enum""".format(
mode
)
f"Update mode {mode} is not supported. For a list of supported modes see the UpdateMode enum."
)
except HttpResponseError as error:
_process_table_error(error, table_name=self.table_name)
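A minimal sketch of upsert_entity with both supported modes; entity values are placeholders:

import asyncio
import os

from azure.data.tables import UpdateMode
from azure.data.tables.aio import TableClient

async def main():
    client = TableClient.from_connection_string(
        os.environ["TABLES_CONNECTION_STRING"], table_name="mytable")
    async with client:
        entity = {"PartitionKey": "pk0001", "RowKey": "rk0001", "Color": "blue"}
        # MERGE keeps existing properties that are absent from `entity`; REPLACE discards them.
        await client.upsert_entity(entity=entity, mode=UpdateMode.MERGE)
        await client.upsert_entity(entity={**entity, "Color": "red"}, mode=UpdateMode.REPLACE)

asyncio.run(main())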
@ -690,7 +683,7 @@ class TableClient(AsyncTablesBaseClient):
:type operations:
Union[Iterable[Tuple[str, Entity, Mapping[str, Any]]], AsyncIterable[Tuple[str, Entity, Mapping[str, Any]]]]
:return: A list of mappings with response metadata for each operation in the transaction.
:rtype: List[Mapping[str, Any]]
:rtype: list[Mapping[str, Any]]
:raises ~azure.data.tables.TableTransactionError:
.. admonition:: Example:
@ -718,10 +711,10 @@ class TableClient(AsyncTablesBaseClient):
try:
async for operation in operations: # type: ignore
batched_requests.add_operation(operation)
except TypeError:
except TypeError as exc:
raise TypeError(
"The value of 'operations' must be an iterator or async iterator "
"of Tuples. Please check documentation for correct Tuple format."
)
) from exc
return await self._batch_send(self.table_name, *batched_requests.requests, **kwargs)
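A sketch of the operation-tuple format submit_transaction expects; every entity in a single transaction must share the same partition key, and the values below are placeholders:

import asyncio
import os

from azure.data.tables import UpdateMode
from azure.data.tables.aio import TableClient

async def main():
    client = TableClient.from_connection_string(
        os.environ["TABLES_CONNECTION_STRING"], table_name="mytable")
    async with client:
        operations = [
            ("create", {"PartitionKey": "pk0001", "RowKey": "rk0001", "Value": 1}),
            ("upsert", {"PartitionKey": "pk0001", "RowKey": "rk0002", "Value": 2}),
            # The optional third element carries per-operation keyword arguments.
            ("upsert", {"PartitionKey": "pk0001", "RowKey": "rk0003", "Value": 3},
             {"mode": UpdateMode.REPLACE}),
        ]
        results = await client.submit_transaction(operations)
        print(len(results))  # one metadata mapping per operation

asyncio.run(main())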

View file

@ -40,11 +40,11 @@ class TableServiceClient(AsyncTablesBaseClient):
:keyword credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token. The value can be one of AzureNamedKeyCredential (azure-core),
AzureSasCredential (azure-core), or AsyncTokenCredential from azure-identity.
AzureSasCredential (azure-core), or an AsyncTokenCredential implementation from azure-identity.
:paramtype credential:
:class:`~azure.core.credentials.AzureNamedKeyCredential` or
:class:`~azure.core.credentials.AzureSasCredential` or
:class:`~azure.core.credentials.AsyncTokenCredential`
~azure.core.credentials.AzureNamedKeyCredential or
~azure.core.credentials.AzureSasCredential or
~azure.core.credentials_async.AsyncTokenCredential or None
:keyword str api_version:
The Storage API version to use for requests. Default value is '2019-02-02'.
Setting to an older version may result in reduced feature compatibility.
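A sketch of constructing the async TableServiceClient with each credential type the docstring lists; the account name, key, and SAS token are placeholders:

from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential
from azure.identity.aio import DefaultAzureCredential
from azure.data.tables.aio import TableServiceClient

endpoint = "https://myaccount.table.core.windows.net"

# Shared key
client = TableServiceClient(endpoint=endpoint,
                            credential=AzureNamedKeyCredential("myaccount", "<account-key>"))

# SAS token
client = TableServiceClient(endpoint=endpoint, credential=AzureSasCredential("<sas-token>"))

# Azure AD token, via any AsyncTokenCredential implementation (e.g. from azure-identity)
client = TableServiceClient(endpoint=endpoint, credential=DefaultAzureCredential())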
@ -69,8 +69,12 @@ class TableServiceClient(AsyncTablesBaseClient):
def _format_url(self, hostname: str) -> str:
"""Format the endpoint URL according to the current location
mode hostname.
:param str hostname: The current location mode hostname.
:returns: The full URL to the Tables account.
:rtype: str
"""
return "{}://{}{}".format(self.scheme, hostname, self._query_str)
return f"{self.scheme}://{hostname}{self._query_str}"
@classmethod
def from_connection_string(cls, conn_str: str, **kwargs) -> 'TableServiceClient':
@ -78,7 +82,7 @@ class TableServiceClient(AsyncTablesBaseClient):
:param str conn_str: A connection string to an Azure Tables account.
:returns: A Table service client.
:rtype: :class:`~azure.data.tables.aio.TableServiceClient`
:rtype: ~azure.data.tables.aio.TableServiceClient
.. admonition:: Example:
@ -101,7 +105,7 @@ class TableServiceClient(AsyncTablesBaseClient):
location endpoint when read-access geo-redundant replication is enabled for the account.
:return: Dictionary of service stats
:rtype: Dict[str, object]
:rtype: dict[str, object]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
"""
try:
@ -120,7 +124,7 @@ class TableServiceClient(AsyncTablesBaseClient):
:keyword callable cls: A custom type or function that will be passed the direct response
:return: TableServiceProperties, or the result of cls(response)
:rtype: Dict[str, object]
:rtype: dict[str, object]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
"""
timeout = kwargs.pop("timeout", None)
@ -148,15 +152,14 @@ class TableServiceClient(AsyncTablesBaseClient):
including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules.
:keyword analytics_logging: Properties for analytics
:paramtype analytics_logging: ~azure.data.tables.TableAnalyticsLogging
:paramtype analytics_logging: ~azure.data.tables.TableAnalyticsLogging or None
:keyword hour_metrics: Hour level metrics
:paramtype hour_metrics: ~azure.data.tables.TableMetrics
:paramtype hour_metrics: ~azure.data.tables.TableMetrics or None
:keyword minute_metrics: Minute level metrics
:paramtype minute_metrics: ~azure.data.tables.TableMetrics
:paramtype minute_metrics: ~azure.data.tables.TableMetrics or None
:keyword cors: Cross-origin resource sharing rules
:paramtype cors: List[~azure.data.tables.TableCorsRule]
:paramtype cors: list[~azure.data.tables.TableCorsRule] or None
:return: None
:rtype: None
:raises: :class:`~azure.core.exceptions.HttpResponseError`
"""
if cors:
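A rough sketch of set_service_properties with the keyword types listed above; the retention window and CORS values are illustrative:

import asyncio
import os

from azure.data.tables import TableCorsRule, TableMetrics, TableRetentionPolicy
from azure.data.tables.aio import TableServiceClient

async def main():
    service = TableServiceClient.from_connection_string(os.environ["TABLES_CONNECTION_STRING"])
    async with service:
        cors_rule = TableCorsRule(
            allowed_origins=["https://www.contoso.com"],
            allowed_methods=["GET", "PUT"],
            max_age_in_seconds=300,
        )
        metrics = TableMetrics(
            enabled=True,
            include_apis=True,
            retention_policy=TableRetentionPolicy(enabled=True, days=5),
        )
        await service.set_service_properties(hour_metrics=metrics, cors=[cors_rule])

asyncio.run(main())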
@ -180,10 +183,9 @@ class TableServiceClient(AsyncTablesBaseClient):
async def create_table(self, table_name: str, **kwargs) -> TableClient:
"""Creates a new table under the given account.
:param headers:
:param str table_name: The Table name.
:return: TableClient, or the result of cls(response)
:rtype: :class:`~azure.data.tables.aio.TableClient`
:rtype: ~azure.data.tables.aio.TableClient
:raises: :class:`~azure.core.exceptions.ResourceExistsError`
.. admonition:: Example:
@ -208,7 +210,7 @@ class TableServiceClient(AsyncTablesBaseClient):
:param table_name: The Table name.
:type table_name: str
:return: TableClient
:rtype: :class:`~azure.data.tables.aio.TableClient`
:rtype: ~azure.data.tables.aio.TableClient
.. admonition:: Example:
@ -233,7 +235,6 @@ class TableServiceClient(AsyncTablesBaseClient):
:param str table_name: The Table name.
:return: None
:rtype: None
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -253,8 +254,8 @@ class TableServiceClient(AsyncTablesBaseClient):
"""Queries tables under the given account.
:keyword int results_per_page: Number of tables per page in returned ItemPaged
:return: AsyncItemPaged[:class:`~azure.data.tables.TableItem`]
:rtype: ~azure.core.async_paging.AsyncItemPaged
:return: An async iterator of :class:`~azure.data.tables.TableItem`
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.data.tables.TableItem]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
@ -282,9 +283,9 @@ class TableServiceClient(AsyncTablesBaseClient):
:param str query_filter: Specify a filter to return certain tables.
:keyword int results_per_page: Number of tables per page in the returned ItemPaged
:keyword parameters: Dictionary for formatting query with additional, user defined parameters
:paramtype parameters: Dict[str, Any]
:return: AsyncItemPaged[:class:`~azure.data.tables.TableItem`]
:rtype: ~azure.core.async_paging.AsyncItemPaged
:paramtype parameters: dict[str, Any]
:return: An async iterator of :class:`~azure.data.tables.TableItem`
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.data.tables.TableItem]
:raises: :class:`~azure.core.exceptions.HttpResponseError`
.. admonition:: Example:
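The linked sample is elided in this view; a minimal sketch of a parameterized table query (the table name in the filter is a placeholder):

import asyncio
import os

from azure.data.tables.aio import TableServiceClient

async def main():
    service = TableServiceClient.from_connection_string(os.environ["TABLES_CONNECTION_STRING"])
    async with service:
        tables = service.query_tables(
            query_filter="TableName eq @name",
            parameters={"name": "mytable"},
        )
        async for table in tables:
            print(table.name)  # each item is a TableItem

asyncio.run(main())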
@ -316,7 +317,7 @@ class TableServiceClient(AsyncTablesBaseClient):
:param str table_name: The table name
:returns: A :class:`~azure.data.tables.aio.TableClient` object.
:rtype: :class:`~azure.data.tables.aio.TableClient`
:rtype: ~azure.data.tables.aio.TableClient
"""
pipeline = AsyncPipeline( # type: ignore

View file

@ -170,7 +170,7 @@ import datetime
import os
import uuid
from azure.data.tables import TableServiceClient
from azure.identity import AzureNamedKeyCredential
from azure.core.credentials import AzureNamedKeyCredential
entity = {
"PartitionKey": "pk0001",

View file

@ -6,11 +6,10 @@
import functools
from azure.mgmt.cosmosdb import CosmosDBManagementClient
from azure.mgmt.storage.models import StorageAccount, Endpoints
from azure.mgmt.storage.models import StorageAccount
from azure.mgmt.cosmosdb.models import (
DatabaseAccountCreateUpdateParameters,
Capability,
CreateUpdateOptions
)
from azure_devtools.scenario_tests.exceptions import AzureTestError
@ -121,4 +120,4 @@ class CosmosAccountPreparer(AzureMgmtPreparer):
'decorator @{} in front of this cosmos account preparer.'
raise AzureTestError(template.format(ResourceGroupPreparer.__name__))
CachedCosmosAccountPreparer = functools.partial(CosmosAccountPreparer, use_cache=True, random_name_enabled=True)
CachedCosmosAccountPreparer = functools.partial(CosmosAccountPreparer, use_cache=True, random_name_enabled=True)

View file

@ -7,7 +7,7 @@
# --------------------------------------------------------------------------
import pytest
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from dateutil.tz import tzutc, tzoffset
from enum import Enum
from math import isnan
@ -25,7 +25,6 @@ from azure.data.tables import (
TableAccessPolicy,
UpdateMode
)
from azure.data.tables._common_conversion import TZ_UTC
from azure.core import MatchConditions
from azure.core.credentials import AzureSasCredential
@ -2027,7 +2026,7 @@ class TestTableEntity(AzureRecordedTestCase, TableTestCase):
entity = {
'PartitionKey': partition,
'RowKey': row,
'Timestamp': datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=TZ_UTC)
'Timestamp': datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=timezone.utc)
}
try:
self.table.create_entity(entity)
@ -2038,7 +2037,7 @@ class TestTableEntity(AzureRecordedTestCase, TableTestCase):
assert isinstance(received.metadata['timestamp'], datetime)
assert received.metadata['timestamp'].year > 2020
received['timestamp'] = datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=TZ_UTC)
received['timestamp'] = datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=timezone.utc)
self.table.update_entity(received, mode=UpdateMode.REPLACE)
received = self.table.get_entity(partition, row)

View file

@ -7,7 +7,7 @@
# --------------------------------------------------------------------------
import pytest
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from dateutil.tz import tzutc, tzoffset
from math import isnan
from uuid import UUID
@ -35,7 +35,6 @@ from azure.data.tables import (
EdmType
)
from azure.data.tables.aio import TableServiceClient
from azure.data.tables._common_conversion import TZ_UTC
from _shared.asynctestcase import AsyncTableTestCase
from async_preparers import tables_decorator_async
@ -2024,7 +2023,7 @@ class TestTableEntityAsync(AzureRecordedTestCase, AsyncTableTestCase):
entity = {
'PartitionKey': partition,
'RowKey': row,
'Timestamp': datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=TZ_UTC)
'Timestamp': datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=timezone.utc)
}
try:
await self.table.create_entity(entity)
@ -2035,7 +2034,7 @@ class TestTableEntityAsync(AzureRecordedTestCase, AsyncTableTestCase):
assert isinstance(received.metadata['timestamp'], datetime)
assert received.metadata['timestamp'].year > 2020
received['timestamp'] = datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=TZ_UTC)
received['timestamp'] = datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=timezone.utc)
await self.table.update_entity(received, mode=UpdateMode.REPLACE)
received = await self.table.get_entity(partition, row)

View file

@ -8,7 +8,7 @@
from uuid import UUID
import pytest
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from dateutil.tz import tzutc, tzoffset
from math import isnan
@ -31,7 +31,6 @@ from azure.data.tables import (
TableSasPermissions,
TableServiceClient,
)
from azure.data.tables._common_conversion import TZ_UTC
from _shared.testcase import TableTestCase
from preparers import cosmos_decorator
@ -1841,7 +1840,7 @@ class TestTableEntityCosmos(AzureRecordedTestCase, TableTestCase):
entity = {
'PartitionKey': partition,
'RowKey': row,
'Timestamp': datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=TZ_UTC)
'Timestamp': datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=timezone.utc)
}
try:
self.table.create_entity(entity)
@ -1852,7 +1851,7 @@ class TestTableEntityCosmos(AzureRecordedTestCase, TableTestCase):
assert isinstance(received.metadata['timestamp'], datetime)
assert received.metadata['timestamp'].year > 2020
received['timestamp'] = datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=TZ_UTC)
received['timestamp'] = datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=timezone.utc)
self.table.update_entity(received, mode=UpdateMode.REPLACE)
received = self.table.get_entity(partition, row)

View file

@ -7,7 +7,7 @@
from uuid import UUID
import pytest
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from dateutil.tz import tzutc, tzoffset
from math import isnan
@ -23,7 +23,6 @@ from azure.data.tables import (
TableSasPermissions,
)
from azure.data.tables.aio import TableServiceClient
from azure.data.tables._common_conversion import TZ_UTC
from azure.core import MatchConditions
from azure.core.credentials import AzureSasCredential
from azure.core.exceptions import (
@ -1855,7 +1854,7 @@ class TestTableEntityCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
entity = {
'PartitionKey': partition,
'RowKey': row,
'Timestamp': datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=TZ_UTC)
'Timestamp': datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=timezone.utc)
}
try:
await self.table.create_entity(entity)
@ -1866,7 +1865,7 @@ class TestTableEntityCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase):
assert isinstance(received.metadata['timestamp'], datetime)
assert received.metadata['timestamp'].year > 2020
received['timestamp'] = datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=TZ_UTC)
received['timestamp'] = datetime(year=1999, month=9, day=9, hour=9, minute=9, tzinfo=timezone.utc)
await self.table.update_entity(received, mode=UpdateMode.REPLACE)
received = await self.table.get_entity(partition, row)