From 4949b0cb4c41eeb8991bcdbdb0bef4144c8f91c1 Mon Sep 17 00:00:00 2001 From: antisch Date: Fri, 13 Mar 2026 15:21:10 +1300 Subject: [PATCH] Migration attempt --- sdk/tables/azure-data-tables/MANIFEST.in | 7 +- sdk/tables/azure-data-tables/_metadata.json | 6 + .../azure-data-tables/apiview-properties.json | 51 + .../azure-data-tables/azure/__init__.py | 2 +- .../azure-data-tables/azure/data/__init__.py | 2 +- .../azure/data/tables/_base_client.py | 23 +- .../azure/data/tables/_decoder.py | 14 + .../azure/data/tables/_generated/__init__.py | 16 +- .../azure/data/tables/_generated/_client.py | 73 +- .../data/tables/_generated/_configuration.py | 48 +- .../azure/data/tables/_generated/_patch.py | 13 +- .../data/tables/_generated/_utils/__init__.py | 6 + .../tables/_generated/_utils/model_base.py | 1350 +++++++++++++++++ .../serialization.py} | 675 +++++---- .../{_vendor.py => _utils/utils.py} | 2 +- .../azure/data/tables/_generated/_version.py | 4 +- .../data/tables/_generated/aio/__init__.py | 16 +- .../data/tables/_generated/aio/_client.py | 75 +- .../tables/_generated/aio/_configuration.py | 48 +- .../data/tables/_generated/aio/_patch.py | 13 +- .../_generated/aio/operations/__init__.py | 16 +- .../_generated/aio/operations/_operations.py | 966 ++++++------ .../_generated/aio/operations/_patch.py | 13 +- .../data/tables/_generated/models/__init__.py | 57 +- .../data/tables/_generated/models/_enums.py | 20 +- .../data/tables/_generated/models/_models.py | 890 ++++++----- .../data/tables/_generated/models/_patch.py | 13 +- .../tables/_generated/operations/__init__.py | 16 +- .../_generated/operations/_operations.py | 1103 +++++++------- .../tables/_generated/operations/_patch.py | 13 +- .../azure/data/tables/_generated/py.typed | 1 - .../azure/data/tables/_models.py | 8 +- .../azure/data/tables/_table_batch.py | 26 +- .../azure/data/tables/_table_client.py | 43 +- .../data/tables/_table_service_client.py | 50 +- .../azure/data/tables/_types.py | 11 + 
.../azure/data/tables/_version.py | 8 +- .../data/tables/aio/_base_client_async.py | 26 +- .../azure/data/tables/aio/_models.py | 4 +- .../data/tables/aio/_table_client_async.py | 43 +- .../tables/aio/_table_service_client_async.py | 50 +- sdk/tables/azure-data-tables/pyproject.toml | 62 + .../sample_authentication_async.py | 1 + .../async_samples/sample_batching_async.py | 1 + .../sample_conditional_update_async.py | 1 + .../async_samples/sample_copy_table_async.py | 1 + .../sample_create_client_async.py | 1 + .../sample_create_delete_table_async.py | 1 + .../sample_encode_dataclass_model_async.py | 1 + .../sample_encode_pydantic_model_async.py | 1 + .../sample_insert_delete_entities_async.py | 1 + .../async_samples/sample_query_table_async.py | 1 + .../sample_query_tables_async.py | 1 + ...mple_update_upsert_merge_entities_async.py | 1 + .../samples/sample_authentication.py | 1 + .../samples/sample_batching.py | 1 + .../samples/sample_conditional_update.py | 1 + .../samples/sample_copy_table.py | 1 + .../samples/sample_create_client.py | 1 + .../samples/sample_create_delete_table.py | 1 + .../samples/sample_encode_dataclass_model.py | 1 + .../samples/sample_encode_pydantic_model.py | 1 + .../samples/sample_insert_delete_entities.py | 1 + .../samples/sample_query_table.py | 1 + .../samples/sample_query_tables.py | 1 + .../sample_update_upsert_merge_entities.py | 1 + sdk/tables/azure-data-tables/setup.py | 75 - .../tests/_shared/testcase.py | 4 +- .../tests/perfstress_tests/_base.py | 1 + .../azure-data-tables/tests/test_encoder.py | 1 + .../tests/test_encoder_async.py | 1 + .../tests/test_encoder_cosmos.py | 1 + .../tests/test_encoder_cosmos_async.py | 1 + .../azure-data-tables/tests/test_retry.py | 1 + .../tests/test_retry_cosmos.py | 1 + .../azure-data-tables/tests/test_table.py | 5 +- .../tests/test_table_async.py | 1 + .../tests/test_table_batch.py | 1 + .../tests/test_table_batch_async.py | 1 + .../tests/test_table_batch_cosmos.py | 1 + 
.../tests/test_table_batch_cosmos_async.py | 1 + .../tests/test_table_client.py | 9 +- .../tests/test_table_client_async.py | 9 +- .../tests/test_table_client_cosmos.py | 1 + .../tests/test_table_client_cosmos_async.py | 1 + .../tests/test_table_cosmos.py | 4 +- .../tests/test_table_entity.py | 1 + .../tests/test_table_entity_async.py | 1 + .../tests/test_table_entity_cosmos.py | 1 + .../tests/test_table_entity_cosmos_async.py | 1 + .../tests/test_table_service_stats_async.py | 4 +- .../tests/test_table_service_stats_cosmos.py | 4 +- .../test_table_service_stats_cosmos_async.py | 4 +- .../azure-data-tables/tsp-location.yaml | 4 + 94 files changed, 3917 insertions(+), 2130 deletions(-) create mode 100644 sdk/tables/azure-data-tables/_metadata.json create mode 100644 sdk/tables/azure-data-tables/apiview-properties.json create mode 100644 sdk/tables/azure-data-tables/azure/data/tables/_generated/_utils/__init__.py create mode 100644 sdk/tables/azure-data-tables/azure/data/tables/_generated/_utils/model_base.py rename sdk/tables/azure-data-tables/azure/data/tables/_generated/{_serialization.py => _utils/serialization.py} (79%) rename sdk/tables/azure-data-tables/azure/data/tables/_generated/{_vendor.py => _utils/utils.py} (96%) delete mode 100644 sdk/tables/azure-data-tables/azure/data/tables/_generated/py.typed create mode 100644 sdk/tables/azure-data-tables/azure/data/tables/_types.py delete mode 100644 sdk/tables/azure-data-tables/setup.py create mode 100644 sdk/tables/azure-data-tables/tsp-location.yaml diff --git a/sdk/tables/azure-data-tables/MANIFEST.in b/sdk/tables/azure-data-tables/MANIFEST.in index 8c54d4fc0df5..3c5c2b29b615 100644 --- a/sdk/tables/azure-data-tables/MANIFEST.in +++ b/sdk/tables/azure-data-tables/MANIFEST.in @@ -1,8 +1,7 @@ -recursive-include tests *.py *.yaml include *.md -include azure/__init__.py -include azure/data/__init__.py include LICENSE +include azure/data/tables/py.typed recursive-include tests *.py recursive-include samples *.py 
*.md -include azure/data/tables/py.typed +include azure/__init__.py +include azure/data/__init__.py diff --git a/sdk/tables/azure-data-tables/_metadata.json b/sdk/tables/azure-data-tables/_metadata.json new file mode 100644 index 000000000000..eed3997f4014 --- /dev/null +++ b/sdk/tables/azure-data-tables/_metadata.json @@ -0,0 +1,6 @@ +{ + "apiVersion": "2019-02-02", + "apiVersions": { + "Data.Tables": "2019-02-02" + } +} \ No newline at end of file diff --git a/sdk/tables/azure-data-tables/apiview-properties.json b/sdk/tables/azure-data-tables/apiview-properties.json new file mode 100644 index 000000000000..9f18c34c6ad3 --- /dev/null +++ b/sdk/tables/azure-data-tables/apiview-properties.json @@ -0,0 +1,51 @@ +{ + "CrossLanguagePackageId": "Data.Tables", + "CrossLanguageDefinitionId": { + "azure.data.tables.models.AccessPolicy": "Data.Tables.AccessPolicy", + "azure.data.tables.models.CorsRule": "Data.Tables.CorsRule", + "azure.data.tables.models.GeoReplication": "Data.Tables.GeoReplication", + "azure.data.tables.models.Logging": "Data.Tables.Logging", + "azure.data.tables.models.Metrics": "Data.Tables.Metrics", + "azure.data.tables.models.RetentionPolicy": "Data.Tables.RetentionPolicy", + "azure.data.tables.models.SignedIdentifier": "ClientCustomizations.SignedIdentifier", + "azure.data.tables.models.SignedIdentifiers": "Data.Tables.SignedIdentifiers", + "azure.data.tables.models.TableEntityQueryResponse": "Data.Tables.TableEntityQueryResponse", + "azure.data.tables.models.TableProperties": "Data.Tables.TableProperties", + "azure.data.tables.models.TableResponse": "Data.Tables.TableResponse", + "azure.data.tables.models.TablesError": "Data.Tables.TablesError", + "azure.data.tables.models.TableServiceError": "Data.Tables.TablesServiceError", + "azure.data.tables.models.TableServiceProperties": "Data.Tables.TableServiceProperties", + "azure.data.tables.models.TableServiceStats": "Data.Tables.TableServiceStats", + "azure.data.tables.models.OdataMetadataFormat": 
"Data.Tables.OdataMetadataFormat", + "azure.data.tables.models.ResponseFormat": "Data.Tables.ResponseFormat", + "azure.data.tables.models.GeoReplicationStatusType": "Data.Tables.GeoReplicationStatusType", + "azure.data.tables.operations.TableOperations.query": "Data.Tables.Table.query", + "azure.data.tables.aio.operations.TableOperations.query": "Data.Tables.Table.query", + "azure.data.tables.operations.TableOperations.create": "Data.Tables.Table.create", + "azure.data.tables.aio.operations.TableOperations.create": "Data.Tables.Table.create", + "azure.data.tables.operations.TableOperations.delete": "Data.Tables.Table.delete", + "azure.data.tables.aio.operations.TableOperations.delete": "Data.Tables.Table.delete", + "azure.data.tables.operations.TableOperations.query_entities": "Data.Tables.Table.queryEntities", + "azure.data.tables.aio.operations.TableOperations.query_entities": "Data.Tables.Table.queryEntities", + "azure.data.tables.operations.TableOperations.query_entity_with_partition_and_row_key": "Data.Tables.Table.queryEntityWithPartitionAndRowKey", + "azure.data.tables.aio.operations.TableOperations.query_entity_with_partition_and_row_key": "Data.Tables.Table.queryEntityWithPartitionAndRowKey", + "azure.data.tables.operations.TableOperations.update_entity": "Data.Tables.Table.updateEntity", + "azure.data.tables.aio.operations.TableOperations.update_entity": "Data.Tables.Table.updateEntity", + "azure.data.tables.operations.TableOperations.merge_entity": "Data.Tables.Table.mergeEntity", + "azure.data.tables.aio.operations.TableOperations.merge_entity": "Data.Tables.Table.mergeEntity", + "azure.data.tables.operations.TableOperations.delete_entity": "Data.Tables.Table.deleteEntity", + "azure.data.tables.aio.operations.TableOperations.delete_entity": "Data.Tables.Table.deleteEntity", + "azure.data.tables.operations.TableOperations.insert_entity": "Data.Tables.Table.insertEntity", + "azure.data.tables.aio.operations.TableOperations.insert_entity": 
"Data.Tables.Table.insertEntity", + "azure.data.tables.operations.TableOperations.get_access_policy": "Data.Tables.Table.getAccessPolicy", + "azure.data.tables.aio.operations.TableOperations.get_access_policy": "Data.Tables.Table.getAccessPolicy", + "azure.data.tables.operations.TableOperations.set_access_policy": "Data.Tables.Table.setAccessPolicy", + "azure.data.tables.aio.operations.TableOperations.set_access_policy": "Data.Tables.Table.setAccessPolicy", + "azure.data.tables.operations.ServiceOperations.set_properties": "Data.Tables.Service.setProperties", + "azure.data.tables.aio.operations.ServiceOperations.set_properties": "Data.Tables.Service.setProperties", + "azure.data.tables.operations.ServiceOperations.get_properties": "Data.Tables.Service.getProperties", + "azure.data.tables.aio.operations.ServiceOperations.get_properties": "Data.Tables.Service.getProperties", + "azure.data.tables.operations.ServiceOperations.get_statistics": "Data.Tables.Service.getStatistics", + "azure.data.tables.aio.operations.ServiceOperations.get_statistics": "Data.Tables.Service.getStatistics" + } +} \ No newline at end of file diff --git a/sdk/tables/azure-data-tables/azure/__init__.py b/sdk/tables/azure-data-tables/azure/__init__.py index 8db66d3d0f0f..d55ccad1f573 100644 --- a/sdk/tables/azure-data-tables/azure/__init__.py +++ b/sdk/tables/azure-data-tables/azure/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/tables/azure-data-tables/azure/data/__init__.py b/sdk/tables/azure-data-tables/azure/data/__init__.py index 8db66d3d0f0f..d55ccad1f573 100644 --- a/sdk/tables/azure-data-tables/azure/data/__init__.py +++ b/sdk/tables/azure-data-tables/azure/data/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git 
a/sdk/tables/azure-data-tables/azure/data/tables/_base_client.py b/sdk/tables/azure-data-tables/azure/data/tables/_base_client.py index a9c0053636e4..661d76864f72 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_base_client.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_base_client.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for @@ -24,9 +25,10 @@ NetworkTraceLoggingPolicy, CustomHookPolicy, RequestIdPolicy, + SansIOHTTPPolicy, ) -from ._generated import AzureTable +from ._generated import AzureTableClient as _AzureTableClient from ._common_conversion import _is_cosmos_endpoint, _get_account from ._shared_access_signature import QueryStringConstants from ._constants import ( @@ -48,6 +50,11 @@ # cspell:disable-next-line _DEV_CONN_STRING = "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;TableEndpoint=http://127.0.0.1:10002/devstoreaccount1" # pylint: disable=line-too-long + +class _NoOpCredential: + """Placeholder credential for SAS/connection-string authentication scenarios.""" + + AudienceType = Union[ str, Literal[ @@ -149,10 +156,16 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential if self._cosmos_endpoint: self._policies.insert(0, CosmosPatchTransformPolicy()) - self._client = AzureTable(self.url, policies=kwargs.pop("policies", self._policies), **kwargs) + self._client = _AzureTableClient( + self.url, + credential=credential or _NoOpCredential(), # type: ignore[arg-type] + policies=kwargs.pop("policies", self._policies), + authentication_policy=kwargs.pop("authentication_policy", SansIOHTTPPolicy()), + **kwargs, + ) # Incompatible assignment when assigning a str 
value to a Literal type variable - self._client._config.version = get_api_version( - api_version, self._client._config.version + self._client._config.api_version = get_api_version( + api_version, self._client._config.api_version ) # type: ignore[assignment] @property @@ -219,7 +232,7 @@ def api_version(self) -> str: :return: The Storage API version. :type: str """ - return self._client._config.version # pylint: disable=protected-access + return self._client._config.api_version # pylint: disable=protected-access def __enter__(self) -> Self: self._client.__enter__() diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_decoder.py b/sdk/tables/azure-data-tables/azure/data/tables/_decoder.py index a9a79a55562f..9f1b8d5077ab 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_decoder.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_decoder.py @@ -166,6 +166,20 @@ def from_entity_str(self, value: Union[str, bytes]) -> str: def deserialize_iso(value: Optional[str]) -> Optional[TablesEntityDatetime]: if not value: return None + # New generated models may return datetime objects directly via rest_field() + if isinstance(value, datetime): + if isinstance(value, TablesEntityDatetime): + return value + return TablesEntityDatetime( + value.year, + value.month, + value.day, + value.hour, + value.minute, + value.second, + value.microsecond, + value.tzinfo, + ) # Cosmos returns this with a decimal point that throws an error on deserialization cleaned_value = _clean_up_dotnet_timestamps(value) try: diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/__init__.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/__init__.py index d00af573949a..3c71c0b102ca 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/__init__.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/__init__.py @@ -2,25 +2,31 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft 
Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._client import AzureTable +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import AzureTableClient # type: ignore from ._version import VERSION __version__ = VERSION try: from ._patch import __all__ as _patch_all - from ._patch import * # pylint: disable=unused-wildcard-import + from ._patch import * except ImportError: _patch_all = [] from ._patch import patch_sdk as _patch_sdk __all__ = [ - "AzureTable", + "AzureTableClient", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/_client.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/_client.py index 2230d9fcc4bf..817787d62799 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/_client.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/_client.py @@ -2,52 +2,73 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from copy import deepcopy -from typing import Any +from typing import Any, TYPE_CHECKING +from typing_extensions import Self from azure.core import PipelineClient +from azure.core.pipeline import policies from azure.core.rest import HttpRequest, HttpResponse -from . import models as _models -from ._configuration import AzureTableConfiguration -from ._serialization import Deserializer, Serializer +from ._configuration import AzureTableClientConfiguration +from ._utils.serialization import Deserializer, Serializer from .operations import ServiceOperations, TableOperations +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential -class AzureTable: # pylint: disable=client-accepts-api-version-keyword - """AzureTable. + +class AzureTableClient: + """AzureTableClient. :ivar table: TableOperations operations - :vartype table: azure.table.operations.TableOperations + :vartype table: azure.data.tables.operations.TableOperations :ivar service: ServiceOperations operations - :vartype service: azure.table.operations.ServiceOperations - :param url: The URL of the service account or table that is the target of the desired - operation. Required. + :vartype service: azure.data.tables.operations.ServiceOperations + :param url: The host name of the tables account, e.g. accountName.table.core.windows.net. + Required. :type url: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2019-02-02". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version. Known values are "2019-02-02" and None. Default value is + "2019-02-02". Note that overriding this default value may result in unsupported behavior. 
+ :paramtype api_version: str """ - def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, url: str, **kwargs: Any - ) -> None: + def __init__(self, url: str, credential: "TokenCredential", **kwargs: Any) -> None: _endpoint = "{url}" - self._config = AzureTableConfiguration(url=url, **kwargs) - self._client: PipelineClient = PipelineClient(base_url=_endpoint, config=self._config, **kwargs) - - client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._deserialize = Deserializer(client_models) + self._config = AzureTableClientConfiguration(url=url, credential=credential, **kwargs) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() self._serialize.client_side_validation = False self.table = TableOperations(self._client, self._config, self._serialize, self._deserialize) self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize) - def send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. 
>>> from azure.core.rest import HttpRequest @@ -71,12 +92,12 @@ def send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: } request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) - return self._client.send_request(request_copy, **kwargs) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore def close(self) -> None: self._client.close() - def __enter__(self) -> "AzureTable": + def __enter__(self) -> Self: self._client.__enter__() return self diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/_configuration.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/_configuration.py index 5891c6cc373c..1773ce9de6b7 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/_configuration.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/_configuration.py @@ -2,48 +2,50 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys -from typing import Any +from typing import Any, TYPE_CHECKING -from azure.core.configuration import Configuration from azure.core.pipeline import policies from ._version import VERSION -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential -class AzureTableConfiguration(Configuration): # pylint: disable=too-many-instance-attributes - """Configuration for AzureTable. +class AzureTableClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for AzureTableClient. Note that all parameters used to create this instance are saved as instance attributes. - :param url: The URL of the service account or table that is the target of the desired - operation. Required. + :param url: The host name of the tables account, e.g. accountName.table.core.windows.net. + Required. :type url: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2019-02-02". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version. Known values are "2019-02-02" and None. Default value is + "2019-02-02". Note that overriding this default value may result in unsupported behavior. 
+ :paramtype api_version: str """ - def __init__(self, url: str, **kwargs: Any) -> None: - super(AzureTableConfiguration, self).__init__(**kwargs) - version: Literal["2019-02-02"] = kwargs.pop("version", "2019-02-02") + def __init__(self, url: str, credential: "TokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2019-02-02") if url is None: raise ValueError("Parameter 'url' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") self.url = url - self.version = version - kwargs.setdefault("sdk_moniker", "table/{}".format(VERSION)) + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://storage.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "data-tables/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) def _configure(self, **kwargs: Any) -> None: @@ -52,7 +54,11 @@ def _configure(self, **kwargs: Any) -> None: self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git 
a/sdk/tables/azure-data-tables/azure/data/tables/_generated/_patch.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/_patch.py index f7dd32510333..87676c65a8f0 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/_patch.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/_patch.py @@ -1,14 +1,15 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/_utils/__init__.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/_utils/__init__.py new file mode 100644 index 000000000000..8026245c2abc --- /dev/null +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/_utils/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# pylint: disable=line-too-long,useless-suppression,too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
# pylint: disable=protected-access, broad-except

import copy
import calendar
import decimal
import functools
import sys
import logging
import base64
import re
import typing
import enum
import email.utils
from datetime import datetime, date, time, timedelta, timezone
from json import JSONEncoder
import xml.etree.ElementTree as ET
from collections.abc import MutableMapping
from typing_extensions import Self
import isodate
from azure.core.exceptions import DeserializationError
from azure.core import CaseInsensitiveEnumMeta
from azure.core.pipeline import PipelineResponse
from azure.core.serialization import _Null
from azure.core.rest import HttpResponse

_LOGGER = logging.getLogger(__name__)

# Public surface of this generated helper module; everything else is internal.
__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"]

TZ_UTC = timezone.utc
_T = typing.TypeVar("_T")
# Cached NoneType, used to detect Optional[...] members in annotations.
_NONE_TYPE = type(None)


def _timedelta_as_isostr(td: timedelta) -> str:
    """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 'P4DT12H30M05S'

    Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython

    :param timedelta td: The timedelta to convert
    :rtype: str
    :return: ISO8601 version of this timedelta
    """

    # Split seconds to larger units
    seconds = td.total_seconds()
    minutes, seconds = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)

    days, hours, minutes = list(map(int, (days, hours, minutes)))
    seconds = round(seconds, 6)

    # Build date
    date_str = ""
    if days:
        date_str = "%sD" % days

    if hours or minutes or seconds:
        # Build time
        time_str = "T"

        # Hours
        bigger_exists = date_str or hours
        if bigger_exists:
            time_str += "{:02}H".format(hours)

        # Minutes
        bigger_exists = bigger_exists or minutes
        if bigger_exists:
            time_str += "{:02}M".format(minutes)

        # Seconds
        try:
            if seconds.is_integer():
                seconds_string = "{:02}".format(int(seconds))
            else:
                # 9 chars long w/ leading 0, 6 digits after decimal
                seconds_string = "%09.6f" % seconds
                # Remove trailing zeros
                seconds_string = seconds_string.rstrip("0")
        except AttributeError:  # int.is_integer() raises
            seconds_string = "{:02}".format(seconds)

        time_str += "{}S".format(seconds_string)
    else:
        time_str = ""

    return "P" + date_str + time_str


def _serialize_bytes(o, format: typing.Optional[str] = None) -> str:
    # Default is standard base64; "base64url" switches to the URL-safe alphabet
    # with the trailing "=" padding stripped.
    encoded = base64.b64encode(o).decode()
    if format == "base64url":
        return encoded.strip("=").replace("+", "-").replace("/", "_")
    return encoded


def _serialize_datetime(o, format: typing.Optional[str] = None):
    # Duck-typed dispatch: only datetime has both "year" and "hour"; plain
    # date/time objects fall through to the bare isoformat() at the bottom.
    if hasattr(o, "year") and hasattr(o, "hour"):
        if format == "rfc7231":
            return email.utils.format_datetime(o, usegmt=True)
        if format == "unix-timestamp":
            return int(calendar.timegm(o.utctimetuple()))

        # astimezone() fails for naive times in Python 2.7, so make make sure o is aware (tzinfo is set)
        if not o.tzinfo:
            iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat()
        else:
            iso_formatted = o.astimezone(TZ_UTC).isoformat()
        # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt)
        return iso_formatted.replace("+00:00", "Z")
    # Next try datetime.date or datetime.time
    return o.isoformat()


def _is_readonly(p):
    # A rest field is read-only when its visibility list is exactly ["read"].
    # Non-field objects (no _visibility) are never read-only.
    try:
        return p._visibility == ["read"]
    except AttributeError:
        return False


class SdkJSONEncoder(JSONEncoder):
    """A JSON encoder that's capable of serializing datetime objects and bytes."""

    def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs):
        super().__init__(*args, **kwargs)
        self.exclude_readonly = exclude_readonly
        self.format = format

    def default(self, o):  # pylint: disable=too-many-return-statements
        # Models serialize as their underlying dict (optionally dropping readonly props);
        # everything else tries the stdlib encoder first, then SDK-specific fallbacks.
        if _is_model(o):
            if self.exclude_readonly:
                readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]
                return {k: v for k, v in o.items() if k not in readonly_props}
            return dict(o.items())
        try:
            return super(SdkJSONEncoder, self).default(o)
        except TypeError:
            if isinstance(o, _Null):
                return None
            if isinstance(o, decimal.Decimal):
                return float(o)
            if isinstance(o, (bytes, bytearray)):
                return _serialize_bytes(o, self.format)
            try:
                # First try datetime.datetime
                return _serialize_datetime(o, self.format)
            except AttributeError:
                pass
            # Last, try datetime.timedelta
            try:
                return _timedelta_as_isostr(o)
            except AttributeError:
                # This will be raised when it hits value.total_seconds in the method above
                pass
            return super(SdkJSONEncoder, self).default(o)


# Lenient pre-validation patterns applied before handing strings to the real parsers.
_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
_VALID_RFC7231 = re.compile(
    r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s"
    r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT"
)

# Delimiter used for each collectionFormat-style array encoding of list[str] fields.
_ARRAY_ENCODE_MAPPING = {
    "pipeDelimited": "|",
    "spaceDelimited": " ",
    "commaDelimited": ",",
    "newlineDelimited": "\n",
}
def _deserialize_array_encoded(delimit: str, attr):
    # Split a delimiter-encoded string back into a list of strings; the empty
    # string means an empty list. Non-string input is assumed already deserialized.
    if isinstance(attr, str):
        if attr == "":
            return []
        return attr.split(delimit)
    return attr


def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime:
    """Deserialize ISO-8601 formatted string into Datetime object.

    :param str attr: response string to be deserialized.
    :rtype: ~datetime.datetime
    :returns: The datetime object from that input
    """
    if isinstance(attr, datetime):
        # i'm already deserialized
        return attr
    attr = attr.upper()
    match = _VALID_DATE.match(attr)
    if not match:
        raise ValueError("Invalid datetime string: " + attr)

    # Python only supports microsecond precision: truncate fractional seconds to 6 digits.
    check_decimal = attr.split(".")
    if len(check_decimal) > 1:
        decimal_str = ""
        for digit in check_decimal[1]:
            if digit.isdigit():
                decimal_str += digit
            else:
                break
        if len(decimal_str) > 6:
            attr = attr.replace(decimal_str, decimal_str[0:6])

    date_obj = isodate.parse_datetime(attr)
    test_utc = date_obj.utctimetuple()
    if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
        raise OverflowError("Hit max or min date")
    return date_obj  # type: ignore[no-any-return]


def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime:
    """Deserialize RFC7231 formatted string into Datetime object.

    :param str attr: response string to be deserialized.
    :rtype: ~datetime.datetime
    :returns: The datetime object from that input
    """
    if isinstance(attr, datetime):
        # i'm already deserialized
        return attr
    match = _VALID_RFC7231.match(attr)
    if not match:
        raise ValueError("Invalid datetime string: " + attr)

    return email.utils.parsedate_to_datetime(attr)


def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime:
    """Deserialize unix timestamp into Datetime object.

    :param str attr: response string to be deserialized.
    :rtype: ~datetime.datetime
    :returns: The datetime object from that input
    """
    if isinstance(attr, datetime):
        # i'm already deserialized
        return attr
    return datetime.fromtimestamp(attr, TZ_UTC)


def _deserialize_date(attr: typing.Union[str, date]) -> date:
    """Deserialize ISO-8601 formatted string into Date object.
    :param str attr: response string to be deserialized.
    :rtype: date
    :returns: The date object from that input
    """
    # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
    if isinstance(attr, date):
        return attr
    return isodate.parse_date(attr, defaultmonth=None, defaultday=None)  # type: ignore


def _deserialize_time(attr: typing.Union[str, time]) -> time:
    """Deserialize ISO-8601 formatted string into time object.

    :param str attr: response string to be deserialized.
    :rtype: datetime.time
    :returns: The time object from that input
    """
    if isinstance(attr, time):
        return attr
    return isodate.parse_time(attr)  # type: ignore[no-any-return]


def _deserialize_bytes(attr):
    # Standard base64 decode; bytes-like input passes through untouched.
    if isinstance(attr, (bytes, bytearray)):
        return attr
    return bytes(base64.b64decode(attr))


def _deserialize_bytes_base64(attr):
    # base64url decode: restore the stripped "=" padding and the standard
    # alphabet ("-"/"_" -> "+"/"/") before decoding.
    if isinstance(attr, (bytes, bytearray)):
        return attr
    padding = "=" * (3 - (len(attr) + 3) % 4)  # type: ignore
    attr = attr + padding  # type: ignore
    encoded = attr.replace("-", "+").replace("_", "/")
    return bytes(base64.b64decode(encoded))


def _deserialize_duration(attr):
    # ISO-8601 duration string -> timedelta via isodate.
    if isinstance(attr, timedelta):
        return attr
    return isodate.parse_duration(attr)


def _deserialize_decimal(attr):
    # Route through str() so floats don't inject binary representation error.
    if isinstance(attr, decimal.Decimal):
        return attr
    return decimal.Decimal(str(attr))


def _deserialize_int_as_str(attr):
    # Wire value is a string-encoded integer (format == "str").
    if isinstance(attr, int):
        return attr
    return int(attr)


# Deserializer lookup by Python annotation when no wire format is specified.
_DESERIALIZE_MAPPING = {
    datetime: _deserialize_datetime,
    date: _deserialize_date,
    time: _deserialize_time,
    bytes: _deserialize_bytes,
    bytearray: _deserialize_bytes,
    timedelta: _deserialize_duration,
    typing.Any: lambda x: x,
    decimal.Decimal: _deserialize_decimal,
}

# Deserializer lookup by the rest field's explicit wire format.
_DESERIALIZE_MAPPING_WITHFORMAT = {
    "rfc3339": _deserialize_datetime,
    "rfc7231": _deserialize_datetime_rfc7231,
    "unix-timestamp": _deserialize_datetime_unix_timestamp,
    "base64": _deserialize_bytes,
    "base64url": _deserialize_bytes_base64,
}


def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None):
    # Pick the primitive deserializer for an annotation, honoring the field's
    # wire format when one is set; returns None for unknown annotations.
    if annotation is int and rf and rf._format == "str":
        return _deserialize_int_as_str
    if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING:
        return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format])
    if rf and rf._format:
        return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format)
    return _DESERIALIZE_MAPPING.get(annotation)  # pyright: ignore


def _get_type_alias_type(module_name: str, alias_name: str):
    # Resolve a string annotation naming a module-level type alias; the name is
    # returned unchanged when no matching alias exists in that module.
    types = {
        k: v
        for k, v in sys.modules[module_name].__dict__.items()
        if isinstance(v, typing._GenericAlias)  # type: ignore
    }
    if alias_name not in types:
        return alias_name
    return types[alias_name]


def _get_model(module_name: str, model_name: str):
    # Resolve a string/forward-ref annotation to a model class, searching the
    # given module and its parent package; unresolved names pass through.
    models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)}
    module_end = module_name.rsplit(".", 1)[0]
    models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)})
    if isinstance(model_name, str):
        model_name = model_name.split(".")[-1]
    if model_name not in models:
        return model_name
    return models[model_name]


# Sentinel distinguishing "argument not provided" from an explicit None.
_UNSET = object()


class _MyMutableMapping(MutableMapping[str, typing.Any]):
    # Dict-like base for Model: all field values live in self._data keyed by
    # their REST (wire) names. Attribute access is layered on top by _RestField.

    def __init__(self, data: dict[str, typing.Any]) -> None:
        self._data = data

    def __contains__(self, key: typing.Any) -> bool:
        return key in self._data

    def __getitem__(self, key: str) -> typing.Any:
        # If this key has been deserialized (for mutable types), we need to handle serialization
        if hasattr(self, "_attr_to_rest_field"):
            cache_attr = f"_deserialized_{key}"
            if hasattr(self, cache_attr):
                rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key)
                if rf:
                    value = self._data.get(key)
                    if isinstance(value, (dict, list, set)):
                        # For mutable types, serialize and return
                        # But also update _data with serialized form and clear flag
                        # so mutations via this returned value affect _data
                        serialized = _serialize(value, rf._format)
                        # If serialized form is same type (no transformation needed),
                        # return _data directly so mutations work
                        if isinstance(serialized, type(value)) and serialized == value:
                            return self._data.get(key)
                        # Otherwise return serialized copy and clear flag
                        try:
                            object.__delattr__(self, cache_attr)
                        except AttributeError:
                            pass
                        # Store serialized form back
                        self._data[key] = serialized
                        return serialized
        return self._data.__getitem__(key)

    def __setitem__(self, key: str, value: typing.Any) -> None:
        # Clear any cached deserialized value when setting through dictionary access
        cache_attr = f"_deserialized_{key}"
        try:
            object.__delattr__(self, cache_attr)
        except AttributeError:
            pass
        self._data.__setitem__(key, value)

    def __delitem__(self, key: str) -> None:
        self._data.__delitem__(key)

    def __iter__(self) -> typing.Iterator[typing.Any]:
        return self._data.__iter__()

    def __len__(self) -> int:
        return self._data.__len__()

    def __ne__(self, other: typing.Any) -> bool:
        return not self.__eq__(other)

    def keys(self) -> typing.KeysView[str]:
        """
        :returns: a set-like object providing a view on D's keys
        :rtype: ~typing.KeysView
        """
        return self._data.keys()

    def values(self) -> typing.ValuesView[typing.Any]:
        """
        :returns: an object providing a view on D's values
        :rtype: ~typing.ValuesView
        """
        return self._data.values()

    def items(self) -> typing.ItemsView[str, typing.Any]:
        """
        :returns: set-like object providing a view on D's items
        :rtype: ~typing.ItemsView
        """
        return self._data.items()

    def get(self, key: str, default: typing.Any = None) -> typing.Any:
        """
        Get the value for key if key is in the dictionary, else default.
        :param str key: The key to look up.
        :param any default: The value to return if key is not in the dictionary. Defaults to None
        :returns: D[k] if k in D, else d.
        :rtype: any
        """
        try:
            return self[key]
        except KeyError:
            return default

    @typing.overload
    def pop(self, key: str) -> typing.Any: ...  # pylint: disable=arguments-differ

    @typing.overload
    def pop(self, key: str, default: _T) -> _T: ...  # pylint: disable=signature-differs

    @typing.overload
    def pop(self, key: str, default: typing.Any) -> typing.Any: ...  # pylint: disable=signature-differs

    def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
        """
        Removes specified key and return the corresponding value.
        :param str key: The key to pop.
        :param any default: The value to return if key is not in the dictionary
        :returns: The value corresponding to the key.
        :rtype: any
        :raises KeyError: If key is not found and default is not given.
        """
        if default is _UNSET:
            return self._data.pop(key)
        return self._data.pop(key, default)

    def popitem(self) -> tuple[str, typing.Any]:
        """
        Removes and returns some (key, value) pair
        :returns: The (key, value) pair.
        :rtype: tuple
        :raises KeyError: if D is empty.
        """
        return self._data.popitem()

    def clear(self) -> None:
        """
        Remove all items from D.
        """
        self._data.clear()

    def update(self, *args: typing.Any, **kwargs: typing.Any) -> None:  # pylint: disable=arguments-differ
        """
        Updates D from mapping/iterable E and F.
        :param any args: Either a mapping object or an iterable of key-value pairs.
        """
        self._data.update(*args, **kwargs)

    @typing.overload
    def setdefault(self, key: str, default: None = None) -> None: ...

    @typing.overload
    def setdefault(self, key: str, default: typing.Any) -> typing.Any: ...  # pylint: disable=signature-differs

    def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
        """
        Same as calling D.get(k, d), and setting D[k]=d if k not found
        :param str key: The key to look up.
        :param any default: The value to set if key is not in the dictionary
        :returns: D[k] if k in D, else d.
        :rtype: any
        """
        if default is _UNSET:
            return self._data.setdefault(key)
        return self._data.setdefault(key, default)

    def __eq__(self, other: typing.Any) -> bool:
        # Equal when the wire dicts match; non-mapping inputs are coerced through
        # this class's constructor before comparing (failures compare unequal).
        if isinstance(other, _MyMutableMapping):
            return self._data == other._data
        try:
            other_model = self.__class__(other)
        except Exception:
            return False
        return self._data == other_model._data

    def __repr__(self) -> str:
        return str(self._data)


def _is_model(obj: typing.Any) -> bool:
    # True for instances of generated Model subclasses (class-level flag).
    return getattr(obj, "_is_model", False)


def _serialize(o, format: typing.Optional[str] = None):  # pylint: disable=too-many-return-statements
    # Convert a Python value to its JSON/wire representation, recursing into
    # containers and honoring the optional wire format for strings/ints/bytes.
    if isinstance(o, list):
        if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o):
            return _ARRAY_ENCODE_MAPPING[format].join(o)
        return [_serialize(x, format) for x in o]
    if isinstance(o, dict):
        return {k: _serialize(v, format) for k, v in o.items()}
    if isinstance(o, set):
        return {_serialize(x, format) for x in o}
    if isinstance(o, tuple):
        return tuple(_serialize(x, format) for x in o)
    if isinstance(o, (bytes, bytearray)):
        return _serialize_bytes(o, format)
    if isinstance(o, decimal.Decimal):
        return float(o)
    if isinstance(o, enum.Enum):
        return o.value
    if isinstance(o, int):
        if format == "str":
            return str(o)
        return o
    try:
        # First try datetime.datetime
        return _serialize_datetime(o, format)
    except AttributeError:
        pass
    # Last, try datetime.timedelta
    try:
        return _timedelta_as_isostr(o)
    except AttributeError:
        # This will be raised when it hits value.total_seconds in the method above
        pass
    return o
def _get_rest_field(attr_to_rest_field: dict[str, "_RestField"], rest_name: str) -> typing.Optional["_RestField"]:
    # Look up a field descriptor by its REST (wire) name rather than its attribute name.
    try:
        return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name)
    except StopIteration:
        return None


def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any:
    # Normalize a constructor-supplied value into what gets stored in _data:
    # multipart file inputs stay raw, model-typed fields are deserialized, and
    # everything else is serialized to its wire form.
    if not rf:
        return _serialize(value, None)
    if rf._is_multipart_file_input:
        return value
    if rf._is_model:
        return _deserialize(rf._type, value)
    if isinstance(value, ET.Element):
        value = _deserialize(rf._type, value)
    return _serialize(value, rf._format)


class Model(_MyMutableMapping):
    _is_model = True
    # label whether current class's _attr_to_rest_field has been calculated
    # could not see _attr_to_rest_field directly because subclass inherits it from parent class
    _calculated: set[str] = set()

    def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None:
        # Accepts either a single positional mapping / XML element, or keyword
        # arguments matching the declared rest fields.
        class_name = self.__class__.__name__
        if len(args) > 1:
            raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given")
        # Start from declared field defaults, then overlay the supplied values.
        dict_to_pass = {
            rest_field._rest_name: rest_field._default
            for rest_field in self._attr_to_rest_field.values()
            if rest_field._default is not _UNSET
        }
        if args:  # pylint: disable=too-many-nested-blocks
            if isinstance(args[0], ET.Element):
                existed_attr_keys = []
                model_meta = getattr(self, "_xml", {})

                for rf in self._attr_to_rest_field.values():
                    prop_meta = getattr(rf, "_xml", {})
                    xml_name = prop_meta.get("name", rf._rest_name)
                    xml_ns = prop_meta.get("ns", model_meta.get("ns", None))
                    if xml_ns:
                        xml_name = "{" + xml_ns + "}" + xml_name

                    # attribute
                    if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None:
                        existed_attr_keys.append(xml_name)
                        dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name))
                        continue

                    # unwrapped element is array
                    if prop_meta.get("unwrapped", False):
                        # unwrapped array could either use prop items meta/prop meta
                        if prop_meta.get("itemsName"):
                            xml_name = prop_meta.get("itemsName")
                            xml_ns = prop_meta.get("itemNs")
                            if xml_ns:
                                xml_name = "{" + xml_ns + "}" + xml_name
                        items = args[0].findall(xml_name)  # pyright: ignore
                        if len(items) > 0:
                            existed_attr_keys.append(xml_name)
                            dict_to_pass[rf._rest_name] = _deserialize(rf._type, items)
                        elif not rf._is_optional:
                            existed_attr_keys.append(xml_name)
                            dict_to_pass[rf._rest_name] = []
                        continue

                    # text element is primitive type
                    if prop_meta.get("text", False):
                        if args[0].text is not None:
                            dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text)
                        continue

                    # wrapped element could be normal property or array, it should only have one element
                    item = args[0].find(xml_name)
                    if item is not None:
                        existed_attr_keys.append(xml_name)
                        dict_to_pass[rf._rest_name] = _deserialize(rf._type, item)

                # rest thing is additional properties
                for e in args[0]:
                    if e.tag not in existed_attr_keys:
                        dict_to_pass[e.tag] = _convert_element(e)
            else:
                dict_to_pass.update(
                    {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()}
                )
        else:
            non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field]
            if non_attr_kwargs:
                # actual type errors only throw the first wrong keyword arg they see, so following that.
                raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'")
            dict_to_pass.update(
                {
                    self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v)
                    for k, v in kwargs.items()
                    if v is not None
                }
            )
        super().__init__(dict_to_pass)

    def copy(self) -> "Model":
        # NOTE(review): this wraps self.__dict__ (i.e. {"_data": ...}) in a base
        # Model rather than copying self._data into type(self) — the result is not
        # equal to the original instance. Looks suspect; confirm intent upstream
        # before relying on it.
        return Model(self.__dict__)

    def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self:
        # One-time (per concrete class) computation of the attribute-name ->
        # _RestField map, resolving each field's deserializer from annotations.
        if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated:
            # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping',
            # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object'
            mros = cls.__mro__[:-9][::-1]  # ignore parents, and reverse the mro order
            attr_to_rest_field: dict[str, _RestField] = {  # map attribute name to rest_field property
                k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type")
            }
            annotations = {
                k: v
                for mro_class in mros
                if hasattr(mro_class, "__annotations__")
                for k, v in mro_class.__annotations__.items()
            }
            for attr, rf in attr_to_rest_field.items():
                rf._module = cls.__module__
                if not rf._type:
                    rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None))
                if not rf._rest_name_input:
                    rf._rest_name_input = attr
            cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items())
            cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}")

        return super().__new__(cls)

    def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None:
        # Register polymorphic subclasses in the base class's __mapping__, keyed
        # by their discriminator value (or class name when none is given).
        for base in cls.__bases__:
            if hasattr(base, "__mapping__"):
                base.__mapping__[discriminator or cls.__name__] = cls  # type: ignore

    @classmethod
    def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]:
        # Find the next discriminator field that hasn't been consumed yet while
        # walking a polymorphic hierarchy.
        for v in cls.__dict__.values():
            if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators:
                return v
        return None

    @classmethod
    def _deserialize(cls, data, exist_discriminators):
        # Resolve polymorphic models: read the discriminator value (from JSON key
        # or XML attribute/child) and recurse into the mapped concrete subclass.
        if not hasattr(cls, "__mapping__"):
            return cls(data)
        discriminator = cls._get_discriminator(exist_discriminators)
        if discriminator is None:
            return cls(data)
        exist_discriminators.append(discriminator._rest_name)
        if isinstance(data, ET.Element):
            model_meta = getattr(cls, "_xml", {})
            prop_meta = getattr(discriminator, "_xml", {})
            xml_name = prop_meta.get("name", discriminator._rest_name)
            xml_ns = prop_meta.get("ns", model_meta.get("ns", None))
            if xml_ns:
                xml_name = "{" + xml_ns + "}" + xml_name

            if data.get(xml_name) is not None:
                discriminator_value = data.get(xml_name)
            else:
                discriminator_value = data.find(xml_name).text  # pyright: ignore
        else:
            discriminator_value = data.get(discriminator._rest_name)
        mapped_cls = cls.__mapping__.get(discriminator_value, cls)  # pyright: ignore # pylint: disable=no-member
        return mapped_cls._deserialize(data, exist_discriminators)

    def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]:
        """Return a dict that can be turned into json using json.dump.

        :keyword bool exclude_readonly: Whether to remove the readonly properties.
        :returns: A dict JSON compatible object
        :rtype: dict
        """

        result = {}
        readonly_props = []
        if exclude_readonly:
            readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)]
        for k, v in self.items():
            if exclude_readonly and k in readonly_props:  # pyright: ignore
                continue
            is_multipart_file_input = False
            try:
                is_multipart_file_input = next(
                    rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k
                )._is_multipart_file_input
            except StopIteration:
                pass
            result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly)
        return result

    @staticmethod
    def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any:
        # Recursively convert nested containers/models for as_dict().
        if v is None or isinstance(v, _Null):
            return None
        if isinstance(v, (list, tuple, set)):
            return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v)
        if isinstance(v, dict):
            return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()}
        return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v


def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj):
    # Already-deserialized model instances pass through untouched.
    if _is_model(obj):
        return obj
    return _deserialize(model_deserializer, obj)


def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj):
    # Optional[...] wrapper: None short-circuits, otherwise defer to the inner deserializer.
    if obj is None:
        return obj
    return _deserialize_with_callable(if_obj_deserializer, obj)


def _deserialize_with_union(deserializers, obj):
    # Try each union member's deserializer in order; the first success wins.
    for deserializer in deserializers:
        try:
            return _deserialize(deserializer, obj)
        except DeserializationError:
            pass
    raise DeserializationError()


def _deserialize_dict(
    value_deserializer: typing.Optional[typing.Callable],
    module: typing.Optional[str],
    obj: dict[typing.Any, typing.Any],
):
    # dict[str, T]: deserialize each value; XML elements become a tag->child map first.
    if obj is None:
        return obj
    if isinstance(obj, ET.Element):
        obj = {child.tag: child for child in obj}
    return {k: _deserialize(value_deserializer, v, module) for k, v in obj.items()}
def _deserialize_multiple_sequence(
    entry_deserializers: list[typing.Optional[typing.Callable]],
    module: typing.Optional[str],
    obj,
):
    # Heterogeneous tuple-style sequences: each position has its own deserializer.
    if obj is None:
        return obj
    return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers))


def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool:
    # Detects the nested partial that get_deserializer() builds for
    # delimiter-encoded list[str] fields.
    return (
        isinstance(deserializer, functools.partial)
        and isinstance(deserializer.args[0], functools.partial)
        and deserializer.args[0].func == _deserialize_array_encoded  # pylint: disable=comparison-with-callable
    )


def _deserialize_sequence(
    deserializer: typing.Optional[typing.Callable],
    module: typing.Optional[str],
    obj,
):
    # Homogeneous sequences: deserialize each entry, preserving the container type.
    if obj is None:
        return obj
    if isinstance(obj, ET.Element):
        obj = list(obj)

    # encoded string may be deserialized to sequence
    if isinstance(obj, str) and isinstance(deserializer, functools.partial):
        # for list[str]
        if _is_array_encoded_deserializer(deserializer):
            return deserializer(obj)

        # for list[Union[...]]
        if isinstance(deserializer.args[0], list):
            for sub_deserializer in deserializer.args[0]:
                if _is_array_encoded_deserializer(sub_deserializer):
                    return sub_deserializer(obj)

    return type(obj)(_deserialize(deserializer, entry, module) for entry in obj)


def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]:
    # Push primitive annotations (str/float/int/bool) to the end so that more
    # specific union members are attempted first during deserialization.
    return sorted(
        types,
        key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"),
    )


def _get_deserialize_callable_from_annotation(  # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches
    annotation: typing.Any,
    module: typing.Optional[str],
    rf: typing.Optional["_RestField"] = None,
) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]:
    # Build (once, at class setup) the callable that deserializes a wire value
    # for the given type annotation. Handles, in order: type aliases, forward
    # refs, model classes, Literal, Optional, Union, dict generics, sequence
    # generics, and finally primitive/format-based deserializers.
    if not annotation:
        return None

    # is it a type alias?
    if isinstance(annotation, str):
        if module is not None:
            annotation = _get_type_alias_type(module, annotation)

    # is it a forward ref / in quotes?
    if isinstance(annotation, (str, typing.ForwardRef)):
        try:
            model_name = annotation.__forward_arg__  # type: ignore
        except AttributeError:
            model_name = annotation
        if module is not None:
            annotation = _get_model(module, model_name)  # type: ignore

    try:
        if module and _is_model(annotation):
            if rf:
                rf._is_model = True

            return functools.partial(_deserialize_model, annotation)  # pyright: ignore
    except Exception:
        pass

    # is it a literal?
    try:
        if annotation.__origin__ is typing.Literal:  # pyright: ignore
            return None
    except AttributeError:
        pass

    # is it optional?
    try:
        if any(a is _NONE_TYPE for a in annotation.__args__):  # pyright: ignore
            if rf:
                rf._is_optional = True
            if len(annotation.__args__) <= 2:  # pyright: ignore
                if_obj_deserializer = _get_deserialize_callable_from_annotation(
                    next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf  # pyright: ignore
                )

                return functools.partial(_deserialize_with_optional, if_obj_deserializer)
            # the type is Optional[Union[...]], we need to remove the None type from the Union
            annotation_copy = copy.copy(annotation)
            annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE]  # pyright: ignore
            return _get_deserialize_callable_from_annotation(annotation_copy, module, rf)
    except AttributeError:
        pass

    # is it union?
    if getattr(annotation, "__origin__", None) is typing.Union:
        # initial ordering is we make `string` the last deserialization option, because it is often them most generic
        deserializers = [
            _get_deserialize_callable_from_annotation(arg, module, rf)
            for arg in _sorted_annotations(annotation.__args__)  # pyright: ignore
        ]

        return functools.partial(_deserialize_with_union, deserializers)

    try:
        annotation_name = (
            annotation.__name__ if hasattr(annotation, "__name__") else annotation._name  # pyright: ignore
        )
        if annotation_name.lower() == "dict":
            value_deserializer = _get_deserialize_callable_from_annotation(
                annotation.__args__[1], module, rf  # pyright: ignore
            )

            return functools.partial(
                _deserialize_dict,
                value_deserializer,
                module,
            )
    except (AttributeError, IndexError):
        pass
    try:
        annotation_name = (
            annotation.__name__ if hasattr(annotation, "__name__") else annotation._name  # pyright: ignore
        )
        if annotation_name.lower() in ["list", "set", "tuple", "sequence"]:
            if len(annotation.__args__) > 1:  # pyright: ignore
                entry_deserializers = [
                    _get_deserialize_callable_from_annotation(dt, module, rf)
                    for dt in annotation.__args__  # pyright: ignore
                ]
                return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module)
            deserializer = _get_deserialize_callable_from_annotation(
                annotation.__args__[0], module, rf  # pyright: ignore
            )

            return functools.partial(_deserialize_sequence, deserializer, module)
    except (TypeError, IndexError, AttributeError, SyntaxError):
        pass

    def _deserialize_default(
        deserializer,
        obj,
    ):
        # Best-effort primitive deserialization: any failure falls back to the raw value.
        if obj is None:
            return obj
        try:
            return _deserialize_with_callable(deserializer, obj)
        except Exception:
            pass
        return obj

    if get_deserializer(annotation, rf):
        return functools.partial(_deserialize_default, get_deserializer(annotation, rf))

    return functools.partial(_deserialize_default, annotation)


def _deserialize_with_callable(
    deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]],
    value: typing.Any,
):  # pylint: disable=too-many-return-statements
    # Apply a resolved deserializer to a single wire value, with special cases
    # for XML elements, primitives, enums and Model classes. Any failure is
    # wrapped in DeserializationError (chained to the original exception).
    try:
        if value is None or isinstance(value, _Null):
            return None
        if isinstance(value, ET.Element):
            if deserializer is str:
                return value.text or ""
            if deserializer is int:
                return int(value.text) if value.text else None
            if deserializer is float:
                return float(value.text) if value.text else None
            if deserializer is bool:
                return value.text == "true" if value.text else None
            if deserializer and deserializer in _DESERIALIZE_MAPPING.values():
                return deserializer(value.text) if value.text else None
            if deserializer and deserializer in _DESERIALIZE_MAPPING_WITHFORMAT.values():
                return deserializer(value.text) if value.text else None
        if deserializer is None:
            return value
        if deserializer in [int, float, bool]:
            return deserializer(value)
        if isinstance(deserializer, CaseInsensitiveEnumMeta):
            try:
                return deserializer(value.text if isinstance(value, ET.Element) else value)
            except ValueError:
                # for unknown value, return raw value
                return value.text if isinstance(value, ET.Element) else value
        if isinstance(deserializer, type) and issubclass(deserializer, Model):
            return deserializer._deserialize(value, [])
        return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value)
    except Exception as e:
        raise DeserializationError() from e


def _deserialize(
    deserializer: typing.Any,
    value: typing.Any,
    module: typing.Optional[str] = None,
    rf: typing.Optional["_RestField"] = None,
    format: typing.Optional[str] = None,
) -> typing.Any:
    # Entry point: unwrap pipeline responses, resolve the deserializer from an
    # annotation when needed, then apply it to the value.
    if isinstance(value, PipelineResponse):
        value = value.http_response.json()
    if rf is None and format:
        rf = _RestField(format=format)
    if not isinstance(deserializer, functools.partial):
        deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf)
    return _deserialize_with_callable(deserializer, value)
+def _failsafe_deserialize( + deserializer: typing.Any, + response: HttpResponse, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, response.json(), module, rf, format) + except Exception: # pylint: disable=broad-except + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + response: HttpResponse, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, response.text()) + except Exception: # pylint: disable=broad-except + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +# pylint: disable=too-many-instance-attributes +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._is_optional = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + result = getattr(self._type, "args", [None])[0] + # type may be wrapped by nested functools.partial so we need to check for that + if isinstance(result, functools.partial): + return getattr(result, "args", [None])[0] + return result + + 
@property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + # Use _data.get() directly to avoid triggering __getitem__ which clears the cache + item = obj._data.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + + # For mutable types, we want mutations to directly affect _data + # Check if we've already deserialized this value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + # Return the value from _data directly (it's been deserialized in place) + return obj._data.get(self._rest_name) + + deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self) + + # For mutable types, store the deserialized value back in _data + # so mutations directly affect _data + if isinstance(deserialized, (dict, list, set)): + obj._data[self._rest_name] = deserialized + object.__setattr__(obj, cache_attr, True) # Mark as deserialized + return deserialized + + return deserialized + + def __set__(self, obj: Model, value) -> None: + # Clear the cached deserialized object when setting a new value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + object.__delattr__(obj, cache_attr) + + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], 
typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. 
+ :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, list[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + 
xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: typing.Any, + exclude_readonly: bool, + meta: typing.Optional[dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element # type: ignore[no-any-return] + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element( + tag: typing.Any, prefix: typing.Optional[str] = None, ns: 
typing.Optional[str] = None +) -> ET.Element: + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: list[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/_serialization.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/_utils/serialization.py similarity index 79% rename from sdk/tables/azure-data-tables/azure/data/tables/_generated/_serialization.py rename to sdk/tables/azure-data-tables/azure/data/tables/_generated/_utils/serialization.py index 9f3e29b11388..81ec1de5922b 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/_serialization.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/_utils/serialization.py @@ -1,30 +1,12 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 # -------------------------------------------------------------------------- -# # Copyright (c) Microsoft Corporation. All rights reserved. 
-# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. -# +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -# pylint: skip-file # pyright: reportUnnecessaryTypeIgnoreComment=false from base64 import b64decode, b64encode @@ -39,7 +21,6 @@ import sys import codecs from typing import ( - Dict, Any, cast, Optional, @@ -48,11 +29,7 @@ IO, Mapping, Callable, - TypeVar, MutableMapping, - Type, - List, - Mapping, ) try: @@ -62,13 +39,13 @@ import xml.etree.ElementTree as ET import isodate # type: ignore +from typing_extensions import Self -from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback -from azure.core.serialization import NULL as AzureCoreNull +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull _BOM = codecs.BOM_UTF8.decode(encoding="utf-8") -ModelType = TypeVar("ModelType", bound="Model") JSON = MutableMapping[str, Any] @@ -91,6 +68,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: :param data: Input, could be bytes or stream (will be decoded with UTF8) or text :type data: str or bytes or IO :param str content_type: The content type. + :return: The deserialized data. + :rtype: object """ if hasattr(data, "read"): # Assume a stream @@ -112,7 +91,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: try: return json.loads(data_as_str) except ValueError as err: - raise DeserializationError("JSON is invalid: {}".format(err), err) + raise DeserializationError("JSON is invalid: {}".format(err), err) from err elif "xml" in (content_type or []): try: @@ -124,7 +103,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: pass return ET.fromstring(data_as_str) # nosec - except ET.ParseError: + except ET.ParseError as err: # It might be because the server has an issue, and returned JSON with # content-type XML.... 
# So let's try a JSON load, and if it's still broken @@ -143,7 +122,9 @@ def _json_attemp(data): # The function hack is because Py2.7 messes up with exception # context otherwise. _LOGGER.critical("Wasn't XML not JSON, failing") - raise_with_traceback(DeserializationError, "XML is invalid") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) @classmethod @@ -153,6 +134,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], Use bytes and headers to NOT use any requests/aiohttp or whatever specific implementation. Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. + :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object """ # Try to use content-type from headers if available content_type = None @@ -170,13 +156,6 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], return None -try: - basestring # type: ignore - unicode_str = unicode # type: ignore -except NameError: - basestring = str - unicode_str = str - _LOGGER = logging.getLogger(__name__) try: @@ -184,80 +163,31 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], except NameError: _long_type = int - -class UTC(datetime.tzinfo): - """Time Zone info for handling UTC""" - - def utcoffset(self, dt): - """UTF offset for UTC is 0.""" - return datetime.timedelta(0) - - def tzname(self, dt): - """Timestamp representation.""" - return "Z" - - def dst(self, dt): - """No daylight saving for UTC.""" - return datetime.timedelta(hours=1) - - -try: - from datetime import timezone as _FixedOffset # type: ignore -except ImportError: # Python 2.7 - - class _FixedOffset(datetime.tzinfo): # type: ignore - """Fixed offset in minutes east from UTC. 
- Copy/pasted from Python doc - :param datetime.timedelta offset: offset in timedelta format - """ - - def __init__(self, offset): - self.__offset = offset - - def utcoffset(self, dt): - return self.__offset - - def tzname(self, dt): - return str(self.__offset.total_seconds() / 3600) - - def __repr__(self): - return "".format(self.tzname(None)) - - def dst(self, dt): - return datetime.timedelta(0) - - def __getinitargs__(self): - return (self.__offset,) - - -try: - from datetime import timezone - - TZ_UTC = timezone.utc -except ImportError: - TZ_UTC = UTC() # type: ignore +TZ_UTC = datetime.timezone.utc _FLATTEN = re.compile(r"(? None: - self.additional_properties: Dict[str, Any] = {} - for k in kwargs: + self.additional_properties: Optional[dict[str, Any]] = {} + for k in kwargs: # pylint: disable=consider-using-dict-items if k not in self._attribute_map: _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) elif k in self._validation and self._validation[k].get("readonly", False): @@ -305,13 +242,23 @@ def __init__(self, **kwargs: Any) -> None: setattr(self, k, kwargs[k]) def __eq__(self, other: Any) -> bool: - """Compare objects by comparing all attributes.""" + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ if isinstance(other, self.__class__): return self.__dict__ == other.__dict__ return False def __ne__(self, other: Any) -> bool: - """Compare objects by comparing all attributes.""" + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ return not self.__eq__(other) def __str__(self) -> str: @@ -331,7 +278,11 @@ def is_xml_model(cls) -> bool: @classmethod def _create_xml_node(cls): - """Create XML node.""" + """Create XML node. 
+ + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ try: xml_map = cls._xml_map # type: ignore except AttributeError: @@ -340,7 +291,7 @@ def _create_xml_node(cls): return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: - """Return the JSON that would be sent to azure from this model. + """Return the JSON that would be sent to server from this model. This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. @@ -351,12 +302,14 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) def as_dict( self, keep_readonly: bool = True, - key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer, **kwargs: Any ) -> JSON: """Return a dict that can be serialized using json.dump. @@ -385,12 +338,15 @@ def my_key_transformer(key, attr_desc, value): If you want XML serialization, you can pass the kwargs is_xml=True. + :param bool keep_readonly: If you want to serialize the readonly attributes :param function key_transformer: A key transformer function. 
:returns: A dict JSON compatible object :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) @classmethod def _infer_class_models(cls): @@ -400,30 +356,31 @@ def _infer_class_models(cls): client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} if cls.__name__ not in client_models: raise ValueError("Not Autorest generated code") - except Exception: + except Exception: # pylint: disable=broad-exception-caught # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. client_models = {cls.__name__: cls} return client_models @classmethod - def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType: + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: """Parse a str using the RestAPI syntax and return a model. :param str data: A str using RestAPI structure. JSON by default. :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model - :raises: DeserializationError if something went wrong + :raises DeserializationError: if something went wrong + :rtype: Self """ deserializer = Deserializer(cls._infer_class_models()) - return deserializer(cls.__name__, data, content_type=content_type) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @classmethod def from_dict( - cls: Type[ModelType], + cls, data: Any, - key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None, content_type: Optional[str] = None, - ) -> ModelType: + ) -> Self: """Parse a dict using given key extractor return a model. 
By default consider key @@ -431,9 +388,11 @@ def from_dict( and last_rest_key_case_insensitive_extractor) :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model - :raises: DeserializationError if something went wrong + :raises DeserializationError: if something went wrong + :rtype: Self """ deserializer = Deserializer(cls._infer_class_models()) deserializer.key_extractors = ( # type: ignore @@ -445,7 +404,7 @@ def from_dict( if key_extractors is None else key_extractors ) - return deserializer(cls.__name__, data, content_type=content_type) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @classmethod def _flatten_subtype(cls, key, objects): @@ -453,21 +412,25 @@ def _flatten_subtype(cls, key, objects): return {} result = dict(cls._subtype_map[key]) for valuetype in cls._subtype_map[key].values(): - result.update(objects[valuetype]._flatten_subtype(key, objects)) + result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access return result @classmethod def _classify(cls, response, objects): """Check the class _subtype_map for any child classes. We want to ignore any inherited _subtype_maps. - Remove the polymorphic key from the initial data. 
+ + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class """ for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): subtype_value = None if not isinstance(response, ET.Element): rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] - subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None) + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) else: subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) if subtype_value: @@ -506,11 +469,13 @@ def _decode_attribute_map_key(key): inside the received data. :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str """ return key.replace("\\.", ".") -class Serializer(object): +class Serializer: # pylint: disable=too-many-public-methods """Request object model serializer.""" basic_types = {str: "str", int: "int", bool: "bool", float: "float"} @@ -545,7 +510,7 @@ class Serializer(object): "multiple": lambda x, y: x % y != 0, } - def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None): + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: self.serialize_type = { "iso-8601": Serializer.serialize_iso, "rfc-1123": Serializer.serialize_rfc, @@ -561,17 +526,20 @@ def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None): "[]": self.serialize_iter, "{}": self.serialize_dict, } - self.dependencies: Dict[str, Type[ModelType]] = dict(classes) if classes else {} + self.dependencies: dict[str, type] = dict(classes) if classes else {} self.key_transformer = full_restapi_key_transformer self.client_side_validation = True - def _serialize(self, target_obj, data_type=None, **kwargs): + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, target_obj, data_type=None, 
**kwargs + ): """Serialize data into a string according to type. - :param target_obj: The data to be serialized. + :param object target_obj: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str, dict - :raises: SerializationError if serialization fails. + :raises SerializationError: if serialization fails. + :returns: The serialized data. """ key_transformer = kwargs.get("key_transformer", self.key_transformer) keep_readonly = kwargs.get("keep_readonly", False) @@ -597,17 +565,19 @@ def _serialize(self, target_obj, data_type=None, **kwargs): serialized = {} if is_xml_model_serialization: - serialized = target_obj._create_xml_node() + serialized = target_obj._create_xml_node() # pylint: disable=protected-access try: - attributes = target_obj._attribute_map + attributes = target_obj._attribute_map # pylint: disable=protected-access for attr, attr_desc in attributes.items(): attr_name = attr - if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False): + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): continue if attr_name == "additional_properties" and attr_desc["key"] == "": if target_obj.additional_properties is not None: - serialized.update(target_obj.additional_properties) + serialized |= target_obj.additional_properties continue try: @@ -638,7 +608,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs): if isinstance(new_attr, list): serialized.extend(new_attr) # type: ignore elif isinstance(new_attr, ET.Element): - # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
if "name" not in getattr(orig_attr, "_xml_map", {}): splitted_tag = new_attr.tag.split("}") if len(splitted_tag) == 2: # Namespace @@ -649,7 +620,7 @@ def _serialize(self, target_obj, data_type=None, **kwargs): else: # That's a basic type # Integrate namespace if necessary local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) - local_node.text = unicode_str(new_attr) + local_node.text = str(new_attr) serialized.append(local_node) # type: ignore else: # JSON for k in reversed(keys): # type: ignore @@ -668,18 +639,18 @@ def _serialize(self, target_obj, data_type=None, **kwargs): except (AttributeError, KeyError, TypeError) as err: msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) - raise_with_traceback(SerializationError, msg, err) - else: - return serialized + raise SerializationError(msg) from err + return serialized def body(self, data, data_type, **kwargs): """Serialize data intended for a request body. - :param data: The data to be serialized. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: dict - :raises: SerializationError if serialization fails. - :raises: ValueError if data is None + :raises SerializationError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized request body """ # Just in case this is a dict @@ -708,20 +679,22 @@ def body(self, data, data_type, **kwargs): attribute_key_case_insensitive_extractor, last_rest_key_case_insensitive_extractor, ] - data = deserializer._deserialize(data_type, data) + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access except DeserializationError as err: - raise_with_traceback(SerializationError, "Unable to build a model: " + str(err), err) + raise SerializationError("Unable to build a model: " + str(err)) from err return self._serialize(data, data_type, **kwargs) def url(self, name, data, data_type, **kwargs): """Serialize data intended for a URL path. - :param data: The data to be serialized. + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str - :raises: TypeError if serialization fails. - :raises: ValueError if data is None + :returns: The serialized URL path + :raises TypeError: if serialization fails. + :raises ValueError: if data is None """ try: output = self.serialize_data(data, data_type, **kwargs) @@ -730,32 +703,30 @@ def url(self, name, data, data_type, **kwargs): if kwargs.get("skip_quote") is True: output = str(output) - # https://github.com/Azure/autorest.python/issues/2063 output = output.replace("{", quote("{")).replace("}", quote("}")) else: output = quote(str(output), safe="") - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return output + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output def query(self, name, data, data_type, **kwargs): """Serialize data intended for a URL query. - :param data: The data to be serialized. + :param str name: The name of the query parameter. + :param object data: The data to be serialized. 
:param str data_type: The type to be serialized from. - :keyword bool skip_quote: Whether to skip quote the serialized result. - Defaults to False. - :rtype: str - :raises: TypeError if serialization fails. - :raises: ValueError if data is None + :rtype: str, list + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter """ try: # Treat the list aside, since we don't want to encode the div separator if data_type.startswith("["): internal_data_type = data_type[1:-1] do_quote = not kwargs.get("skip_quote", False) - return str(self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) # Not a list, regular serialization output = self.serialize_data(data, data_type, **kwargs) @@ -765,19 +736,20 @@ def query(self, name, data, data_type, **kwargs): output = str(output) else: output = quote(str(output), safe="") - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return str(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) def header(self, name, data, data_type, **kwargs): """Serialize data intended for a request header. - :param data: The data to be serialized. + :param str name: The name of the header. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str - :raises: TypeError if serialization fails. - :raises: ValueError if data is None + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized header """ try: if data_type in ["[str]"]: @@ -786,37 +758,36 @@ def header(self, name, data, data_type, **kwargs): output = self.serialize_data(data, data_type, **kwargs) if data_type == "bool": output = json.dumps(output) - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return str(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) def serialize_data(self, data, data_type, **kwargs): """Serialize generic data according to supplied data type. - :param data: The data to be serialized. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. - :param bool required: Whether it's essential that the data not be - empty or None - :raises: AttributeError if required data is None. - :raises: ValueError if data is None - :raises: SerializationError if serialization fails. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. 
+ :rtype: str, int, float, bool, dict, list """ if data is None: raise ValueError("No value for given attribute") try: - if data is AzureCoreNull: + if data is CoreNull: return None if data_type in self.basic_types.values(): return self.serialize_basic(data, data_type, **kwargs) - elif data_type in self.serialize_type: + if data_type in self.serialize_type: return self.serialize_type[data_type](data, **kwargs) # If dependencies is empty, try with current data class # It has to be a subclass of Enum anyway - enum_type = self.dependencies.get(data_type, data.__class__) + enum_type = self.dependencies.get(data_type, cast(type, data.__class__)) if issubclass(enum_type, Enum): return Serializer.serialize_enum(data, enum_obj=enum_type) @@ -826,12 +797,11 @@ def serialize_data(self, data, data_type, **kwargs): except (ValueError, TypeError) as err: msg = "Unable to serialize value: {!r} as type: {!r}." - raise_with_traceback(SerializationError, msg.format(data, data_type), err) - else: - return self._serialize(data, **kwargs) + raise SerializationError(msg.format(data, data_type)) from err + return self._serialize(data, **kwargs) @classmethod - def _get_custom_serializers(cls, data_type, **kwargs): + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) if custom_serializer: return custom_serializer @@ -847,23 +817,33 @@ def serialize_basic(cls, data, data_type, **kwargs): - basic_types_serializers dict[str, callable] : If set, use the callable as serializer - is_xml bool : If set, use xml_basic_types_serializers - :param data: Object to be serialized. + :param obj data: Object to be serialized. :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object + :raises TypeError: raise if data_type is not one of str, int, float, bool. 
""" custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec + if data_type == "int": + return int(data) + if data_type == "float": + return float(data) + if data_type == "bool": + return bool(data) + raise TypeError("Unknown basic data type: {}".format(data_type)) @classmethod def serialize_unicode(cls, data): """Special handling for serializing unicode strings in Py2. Encode to UTF-8 if unicode, otherwise handle as a str. - :param data: Object to be serialized. + :param str data: Object to be serialized. :rtype: str + :return: serialized object """ try: # If I received an enum, return its value return data.value @@ -877,8 +857,7 @@ def serialize_unicode(cls, data): return data except NameError: return str(data) - else: - return str(data) + return str(data) def serialize_iter(self, data, iter_type, div=None, **kwargs): """Serialize iterable. @@ -888,15 +867,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): serialization_ctxt['type'] should be same as data_type. - is_xml bool : If set, serialize as XML - :param list attr: Object to be serialized. + :param list data: Object to be serialized. :param str iter_type: Type of object in the iterable. - :param bool required: Whether the objects in the iterable must - not be None or empty. :param str div: If set, this str will be used to combine the elements in the iterable into a combined string. Default is 'None'. - :keyword bool do_quote: Whether to quote the serialized result of each iterable element. Defaults to False. :rtype: list, str + :return: serialized iterable """ if isinstance(data, str): raise SerializationError("Refuse str type as a valid iter type.") @@ -951,9 +928,8 @@ def serialize_dict(self, attr, dict_type, **kwargs): :param dict attr: Object to be serialized. :param str dict_type: Type of object in the dictionary. 
- :param bool required: Whether the objects in the dictionary must - not be None or empty. :rtype: dict + :return: serialized dictionary """ serialization_ctxt = kwargs.get("serialization_ctxt", {}) serialized = {} @@ -977,7 +953,7 @@ def serialize_dict(self, attr, dict_type, **kwargs): return serialized - def serialize_object(self, attr, **kwargs): + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements """Serialize a generic object. This will be handled as a dictionary. If object passed in is not a basic type (str, int, float, dict, list) it will simply be @@ -985,6 +961,7 @@ def serialize_object(self, attr, **kwargs): :param dict attr: Object to be serialized. :rtype: dict or str + :return: serialized object """ if attr is None: return None @@ -995,7 +972,7 @@ def serialize_object(self, attr, **kwargs): return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) if obj_type is _long_type: return self.serialize_long(attr) - if obj_type is unicode_str: + if obj_type is str: return self.serialize_unicode(attr) if obj_type is datetime.datetime: return self.serialize_iso(attr) @@ -1009,7 +986,7 @@ def serialize_object(self, attr, **kwargs): return self.serialize_decimal(attr) # If it's a model or I know this dependency, serialize as a Model - elif obj_type in self.dependencies.values() or isinstance(attr, Model): + if obj_type in self.dependencies.values() or isinstance(attr, Model): return self._serialize(attr) if obj_type == dict: @@ -1040,56 +1017,61 @@ def serialize_enum(attr, enum_obj=None): try: enum_obj(result) # type: ignore return result - except ValueError: + except ValueError as exc: for enum_value in enum_obj: # type: ignore if enum_value.value.lower() == str(attr).lower(): return enum_value.value error = "{!r} is not valid value for enum {!r}" - raise SerializationError(error.format(attr, enum_obj)) + raise SerializationError(error.format(attr, enum_obj)) from exc @staticmethod - def 
serialize_bytearray(attr, **kwargs): + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument """Serialize bytearray into base-64 string. - :param attr: Object to be serialized. + :param str attr: Object to be serialized. :rtype: str + :return: serialized base64 """ return b64encode(attr).decode() @staticmethod - def serialize_base64(attr, **kwargs): + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument """Serialize str into base-64 string. - :param attr: Object to be serialized. + :param str attr: Object to be serialized. :rtype: str + :return: serialized base64 """ encoded = b64encode(attr).decode("ascii") return encoded.strip("=").replace("+", "-").replace("/", "_") @staticmethod - def serialize_decimal(attr, **kwargs): + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument """Serialize Decimal object to float. - :param attr: Object to be serialized. + :param decimal attr: Object to be serialized. :rtype: float + :return: serialized decimal """ return float(attr) @staticmethod - def serialize_long(attr, **kwargs): + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument """Serialize long (Py2) or int (Py3). - :param attr: Object to be serialized. + :param int attr: Object to be serialized. :rtype: int/long + :return: serialized long """ return _long_type(attr) @staticmethod - def serialize_date(attr, **kwargs): + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument """Serialize Date object into ISO-8601 formatted string. :param Date attr: Object to be serialized. :rtype: str + :return: serialized date """ if isinstance(attr, str): attr = isodate.parse_date(attr) @@ -1097,11 +1079,12 @@ def serialize_date(attr, **kwargs): return t @staticmethod - def serialize_time(attr, **kwargs): + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument """Serialize Time object into ISO-8601 formatted string. :param datetime.time attr: Object to be serialized. 
:rtype: str + :return: serialized time """ if isinstance(attr, str): attr = isodate.parse_time(attr) @@ -1111,30 +1094,32 @@ def serialize_time(attr, **kwargs): return t @staticmethod - def serialize_duration(attr, **kwargs): + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument """Serialize TimeDelta object into ISO-8601 formatted string. :param TimeDelta attr: Object to be serialized. :rtype: str + :return: serialized duration """ if isinstance(attr, str): attr = isodate.parse_duration(attr) return isodate.duration_isoformat(attr) @staticmethod - def serialize_rfc(attr, **kwargs): + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument """Serialize Datetime object into RFC-1123 formatted string. :param Datetime attr: Object to be serialized. :rtype: str - :raises: TypeError if format invalid. + :raises TypeError: if format invalid. + :return: serialized rfc """ try: if not attr.tzinfo: _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") utc = attr.utctimetuple() - except AttributeError: - raise TypeError("RFC1123 object must be valid Datetime object.") + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( Serializer.days[utc.tm_wday], @@ -1147,12 +1132,13 @@ def serialize_rfc(attr, **kwargs): ) @staticmethod - def serialize_iso(attr, **kwargs): + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument """Serialize Datetime object into ISO-8601 formatted string. :param Datetime attr: Object to be serialized. :rtype: str - :raises: SerializationError if format invalid. + :raises SerializationError: if format invalid. + :return: serialized iso """ if isinstance(attr, str): attr = isodate.parse_datetime(attr) @@ -1172,19 +1158,20 @@ def serialize_iso(attr, **kwargs): return date + microseconds + "Z" except (ValueError, OverflowError) as err: msg = "Unable to serialize datetime object." 
- raise_with_traceback(SerializationError, msg, err) + raise SerializationError(msg) from err except AttributeError as err: msg = "ISO-8601 object must be valid Datetime object." - raise_with_traceback(TypeError, msg, err) + raise TypeError(msg) from err @staticmethod - def serialize_unix(attr, **kwargs): + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument """Serialize Datetime object into IntTime format. This is represented as seconds. :param Datetime attr: Object to be serialized. :rtype: int - :raises: SerializationError if format invalid + :raises SerializationError: if format invalid + :return: serialied unix """ if isinstance(attr, int): return attr @@ -1192,17 +1179,17 @@ def serialize_unix(attr, **kwargs): if not attr.tzinfo: _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") return int(calendar.timegm(attr.utctimetuple())) - except AttributeError: - raise TypeError("Unix time object must be valid Datetime object.") + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc -def rest_key_extractor(attr, attr_desc, data): +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument key = attr_desc["key"] working_data = data while "." 
in key: # Need the cast, as for some reasons "split" is typed as list[str | Any] - dict_keys = cast(List[str], _FLATTEN.split(key)) + dict_keys = cast(list[str], _FLATTEN.split(key)) if len(dict_keys) == 1: key = _decode_attribute_map_key(dict_keys[0]) break @@ -1211,14 +1198,15 @@ def rest_key_extractor(attr, attr_desc, data): if working_data is None: # If at any point while following flatten JSON path see None, it means # that all properties under are None as well - # https://github.com/Azure/msrest-for-python/issues/197 return None key = ".".join(dict_keys[1:]) return working_data.get(key) -def rest_key_case_insensitive_extractor(attr, attr_desc, data): +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): key = attr_desc["key"] working_data = data @@ -1232,7 +1220,6 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data): if working_data is None: # If at any point while following flatten JSON path see None, it means # that all properties under are None as well - # https://github.com/Azure/msrest-for-python/issues/197 return None key = ".".join(dict_keys[1:]) @@ -1240,17 +1227,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data): return attribute_key_case_insensitive_extractor(key, None, working_data) -def last_rest_key_extractor(attr, attr_desc, data): - """Extract the attribute in "data" based on the last part of the JSON path key.""" +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. 
+ + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ key = attr_desc["key"] dict_keys = _FLATTEN.split(key) return attribute_key_extractor(dict_keys[-1], None, data) -def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument """Extract the attribute in "data" based on the last part of the JSON path key. This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute """ key = attr_desc["key"] dict_keys = _FLATTEN.split(key) @@ -1287,7 +1286,7 @@ def _extract_name_from_internal_type(internal_type): return xml_name -def xml_key_extractor(attr, attr_desc, data): +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements if isinstance(data, dict): return None @@ -1339,22 +1338,21 @@ def xml_key_extractor(attr, attr_desc, data): if is_iter_type: if is_wrapped: return None # is_wrapped no node, we want None - else: - return [] # not wrapped, assume empty list + return [] # not wrapped, assume empty list return None # Assume it's not there, maybe an optional node. # If is_iter_type and not wrapped, return all found children if is_iter_type: if not is_wrapped: return children - else: # Iter and wrapped, should have found one node only (the wrap one) - if len(children) != 1: - raise DeserializationError( - "Tried to deserialize an array not wrapped, and found several nodes '{}'. 
Maybe you should declare this array as wrapped?".format( - xml_name - ) + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name ) - return list(children[0]) # Might be empty list and that's ok. + ) + return list(children[0]) # Might be empty list and that's ok. # Here it's not a itertype, we should have found one element only or empty if len(children) > 1: @@ -1362,7 +1360,7 @@ def xml_key_extractor(attr, attr_desc, data): return children[0] -class Deserializer(object): +class Deserializer: """Response object model deserializer. :param dict classes: Class type dictionary for deserializing complex types. @@ -1371,9 +1369,9 @@ class Deserializer(object): basic_types = {str: "str", int: "int", bool: "bool", float: "float"} - valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") - def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None): + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: self.deserialize_type = { "iso-8601": Deserializer.deserialize_iso, "rfc-1123": Deserializer.deserialize_rfc, @@ -1393,7 +1391,7 @@ def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None): "duration": (isodate.Duration, datetime.timedelta), "iso-8601": (datetime.datetime), } - self.dependencies: Dict[str, Type[ModelType]] = dict(classes) if classes else {} + self.dependencies: dict[str, type] = dict(classes) if classes else {} self.key_extractors = [rest_key_extractor, xml_key_extractor] # Additional properties only works if the "rest_key_extractor" is used to # extract the keys. 
Making it to work whatever the key extractor is too much @@ -1409,27 +1407,29 @@ def __call__(self, target_obj, response_data, content_type=None): :param str target_obj: Target data type to deserialize to. :param requests.Response response_data: REST response object. :param str content_type: Swagger "produces" if available. - :raises: DeserializationError if deserialization fails. + :raises DeserializationError: if deserialization fails. :return: Deserialized object. + :rtype: object """ data = self._unpack_content(response_data, content_type) return self._deserialize(target_obj, data) - def _deserialize(self, target_obj, data): + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements """Call the deserializer on a model. Data needs to be already deserialized as JSON or XML ElementTree :param str target_obj: Target data type to deserialize to. :param object data: Object to deserialize. - :raises: DeserializationError if deserialization fails. + :raises DeserializationError: if deserialization fails. :return: Deserialized object. 
+ :rtype: object """ # This is already a model, go recursive just in case if hasattr(data, "_attribute_map"): constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] try: - for attr, mapconfig in data._attribute_map.items(): + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access if attr in constants: continue value = getattr(data, attr) @@ -1446,15 +1446,15 @@ def _deserialize(self, target_obj, data): response, class_name = self._classify_target(target_obj, data) - if isinstance(response, basestring): + if isinstance(response, str): return self.deserialize_data(data, response) - elif isinstance(response, type) and issubclass(response, Enum): + if isinstance(response, type) and issubclass(response, Enum): return self.deserialize_enum(data, response) - if data is None: + if data is None or data is CoreNull: return data try: - attributes = response._attribute_map # type: ignore + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access d_attrs = {} for attr, attr_desc in attributes.items(): # Check empty string. If it's not empty, someone has a real "additionalProperties"... 
@@ -1483,10 +1483,9 @@ def _deserialize(self, target_obj, data): d_attrs[attr] = value except (AttributeError, TypeError, KeyError) as err: msg = "Unable to deserialize to object: " + class_name # type: ignore - raise_with_traceback(DeserializationError, msg, err) - else: - additional_properties = self._build_additional_properties(attributes, data) - return self._instantiate_model(response, d_attrs, additional_properties) + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) def _build_additional_properties(self, attribute_map, data): if not self.additional_properties_detection: @@ -1513,18 +1512,20 @@ def _classify_target(self, target, data): :param str target: The target object type to deserialize to. :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. + :rtype: tuple """ if target is None: return None, None - if isinstance(target, basestring): + if isinstance(target, str): try: target = self.dependencies[target] except KeyError: return target, target try: - target = target._classify(data, self.dependencies) + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access except AttributeError: pass # Target is not a Model, no classify return target, target.__class__.__name__ # type: ignore @@ -1539,10 +1540,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None): :param str target_obj: The target object type to deserialize to. :param str/dict data: The response data to deserialize. :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object """ try: return self(target_obj, data, content_type=content_type) - except: + except: # pylint: disable=bare-except _LOGGER.debug( "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True ) @@ -1560,10 +1563,12 @@ def _unpack_content(raw_data, content_type=None): If raw_data is something else, bypass all logic and return it directly. - :param raw_data: Data to be processed. - :param content_type: How to parse if raw_data is a string/bytes. + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. :raises JSONDecodeError: If JSON is requested and parsing is impossible. :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. """ # Assume this is enough to detect a Pipeline Response without importing it context = getattr(raw_data, "context", {}) @@ -1580,31 +1585,42 @@ def _unpack_content(raw_data, content_type=None): if hasattr(raw_data, "_content_consumed"): return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) - if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"): + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore return raw_data def _instantiate_model(self, response, attrs, additional_properties=None): """Instantiate a response model passing in deserialized args. - :param response: The response model class. - :param d_attrs: The deserialized response attributes. + :param Response response: The response model class. + :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. 
""" if callable(response): subtype = getattr(response, "_subtype_map", {}) try: - readonly = [k for k, v in response._validation.items() if v.get("readonly")] - const = [k for k, v in response._validation.items() if v.get("constant")] + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} response_obj = response(**kwargs) for attr in readonly: setattr(response_obj, attr, attrs.get(attr)) if additional_properties: - response_obj.additional_properties = additional_properties + response_obj.additional_properties = additional_properties # type: ignore return response_obj except TypeError as err: msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore - raise DeserializationError(msg + str(err)) + raise DeserializationError(msg + str(err)) from err else: try: for attr, value in attrs.items(): @@ -1613,15 +1629,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None): except Exception as exp: msg = "Unable to populate response model. " msg += "Type: {}, Error: {}".format(type(response), exp) - raise DeserializationError(msg) + raise DeserializationError(msg) from exp - def deserialize_data(self, data, data_type): + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements """Process data for deserialization according to data type. :param str data: The response string to be deserialized. :param str data_type: The type to deserialize to. - :raises: DeserializationError if deserialization fails. + :raises DeserializationError: if deserialization fails. :return: Deserialized object. 
+ :rtype: object """ if data is None: return data @@ -1635,7 +1652,11 @@ def deserialize_data(self, data, data_type): if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): return data - is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: return None data_val = self.deserialize_type[data_type](data) @@ -1654,15 +1675,15 @@ def deserialize_data(self, data, data_type): except (ValueError, TypeError, AttributeError) as err: msg = "Unable to deserialize response data." msg += " Data: {}, {}".format(data, data_type) - raise_with_traceback(DeserializationError, msg, err) - else: - return self._deserialize(obj_type, data) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) def deserialize_iter(self, attr, iter_type): """Deserialize an iterable. :param list attr: Iterable to be deserialized. :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. :rtype: list """ if attr is None: @@ -1679,6 +1700,7 @@ def deserialize_dict(self, attr, dict_type): :param dict/list attr: Dictionary to be deserialized. Also accepts a list of key, value pairs. :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. :rtype: dict """ if isinstance(attr, list): @@ -1689,20 +1711,21 @@ def deserialize_dict(self, attr, dict_type): attr = {el.tag: el.text for el in attr} return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} - def deserialize_object(self, attr, **kwargs): + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements """Deserialize a generic object. This will be handled as a dictionary. :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. 
:rtype: dict - :raises: TypeError if non-builtin datatype encountered. + :raises TypeError: if non-builtin datatype encountered. """ if attr is None: return None if isinstance(attr, ET.Element): # Do no recurse on XML, just return the tree as-is return attr - if isinstance(attr, basestring): + if isinstance(attr, str): return self.deserialize_basic(attr, "str") obj_type = type(attr) if obj_type in self.basic_types: @@ -1728,11 +1751,10 @@ def deserialize_object(self, attr, **kwargs): pass return deserialized - else: - error = "Cannot deserialize generic object with type: " - raise TypeError(error + str(obj_type)) + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) - def deserialize_basic(self, attr, data_type): + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements """Deserialize basic builtin data type from string. Will attempt to convert to str, int, float and bool. This function will also accept '1', '0', 'true' and 'false' as @@ -1740,8 +1762,9 @@ def deserialize_basic(self, attr, data_type): :param str attr: response string to be deserialized. :param str data_type: deserialization data type. + :return: Deserialized basic type. :rtype: str, int, float or bool - :raises: TypeError if string format is not valid. + :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool. """ # If we're here, data is supposed to be a basic type. # If it's still an XML node, take the text @@ -1751,24 +1774,27 @@ def deserialize_basic(self, attr, data_type): if data_type == "str": # None or '', node is empty string. return "" - else: - # None or '', node with a strong type is None. - # Don't try to model "empty bool" or "empty int" - return None + # None or '', node with a strong type is None. 
+ # Don't try to model "empty bool" or "empty int" + return None if data_type == "bool": if attr in [True, False, 1, 0]: return bool(attr) - elif isinstance(attr, basestring): + if isinstance(attr, str): if attr.lower() in ["true", "1"]: return True - elif attr.lower() in ["false", "0"]: + if attr.lower() in ["false", "0"]: return False raise TypeError("Invalid boolean value: {}".format(attr)) if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec + if data_type == "int": + return int(attr) + if data_type == "float": + return float(attr) + raise TypeError("Unknown basic data type: {}".format(data_type)) @staticmethod def deserialize_unicode(data): @@ -1776,6 +1802,7 @@ def deserialize_unicode(data): as a string. :param str data: response string to be deserialized. + :return: Deserialized string. :rtype: str or unicode """ # We might be here because we have an enum modeled as string, @@ -1789,8 +1816,7 @@ def deserialize_unicode(data): return data except NameError: return str(data) - else: - return str(data) + return str(data) @staticmethod def deserialize_enum(data, enum_obj): @@ -1802,6 +1828,7 @@ def deserialize_enum(data, enum_obj): :param str data: Response string to be deserialized. If this value is None or invalid it will be returned as-is. :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. :rtype: Enum """ if isinstance(data, enum_obj) or data is None: @@ -1810,12 +1837,11 @@ def deserialize_enum(data, enum_obj): data = data.value if isinstance(data, int): # Workaround. We might consider remove it in the future. 
- # https://github.com/Azure/azure-rest-api-specs/issues/141 try: return list(enum_obj.__members__.values())[data] - except IndexError: + except IndexError as exc: error = "{!r} is not a valid index for enum {!r}" - raise DeserializationError(error.format(data, enum_obj)) + raise DeserializationError(error.format(data, enum_obj)) from exc try: return enum_obj(str(data)) except ValueError: @@ -1831,8 +1857,9 @@ def deserialize_bytearray(attr): """Deserialize string into bytearray. :param str attr: response string to be deserialized. + :return: Deserialized bytearray :rtype: bytearray - :raises: TypeError if string format invalid. + :raises TypeError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1843,8 +1870,9 @@ def deserialize_base64(attr): """Deserialize base64 encoded string into string. :param str attr: response string to be deserialized. + :return: Deserialized base64 string :rtype: bytearray - :raises: TypeError if string format invalid. + :raises TypeError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1858,24 +1886,26 @@ def deserialize_decimal(attr): """Deserialize string into Decimal object. :param str attr: response string to be deserialized. - :rtype: Decimal - :raises: DeserializationError if string format invalid. + :return: Deserialized decimal + :raises DeserializationError: if string format invalid. + :rtype: decimal """ if isinstance(attr, ET.Element): attr = attr.text try: - return decimal.Decimal(attr) # type: ignore + return decimal.Decimal(str(attr)) # type: ignore except decimal.DecimalException as err: msg = "Invalid decimal {}".format(attr) - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err @staticmethod def deserialize_long(attr): """Deserialize string into long (Py2) or int (Py3). :param str attr: response string to be deserialized. 
+ :return: Deserialized int :rtype: long or int - :raises: ValueError if string format invalid. + :raises ValueError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1886,8 +1916,9 @@ def deserialize_duration(attr): """Deserialize ISO-8601 formatted string into TimeDelta object. :param str attr: response string to be deserialized. + :return: Deserialized duration :rtype: TimeDelta - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1895,17 +1926,17 @@ def deserialize_duration(attr): duration = isodate.parse_duration(attr) except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize duration object." - raise_with_traceback(DeserializationError, msg, err) - else: - return duration + raise DeserializationError(msg) from err + return duration @staticmethod def deserialize_date(attr): """Deserialize ISO-8601 formatted string into Date object. :param str attr: response string to be deserialized. + :return: Deserialized date :rtype: Date - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1919,8 +1950,9 @@ def deserialize_time(attr): """Deserialize ISO-8601 formatted string into time object. :param str attr: response string to be deserialized. + :return: Deserialized time :rtype: datetime.time - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1933,31 +1965,32 @@ def deserialize_rfc(attr): """Deserialize RFC-1123 formatted string into Datetime object. :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime :rtype: Datetime - :raises: DeserializationError if string format invalid. 
+ :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: parsed_date = email.utils.parsedate_tz(attr) # type: ignore date_obj = datetime.datetime( - *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) ) if not date_obj.tzinfo: date_obj = date_obj.astimezone(tz=TZ_UTC) except ValueError as err: msg = "Cannot deserialize to rfc datetime object." - raise_with_traceback(DeserializationError, msg, err) - else: - return date_obj + raise DeserializationError(msg) from err + return date_obj @staticmethod def deserialize_iso(attr): """Deserialize ISO-8601 formatted string into Datetime object. :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime :rtype: Datetime - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1984,9 +2017,8 @@ def deserialize_iso(attr): raise OverflowError("Hit max or min date") except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize datetime object." - raise_with_traceback(DeserializationError, msg, err) - else: - return date_obj + raise DeserializationError(msg) from err + return date_obj @staticmethod def deserialize_unix(attr): @@ -1994,15 +2026,16 @@ def deserialize_unix(attr): This is represented as seconds. :param int attr: Object to be serialized. + :return: Deserialized datetime :rtype: Datetime - :raises: DeserializationError if format invalid + :raises DeserializationError: if format invalid """ if isinstance(attr, ET.Element): attr = int(attr.text) # type: ignore try: + attr = int(attr) date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) except ValueError as err: msg = "Cannot deserialize to unix datetime object." 
- raise_with_traceback(DeserializationError, msg, err) - else: - return date_obj + raise DeserializationError(msg) from err + return date_obj diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/_vendor.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/_utils/utils.py similarity index 96% rename from sdk/tables/azure-data-tables/azure/data/tables/_generated/_vendor.py rename to sdk/tables/azure-data-tables/azure/data/tables/_generated/_utils/utils.py index 5ce21f06727e..cbaa624660e4 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/_vendor.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/_utils/utils.py @@ -1,7 +1,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/_version.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/_version.py index 0a99d31fccc0..2c3e0feeddce 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/_version.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/_version.py @@ -2,8 +2,8 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "2019-02-02" +VERSION = "12.8.0b1" diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/__init__.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/__init__.py index ae03cc3ab39b..18e347829c41 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/__init__.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/__init__.py @@ -2,22 +2,28 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._client import AzureTable +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import AzureTableClient # type: ignore try: from ._patch import __all__ as _patch_all - from ._patch import * # pylint: disable=unused-wildcard-import + from ._patch import * except ImportError: _patch_all = [] from ._patch import patch_sdk as _patch_sdk __all__ = [ - "AzureTable", + "AzureTableClient", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/_client.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/_client.py index 585004a33b2d..62fd135ebaf4 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/_client.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/_client.py @@ -2,52 +2,75 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from copy import deepcopy -from typing import Any, Awaitable +from typing import Any, Awaitable, TYPE_CHECKING +from typing_extensions import Self from azure.core import AsyncPipelineClient +from azure.core.pipeline import policies from azure.core.rest import AsyncHttpResponse, HttpRequest -from .. 
import models as _models -from .._serialization import Deserializer, Serializer -from ._configuration import AzureTableConfiguration +from .._utils.serialization import Deserializer, Serializer +from ._configuration import AzureTableClientConfiguration from .operations import ServiceOperations, TableOperations +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential -class AzureTable: # pylint: disable=client-accepts-api-version-keyword - """AzureTable. + +class AzureTableClient: + """AzureTableClient. :ivar table: TableOperations operations - :vartype table: azure.table.aio.operations.TableOperations + :vartype table: azure.data.tables.aio.operations.TableOperations :ivar service: ServiceOperations operations - :vartype service: azure.table.aio.operations.ServiceOperations - :param url: The URL of the service account or table that is the target of the desired - operation. Required. + :vartype service: azure.data.tables.aio.operations.ServiceOperations + :param url: The host name of the tables account, e.g. accountName.table.core.windows.net. + Required. :type url: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2019-02-02". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version. Known values are "2019-02-02" and None. Default value is + "2019-02-02". Note that overriding this default value may result in unsupported behavior. 
+ :paramtype api_version: str """ - def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, url: str, **kwargs: Any - ) -> None: + def __init__(self, url: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: _endpoint = "{url}" - self._config = AzureTableConfiguration(url=url, **kwargs) - self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, config=self._config, **kwargs) - - client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._deserialize = Deserializer(client_models) + self._config = AzureTableClientConfiguration(url=url, credential=credential, **kwargs) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() self._serialize.client_side_validation = False self.table = TableOperations(self._client, self._config, self._serialize, self._deserialize) self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize) - def send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]: + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: """Runs the network request through the 
client's chained policies. >>> from azure.core.rest import HttpRequest @@ -71,12 +94,12 @@ def send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHt } request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) - return self._client.send_request(request_copy, **kwargs) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "AzureTable": + async def __aenter__(self) -> Self: await self._client.__aenter__() return self diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/_configuration.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/_configuration.py index 65d6a3c7afcd..23be8c99142b 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/_configuration.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/_configuration.py @@ -2,48 +2,50 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys -from typing import Any +from typing import Any, TYPE_CHECKING -from azure.core.configuration import Configuration from azure.core.pipeline import policies from .._version import VERSION -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential -class AzureTableConfiguration(Configuration): # pylint: disable=too-many-instance-attributes - """Configuration for AzureTable. +class AzureTableClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for AzureTableClient. Note that all parameters used to create this instance are saved as instance attributes. - :param url: The URL of the service account or table that is the target of the desired - operation. Required. + :param url: The host name of the tables account, e.g. accountName.table.core.windows.net. + Required. :type url: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2019-02-02". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version. Known values are "2019-02-02" and None. Default value is + "2019-02-02". Note that overriding this default value may result in unsupported behavior. 
+ :paramtype api_version: str """ - def __init__(self, url: str, **kwargs: Any) -> None: - super(AzureTableConfiguration, self).__init__(**kwargs) - version: Literal["2019-02-02"] = kwargs.pop("version", "2019-02-02") + def __init__(self, url: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2019-02-02") if url is None: raise ValueError("Parameter 'url' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") self.url = url - self.version = version - kwargs.setdefault("sdk_moniker", "table/{}".format(VERSION)) + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://storage.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "data-tables/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) def _configure(self, **kwargs: Any) -> None: @@ -52,7 +54,11 @@ def _configure(self, **kwargs: Any) -> None: self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git 
a/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/_patch.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/_patch.py index f7dd32510333..87676c65a8f0 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/_patch.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/_patch.py @@ -1,14 +1,15 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/operations/__init__.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/operations/__init__.py index 8467e05a2502..7b004d4708fa 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/operations/__init__.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/operations/__init__.py @@ -2,20 +2,26 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._operations import TableOperations -from ._operations import ServiceOperations +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import TableOperations # type: ignore +from ._operations import ServiceOperations # type: ignore from ._patch import __all__ as _patch_all -from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ "TableOperations", "ServiceOperations", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/operations/_operations.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/operations/_operations.py index 769fa6a4fb2f..49dffcf09511 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/operations/_operations.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/operations/_operations.py @@ -1,16 +1,19 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload +import json +from typing import Any, Callable, IO, Literal, Optional, TypeVar, Union, overload +import urllib.parse -from azure.core import MatchConditions +from azure.core import AsyncPipelineClient, MatchConditions +from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -18,14 +21,26 @@ ResourceModifiedError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from ... 
import models as _models +from ..._utils.model_base import ( + SdkJSONEncoder, + _deserialize, + _deserialize_xml, + _failsafe_deserialize, + _failsafe_deserialize_xml, + _get_element, +) +from ..._utils.serialization import Deserializer, Serializer from ...operations._operations import ( build_service_get_properties_request, build_service_get_statistics_request, @@ -42,13 +57,10 @@ build_table_set_access_policy_request, build_table_update_entity_request, ) +from .._configuration import AzureTableClientConfiguration -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] class TableOperations: @@ -57,115 +69,112 @@ class TableOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.table.aio.AzureTable`'s + :class:`~azure.data.tables.aio.AzureTableClient`'s :attr:`table` attribute. 
""" - models = _models - def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AzureTableClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace_async - async def query( + @distributed_trace + def query( self, *, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, top: Optional[int] = None, select: Optional[str] = None, filter: Optional[str] = None, - next_table_name: Optional[str] = None, **kwargs: Any - ) -> _models.TableQueryResponse: + ) -> AsyncItemPaged["_models.TableProperties"]: """Queries tables under the given account. - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat - :keyword top: Maximum number of records to return. Default value is None. + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat + :keyword top: Specifies the maximum number of records to return. Default value is None. :paramtype top: int :keyword select: Select expression using OData notation. Limits the columns on each record to - just those requested, e.g. 
"$select=PolicyAssignmentId, ResourceId". Default value is None. + just those requested. Default value is None. :paramtype select: str :keyword filter: OData filter expression. Default value is None. :paramtype filter: str - :keyword next_table_name: A table query continuation token from a previous call. Default value - is None. - :paramtype next_table_name: str - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. - :paramtype data_service_version: str - :return: TableQueryResponse - :rtype: ~azure.table.models.TableQueryResponse + :return: An iterator like instance of TableProperties + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.data.tables._generated.models.TableProperties] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} data_service_version: Literal["3.0"] = kwargs.pop( "data_service_version", _headers.pop("DataServiceVersion", "3.0") ) - cls: ClsType[_models.TableQueryResponse] = kwargs.pop("cls", None) + cls: ClsType[list[_models.TableProperties]] = kwargs.pop("cls", None) - request = build_table_query_request( - format=format, - top=top, - select=select, - filter=filter, - next_table_name=next_table_name, - data_service_version=data_service_version, - version=self._config.version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: 
ResourceNotModifiedError, } - request.url = self._client.format_url(request.url, **path_format_arguments) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) + error_map.update(kwargs.pop("error_map", {}) or {}) - response = pipeline_response.http_response + def prepare_request(_continuation_token=None): + + _request = build_table_query_request( + format=format, + top=top, + select=select, + filter=filter, + next_table_name=_continuation_token, + data_service_version=data_service_version, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + list[_models.TableProperties], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return pipeline_response.http_response.headers.get("x-ms-continuation-NextTableName") or None, AsyncList( + list_of_elem + ) - if response.status_code not in [200]: - if _stream: - await response.read() # Load the body in memory and close the socket - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + async def get_next(_continuation_token=None): + _request = prepare_request(_continuation_token) - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", 
response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-continuation-NextTableName"] = self._deserialize( - "str", response.headers.get("x-ms-continuation-NextTableName") - ) + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response - deserialized = self._deserialize("TableQueryResponse", pipeline_response) + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) + raise HttpResponseError(response=response, model=error) - if cls: - return cls(pipeline_response, deserialized, response_headers) + return pipeline_response - return deserialized + return AsyncItemPaged(get_next, extract_data) @distributed_trace_async async def create( @@ -178,24 +187,22 @@ async def create( ) -> Optional[_models.TableResponse]: """Creates a new table under the given account. - :param table_properties: The Table properties. Required. - :type table_properties: ~azure.table.models.TableProperties - :keyword format: Specifies the media type for the response. Known values are: + :param table_properties: The table properties to create. Required. + :type table_properties: ~azure.data.tables._generated.models.TableProperties + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat - :keyword response_preference: Specifies whether the response should include the inserted entity - in the payload. Possible values are return-no-content and return-content. 
Known values are: + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat + :keyword response_preference: Specifies whether the response should include the created table + in the + payload. Possible values are return-no-content and return-content. Known values are: "return-no-content" and "return-content". Default value is None. - :paramtype response_preference: str or ~azure.table.models.ResponseFormat - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. - :paramtype data_service_version: str - :return: TableResponse or None - :rtype: ~azure.table.models.TableResponse or None + :paramtype response_preference: str or ~azure.data.tables.models.ResponseFormat + :return: TableResponse or None. The TableResponse is compatible with MutableMapping + :rtype: ~azure.data.tables._generated.models.TableResponse or None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -214,71 +221,82 @@ async def create( ) cls: ClsType[Optional[_models.TableResponse]] = kwargs.pop("cls", None) - _json = self._serialize.body(table_properties, "TableProperties") + _content = json.dumps(table_properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - request = build_table_create_request( + _request = build_table_create_request( format=format, response_preference=response_preference, - data_service_version=data_service_version, content_type=content_type, - version=self._config.version, - json=_json, + data_service_version=data_service_version, + api_version=self._config.api_version, + content=_content, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, 
**path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [201, 204]: if _stream: - await response.read() # Load the body in memory and close the socket + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) deserialized = None response_headers = {} if response.status_code == 201: + response_headers["Preference-Applied"] = self._deserialize( + "str", response.headers.get("Preference-Applied") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["Preference-Applied"] = self._deserialize( - "str", response.headers.get("Preference-Applied") - ) + response_headers["Content-Type"] = self._deserialize("str", 
response.headers.get("Content-Type")) - deserialized = self._deserialize("TableResponse", pipeline_response) + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.TableResponse, response.json()) if response.status_code == 204: + response_headers["Preference-Applied"] = self._deserialize( + "str", response.headers.get("Preference-Applied") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["Preference-Applied"] = self._deserialize( - "str", response.headers.get("Preference-Applied") - ) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore @distributed_trace_async - async def delete(self, table: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements - """Operation permanently deletes the specified table. + async def delete(self, table: str, **kwargs: Any) -> None: + """Deletes an existing table. :param table: The name of the table. Required. 
:type table: str @@ -286,7 +304,7 @@ async def delete(self, table: str, **kwargs: Any) -> None: # pylint: disable=in :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -299,87 +317,86 @@ async def delete(self, table: str, **kwargs: Any) -> None: # pylint: disable=in cls: ClsType[None] = kwargs.pop("cls", None) - request = build_table_delete_request( + _request = build_table_delete_request( table=table, - version=self._config.version, + api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [204]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] 
= self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async async def query_entities( self, table: str, *, - timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, top: Optional[int] = None, select: Optional[str] = None, filter: Optional[str] = None, + timeout: Optional[int] = None, next_partition_key: Optional[str] = None, next_row_key: Optional[str] = None, **kwargs: Any ) -> _models.TableEntityQueryResponse: - """Queries entities in a table. + """Queries entities under the given table. :param table: The name of the table. Required. :type table: str - :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. - :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat - :keyword top: Maximum number of records to return. Default value is None. + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat + :keyword top: Specifies the maximum number of records to return. Default value is None. :paramtype top: int :keyword select: Select expression using OData notation. Limits the columns on each record to - just those requested, e.g. "$select=PolicyAssignmentId, ResourceId". Default value is None. + just those requested. Default value is None. 
:paramtype select: str :keyword filter: OData filter expression. Default value is None. :paramtype filter: str - :keyword next_partition_key: An entity query continuation token from a previous call. Default - value is None. + :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. + :paramtype timeout: int + :keyword next_partition_key: An entity partition key query continuation token from a previous + call. Default value is None. :paramtype next_partition_key: str - :keyword next_row_key: An entity query continuation token from a previous call. Default value - is None. + :keyword next_row_key: An entity row key query continuation token from a previous call. Default + value is None. :paramtype next_row_key: str - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. - :paramtype data_service_version: str - :return: TableEntityQueryResponse - :rtype: ~azure.table.models.TableEntityQueryResponse + :return: TableEntityQueryResponse. 
The TableEntityQueryResponse is compatible with + MutableMapping + :rtype: ~azure.data.tables._generated.models.TableEntityQueryResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -387,7 +404,7 @@ async def query_entities( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} data_service_version: Literal["3.0"] = kwargs.pop( @@ -395,59 +412,70 @@ async def query_entities( ) cls: ClsType[_models.TableEntityQueryResponse] = kwargs.pop("cls", None) - request = build_table_query_entities_request( + _request = build_table_query_entities_request( table=table, - timeout=timeout, format=format, top=top, select=select, filter=filter, + timeout=timeout, next_partition_key=next_partition_key, next_row_key=next_row_key, data_service_version=data_service_version, - version=self._config.version, + api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200]: if _stream: - await response.read() # Load the body in memory and close the socket + try: + await response.read() # Load the body in memory and close the socket + except 
(StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) response_headers["x-ms-continuation-NextPartitionKey"] = self._deserialize( "str", response.headers.get("x-ms-continuation-NextPartitionKey") ) response_headers["x-ms-continuation-NextRowKey"] = self._deserialize( "str", response.headers.get("x-ms-continuation-NextRowKey") ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - deserialized = self._deserialize("TableEntityQueryResponse", pipeline_response) + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.TableEntityQueryResponse, response.json()) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # 
type: ignore - return deserialized + return deserialized # type: ignore @distributed_trace_async async def query_entity_with_partition_and_row_key( @@ -461,8 +489,8 @@ async def query_entity_with_partition_and_row_key( select: Optional[str] = None, filter: Optional[str] = None, **kwargs: Any - ) -> Dict[str, Any]: - """Queries a single entity in a table. + ) -> dict[str, Any]: + """Retrieve a single entity. :param table: The name of the table. Required. :type table: str @@ -472,23 +500,20 @@ async def query_entity_with_partition_and_row_key( :type row_key: str :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword select: Select expression using OData notation. Limits the columns on each record to - just those requested, e.g. "$select=PolicyAssignmentId, ResourceId". Default value is None. + just those requested. Default value is None. :paramtype select: str :keyword filter: OData filter expression. Default value is None. :paramtype filter: str - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str :return: dict mapping str to any :rtype: dict[str, any] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -496,15 +521,15 @@ async def query_entity_with_partition_and_row_key( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} data_service_version: Literal["3.0"] = kwargs.pop( "data_service_version", _headers.pop("DataServiceVersion", "3.0") ) - cls: ClsType[Dict[str, Any]] = kwargs.pop("cls", None) + cls: ClsType[dict[str, Any]] = kwargs.pop("cls", None) - request = build_table_query_entity_with_partition_and_row_key_request( + _request = build_table_query_entity_with_partition_and_row_key_request( table=table, partition_key=partition_key, row_key=row_key, @@ -513,36 +538,37 @@ async def query_entity_with_partition_and_row_key( select=select, filter=filter, data_service_version=data_service_version, - version=self._config.version, + api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200]: if _stream: - await response.read() # Load the body in memory and close the socket + try: + await 
response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) response_headers["x-ms-continuation-NextPartitionKey"] = self._deserialize( "str", response.headers.get("x-ms-continuation-NextPartitionKey") @@ -550,21 +576,31 @@ async def query_entity_with_partition_and_row_key( response_headers["x-ms-continuation-NextRowKey"] = self._deserialize( "str", response.headers.get("x-ms-continuation-NextRowKey") ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - deserialized = self._deserialize("{object}", pipeline_response) + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + 
deserialized = _deserialize(dict[str, Any], response.json()) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore @overload - async def update_entity( # pylint: disable=inconsistent-return-statements + async def update_entity( self, table: str, partition_key: str, row_key: str, - table_entity_properties: Optional[Dict[str, Any]] = None, + table_entity_properties: Optional[dict[str, Any]] = None, *, timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, @@ -585,10 +621,10 @@ async def update_entity( # pylint: disable=inconsistent-return-statements :type table_entity_properties: dict[str, any] :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -597,21 +633,18 @@ async def update_entity( # pylint: disable=inconsistent-return-statements :paramtype etag: str :keyword match_condition: The match condition to use upon the etag. Default value is None. :paramtype match_condition: ~azure.core.MatchConditions - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ @overload - async def update_entity( # pylint: disable=inconsistent-return-statements + async def update_entity( self, table: str, partition_key: str, row_key: str, - table_entity_properties: Optional[IO] = None, + table_entity_properties: Optional[IO[bytes]] = None, *, timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, @@ -629,13 +662,13 @@ async def update_entity( # pylint: disable=inconsistent-return-statements :param row_key: The row key of the entity. Required. :type row_key: str :param table_entity_properties: The properties for the table entity. Default value is None. - :type table_entity_properties: IO + :type table_entity_properties: IO[bytes] :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -644,21 +677,18 @@ async def update_entity( # pylint: disable=inconsistent-return-statements :paramtype etag: str :keyword match_condition: The match condition to use upon the etag. Default value is None. :paramtype match_condition: ~azure.core.MatchConditions - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace_async - async def update_entity( # pylint: disable=inconsistent-return-statements + async def update_entity( self, table: str, partition_key: str, row_key: str, - table_entity_properties: Optional[Union[Dict[str, Any], IO]] = None, + table_entity_properties: Optional[Union[dict[str, Any], IO[bytes]]] = None, *, timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, @@ -675,30 +705,24 @@ async def update_entity( # pylint: disable=inconsistent-return-statements :param row_key: The row key of the entity. Required. :type row_key: str :param table_entity_properties: The properties for the table entity. Is either a {str: Any} - type or a IO type. Default value is None. - :type table_entity_properties: dict[str, any] or IO + type or a IO[bytes] type. Default value is None. + :type table_entity_properties: dict[str, any] or IO[bytes] :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is None. :paramtype etag: str :keyword match_condition: The match condition to use upon the etag. Default value is None. :paramtype match_condition: ~azure.core.MatchConditions - :keyword data_service_version: Specifies the data service version. Default value is "3.0". 
Note - that overriding this default value may result in unsupported behavior. - :paramtype data_service_version: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -719,20 +743,20 @@ async def update_entity( # pylint: disable=inconsistent-return-statements "data_service_version", _headers.pop("DataServiceVersion", "3.0") ) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if table_entity_properties else None cls: ClsType[None] = kwargs.pop("cls", None) - content_type = content_type or "application/json" - _json = None + content_type = content_type or "application/json" if table_entity_properties else None _content = None if isinstance(table_entity_properties, (IOBase, bytes)): _content = table_entity_properties else: if table_entity_properties is not None: - _json = self._serialize.body(table_entity_properties, "{object}") + _content = json.dumps(table_entity_properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore else: - _json = None + _content = None - request = build_table_update_entity_request( + _request = build_table_update_entity_request( table=table, partition_key=partition_key, row_key=row_key, @@ -742,8 +766,7 @@ async def update_entity( # pylint: disable=inconsistent-return-statements match_condition=match_condition, data_service_version=data_service_version, content_type=content_type, - version=self._config.version, - json=_json, + api_version=self._config.api_version, content=_content, headers=_headers, params=_params, @@ -751,41 +774,42 @@ async def update_entity( # pylint: disable=inconsistent-return-statements path_format_arguments = { "url": 
self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [204]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore @overload - async def merge_entity( # pylint: disable=inconsistent-return-statements + async def merge_entity( self, table: str, 
partition_key: str, row_key: str, - table_entity_properties: Optional[Dict[str, Any]] = None, + table_entity_properties: Optional[dict[str, Any]] = None, *, timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, @@ -806,10 +830,10 @@ async def merge_entity( # pylint: disable=inconsistent-return-statements :type table_entity_properties: dict[str, any] :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -818,21 +842,18 @@ async def merge_entity( # pylint: disable=inconsistent-return-statements :paramtype etag: str :keyword match_condition: The match condition to use upon the etag. Default value is None. :paramtype match_condition: ~azure.core.MatchConditions - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ @overload - async def merge_entity( # pylint: disable=inconsistent-return-statements + async def merge_entity( self, table: str, partition_key: str, row_key: str, - table_entity_properties: Optional[IO] = None, + table_entity_properties: Optional[IO[bytes]] = None, *, timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, @@ -850,13 +871,13 @@ async def merge_entity( # pylint: disable=inconsistent-return-statements :param row_key: The row key of the entity. Required. :type row_key: str :param table_entity_properties: The properties for the table entity. Default value is None. - :type table_entity_properties: IO + :type table_entity_properties: IO[bytes] :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -865,21 +886,18 @@ async def merge_entity( # pylint: disable=inconsistent-return-statements :paramtype etag: str :keyword match_condition: The match condition to use upon the etag. Default value is None. :paramtype match_condition: ~azure.core.MatchConditions - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace_async - async def merge_entity( # pylint: disable=inconsistent-return-statements + async def merge_entity( self, table: str, partition_key: str, row_key: str, - table_entity_properties: Optional[Union[Dict[str, Any], IO]] = None, + table_entity_properties: Optional[Union[dict[str, Any], IO[bytes]]] = None, *, timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, @@ -896,30 +914,24 @@ async def merge_entity( # pylint: disable=inconsistent-return-statements :param row_key: The row key of the entity. Required. :type row_key: str :param table_entity_properties: The properties for the table entity. Is either a {str: Any} - type or a IO type. Default value is None. - :type table_entity_properties: dict[str, any] or IO + type or a IO[bytes] type. Default value is None. + :type table_entity_properties: dict[str, any] or IO[bytes] :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is None. :paramtype etag: str :keyword match_condition: The match condition to use upon the etag. Default value is None. :paramtype match_condition: ~azure.core.MatchConditions - :keyword data_service_version: Specifies the data service version. Default value is "3.0". 
Note - that overriding this default value may result in unsupported behavior. - :paramtype data_service_version: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -940,20 +952,20 @@ async def merge_entity( # pylint: disable=inconsistent-return-statements "data_service_version", _headers.pop("DataServiceVersion", "3.0") ) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if table_entity_properties else None cls: ClsType[None] = kwargs.pop("cls", None) - content_type = content_type or "application/json" - _json = None + content_type = content_type or "application/json" if table_entity_properties else None _content = None if isinstance(table_entity_properties, (IOBase, bytes)): _content = table_entity_properties else: if table_entity_properties is not None: - _json = self._serialize.body(table_entity_properties, "{object}") + _content = json.dumps(table_entity_properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore else: - _json = None + _content = None - request = build_table_merge_entity_request( + _request = build_table_merge_entity_request( table=table, partition_key=partition_key, row_key=row_key, @@ -963,8 +975,7 @@ async def merge_entity( # pylint: disable=inconsistent-return-statements match_condition=match_condition, data_service_version=data_service_version, content_type=content_type, - version=self._config.version, - json=_json, + api_version=self._config.api_version, content=_content, headers=_headers, params=_params, @@ -972,36 +983,37 @@ async def merge_entity( # pylint: disable=inconsistent-return-statements path_format_arguments = { "url": 
self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [204]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def delete_entity( # pylint: disable=inconsistent-return-statements + async def delete_entity( self, table: 
str, partition_key: str, @@ -1027,18 +1039,15 @@ async def delete_entity( # pylint: disable=inconsistent-return-statements :paramtype match_condition: ~azure.core.MatchConditions :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. - :paramtype data_service_version: str + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1052,7 +1061,7 @@ async def delete_entity( # pylint: disable=inconsistent-return-statements error_map[412] = ResourceExistsError error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} data_service_version: Literal["3.0"] = kwargs.pop( @@ -1060,7 +1069,7 @@ async def delete_entity( # pylint: disable=inconsistent-return-statements ) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_table_delete_entity_request( + _request = build_table_delete_entity_request( table=table, partition_key=partition_key, row_key=row_key, @@ -1069,75 +1078,74 @@ async def delete_entity( # pylint: disable=inconsistent-return-statements timeout=timeout, format=format, 
data_service_version=data_service_version, - version=self._config.version, + api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [204]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async async def insert_entity( self, table: str, - table_entity_properties: Optional[Dict[str, Any]] = 
None, + table_entity_properties: Optional[dict[str, Any]] = None, *, timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, response_preference: Optional[Union[str, _models.ResponseFormat]] = None, **kwargs: Any - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: """Insert entity in a table. :param table: The name of the table. Required. :type table: str - :param table_entity_properties: The properties for the table entity. Default value is None. + :param table_entity_properties: The entity properties to insert. Default value is None. :type table_entity_properties: dict[str, any] :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword response_preference: Specifies whether the response should include the inserted entity - in the payload. Possible values are return-no-content and return-content. Known values are: + in the + payload. Possible values are return-no-content and return-content. Known values are: "return-no-content" and "return-content". Default value is None. - :paramtype response_preference: str or ~azure.table.models.ResponseFormat - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str + :paramtype response_preference: str or ~azure.data.tables.models.ResponseFormat :return: dict mapping str to any or None :rtype: dict[str, any] or None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1151,86 +1159,96 @@ async def insert_entity( data_service_version: Literal["3.0"] = kwargs.pop( "data_service_version", _headers.pop("DataServiceVersion", "3.0") ) - content_type: str = kwargs.pop( + content_type: Optional[str] = kwargs.pop( "content_type", _headers.pop("Content-Type", "application/json;odata=nometadata") ) - cls: ClsType[Optional[Dict[str, Any]]] = kwargs.pop("cls", None) + content_type = content_type if table_entity_properties else None + cls: ClsType[Optional[dict[str, Any]]] = kwargs.pop("cls", None) if table_entity_properties is not None: - _json = self._serialize.body(table_entity_properties, "{object}") + _content = json.dumps(table_entity_properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore else: - _json = None + _content = None - request = build_table_insert_entity_request( + _request = build_table_insert_entity_request( table=table, timeout=timeout, format=format, response_preference=response_preference, - data_service_version=data_service_version, content_type=content_type, - version=self._config.version, - json=_json, + data_service_version=data_service_version, + api_version=self._config.api_version, + content=_content, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) 
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [201, 204]: if _stream: - await response.read() # Load the body in memory and close the socket + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) deserialized = None response_headers = {} if response.status_code == 201: + response_headers["Preference-Applied"] = self._deserialize( + "str", response.headers.get("Preference-Applied") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Preference-Applied"] = self._deserialize( - "str", response.headers.get("Preference-Applied") - ) response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - deserialized = self._deserialize("{object}", pipeline_response) + if _stream: + deserialized = 
response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(dict[str, Any], response.json()) if response.status_code == 204: + response_headers["Preference-Applied"] = self._deserialize( + "str", response.headers.get("Preference-Applied") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Preference-Applied"] = self._deserialize( - "str", response.headers.get("Preference-Applied") - ) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore @distributed_trace_async async def get_access_policy( self, table: str, *, timeout: Optional[int] = None, **kwargs: Any - ) -> List[_models.SignedIdentifier]: + ) -> _models.SignedIdentifiers: """Retrieves details about any stored access policies specified on the table that may be used with Shared Access Signatures. @@ -1238,15 +1256,11 @@ async def get_access_policy( :type table: str :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. 
:paramtype timeout: int - :keyword comp: Required query string to handle stored access policies for the table that may be - used with Shared Access Signatures. Default value is "acl". Note that overriding this default - value may result in unsupported behavior. - :paramtype comp: str - :return: SignedIdentifier - :rtype: ~azure.table.models.SignedIdentifier + :return: SignedIdentifiers. The SignedIdentifiers is compatible with MutableMapping + :rtype: ~azure.data.tables._generated.models.SignedIdentifiers :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1255,79 +1269,79 @@ async def get_access_policy( error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + _params = kwargs.pop("params", {}) or {} - comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) - cls: ClsType[List[_models.SignedIdentifier]] = kwargs.pop("cls", None) + cls: ClsType[_models.SignedIdentifiers] = kwargs.pop("cls", None) - request = build_table_get_access_policy_request( + _request = build_table_get_access_policy_request( table=table, timeout=timeout, - comp=comp, - version=self._config.version, + api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = 
pipeline_response.http_response if response.status_code not in [200]: if _stream: - await response.read() # Load the body in memory and close the socket + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize_xml( + _models.TableServiceError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - deserialized = self._deserialize("[SignedIdentifier]", pipeline_response) + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize_xml(_models.SignedIdentifiers, response.text()) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore @distributed_trace_async - async def set_access_policy( # pylint: 
disable=inconsistent-return-statements - self, - table: str, - table_acl: Optional[List[_models.SignedIdentifier]] = None, - *, - timeout: Optional[int] = None, - **kwargs: Any + async def set_access_policy( + self, table: str, table_acl: _models.SignedIdentifiers, *, timeout: Optional[int] = None, **kwargs: Any ) -> None: """Sets stored access policies for the table that may be used with Shared Access Signatures. :param table: The name of the table. Required. :type table: str - :param table_acl: The acls for the table. Default value is None. - :type table_acl: ~azure.table.models.SignedIdentifier + :param table_acl: The access control list for the table. Required. + :type table_acl: ~azure.data.tables._generated.models.SignedIdentifiers :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword comp: Required query string to handle stored access policies for the table that may be - used with Shared Access Signatures. Default value is "acl". Note that overriding this default - value may result in unsupported behavior. 
- :paramtype comp: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1336,26 +1350,18 @@ async def set_access_policy( # pylint: disable=inconsistent-return-statements error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + _params = kwargs.pop("params", {}) or {} - comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) cls: ClsType[None] = kwargs.pop("cls", None) - serialization_ctxt = {"xml": {"name": "SignedIdentifiers", "wrapped": True, "itemsName": "SignedIdentifier"}} - if table_acl is not None: - _content = self._serialize.body( - table_acl, "[SignedIdentifier]", is_xml=True, serialization_ctxt=serialization_ctxt - ) - else: - _content = None + _content = _get_element(table_acl) - request = build_table_set_access_policy_request( + _request = build_table_set_access_policy_request( table=table, timeout=timeout, - comp=comp, content_type=content_type, - version=self._config.version, + api_version=self._config.api_version, content=_content, headers=_headers, params=_params, @@ -1363,32 +1369,33 @@ async def set_access_policy( # pylint: disable=inconsistent-return-statements path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = 
pipeline_response.http_response if response.status_code not in [204]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize_xml( + _models.TableServiceError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore class ServiceOperations: @@ -1397,41 +1404,33 @@ class ServiceOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.table.aio.AzureTable`'s + :class:`~azure.data.tables.aio.AzureTableClient`'s :attr:`service` attribute. 
""" - models = _models - def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AzureTableClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def set_properties( # pylint: disable=inconsistent-return-statements + async def set_properties( self, table_service_properties: _models.TableServiceProperties, *, timeout: Optional[int] = None, **kwargs: Any ) -> None: """Sets properties for an account's Table service endpoint, including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules. - :param table_service_properties: The Table Service properties. Required. - :type table_service_properties: ~azure.table.models.TableServiceProperties + :param table_service_properties: The table service properties to set. Required. + :type table_service_properties: ~azure.data.tables._generated.models.TableServiceProperties :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword restype: Required query string to set the service properties. Default value is - "service". Note that overriding this default value may result in unsupported behavior. - :paramtype restype: str - :keyword comp: Required query string to set the service properties. Default value is - "properties". 
Note that overriding this default value may result in unsupported behavior. - :paramtype comp: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1440,21 +1439,17 @@ async def set_properties( # pylint: disable=inconsistent-return-statements error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + _params = kwargs.pop("params", {}) or {} - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) cls: ClsType[None] = kwargs.pop("cls", None) - _content = self._serialize.body(table_service_properties, "TableServiceProperties", is_xml=True) + _content = _get_element(table_service_properties) - request = build_service_set_properties_request( + _request = build_service_set_properties_request( timeout=timeout, - restype=restype, - comp=comp, content_type=content_type, - version=self._config.version, + api_version=self._config.api_version, content=_content, headers=_headers, params=_params, @@ -1462,31 +1457,32 @@ async def set_properties( # pylint: disable=inconsistent-return-statements path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = 
pipeline_response.http_response if response.status_code not in [202]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize_xml( + _models.TableServiceError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async async def get_properties(self, *, timeout: Optional[int] = None, **kwargs: Any) -> _models.TableServiceProperties: @@ -1495,17 +1491,11 @@ async def get_properties(self, *, timeout: Optional[int] = None, **kwargs: Any) :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword restype: Required query string to set the service properties. Default value is - "service". Note that overriding this default value may result in unsupported behavior. - :paramtype restype: str - :keyword comp: Required query string to set the service properties. Default value is - "properties". Note that overriding this default value may result in unsupported behavior. 
- :paramtype comp: str - :return: TableServiceProperties - :rtype: ~azure.table.models.TableServiceProperties + :return: TableServiceProperties. The TableServiceProperties is compatible with MutableMapping + :rtype: ~azure.data.tables._generated.models.TableServiceProperties :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1514,52 +1504,59 @@ async def get_properties(self, *, timeout: Optional[int] = None, **kwargs: Any) error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + _params = kwargs.pop("params", {}) or {} - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) cls: ClsType[_models.TableServiceProperties] = kwargs.pop("cls", None) - request = build_service_get_properties_request( + _request = build_service_get_properties_request( timeout=timeout, - restype=restype, - comp=comp, - version=self._config.version, + api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200]: if _stream: - await response.read() # Load the body in memory and close the 
socket + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize_xml( + _models.TableServiceError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - deserialized = self._deserialize("TableServiceProperties", pipeline_response) + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize_xml(_models.TableServiceProperties, response.text()) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore @distributed_trace_async async def get_statistics(self, *, timeout: Optional[int] = None, **kwargs: Any) -> _models.TableServiceStats: @@ -1569,17 +1566,11 @@ async def get_statistics(self, *, timeout: Optional[int] = None, **kwargs: Any) :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. 
:paramtype timeout: int - :keyword restype: Required query string to get service stats. Default value is "service". Note - that overriding this default value may result in unsupported behavior. - :paramtype restype: str - :keyword comp: Required query string to get service stats. Default value is "stats". Note that - overriding this default value may result in unsupported behavior. - :paramtype comp: str - :return: TableServiceStats - :rtype: ~azure.table.models.TableServiceStats + :return: TableServiceStats. The TableServiceStats is compatible with MutableMapping + :rtype: ~azure.data.tables._generated.models.TableServiceStats :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1588,50 +1579,57 @@ async def get_statistics(self, *, timeout: Optional[int] = None, **kwargs: Any) error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + _params = kwargs.pop("params", {}) or {} - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) cls: ClsType[_models.TableServiceStats] = kwargs.pop("cls", None) - request = build_service_get_statistics_request( + _request = build_service_get_statistics_request( timeout=timeout, - restype=restype, - comp=comp, - version=self._config.version, + api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", 
False) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200]: if _stream: - await response.read() # Load the body in memory and close the socket + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize_xml( + _models.TableServiceError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - deserialized = self._deserialize("TableServiceStats", pipeline_response) + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize_xml(_models.TableServiceStats, response.text()) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, 
deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/operations/_patch.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/operations/_patch.py index f7dd32510333..87676c65a8f0 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/operations/_patch.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/aio/operations/_patch.py @@ -1,14 +1,15 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/__init__.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/__init__.py index 6037cf52c41c..9b74b3e59bcc 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/__init__.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/__init__.py @@ -2,31 +2,42 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._models import AccessPolicy -from ._models import CorsRule -from ._models import GeoReplication -from ._models import Logging -from ._models import Metrics -from ._models import RetentionPolicy -from ._models import SignedIdentifier -from ._models import TableEntityQueryResponse -from ._models import TableProperties -from ._models import TableQueryResponse -from ._models import TableResponse -from ._models import TableResponseProperties -from ._models import TableServiceError -from ._models import TableServiceProperties -from ._models import TableServiceStats +from typing import TYPE_CHECKING -from ._enums import GeoReplicationStatusType -from ._enums import OdataMetadataFormat -from ._enums import ResponseFormat +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models import ( # type: ignore + AccessPolicy, + CorsRule, + GeoReplication, + Logging, + Metrics, + RetentionPolicy, + SignedIdentifier, + SignedIdentifiers, + TableEntityQueryResponse, + TableProperties, + TableResponse, + TableServiceError, + TableServiceProperties, + TableServiceStats, + TablesError, +) + +from ._enums import ( # type: ignore + GeoReplicationStatusType, + OdataMetadataFormat, + ResponseFormat, +) from ._patch import __all__ as _patch_all -from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ @@ -37,17 +48,17 @@ "Metrics", "RetentionPolicy", "SignedIdentifier", + "SignedIdentifiers", "TableEntityQueryResponse", "TableProperties", - "TableQueryResponse", "TableResponse", - 
"TableResponseProperties", "TableServiceError", "TableServiceProperties", "TableServiceStats", + "TablesError", "GeoReplicationStatusType", "OdataMetadataFormat", "ResponseFormat", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/_enums.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/_enums.py index 42d527e35998..a285a0ce1e86 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/_enums.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/_enums.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -14,20 +14,28 @@ class GeoReplicationStatusType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The status of the secondary location.""" LIVE = "live" + """The geo replication is live.""" BOOTSTRAP = "bootstrap" + """The geo replication is bootstrap.""" UNAVAILABLE = "unavailable" + """The geo replication is unavailable.""" class OdataMetadataFormat(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """OdataMetadataFormat.""" + """Specifies the level of metadata to be returned with the response.""" - APPLICATION_JSON_ODATA_NOMETADATA = "application/json;odata=nometadata" - APPLICATION_JSON_ODATA_MINIMALMETADATA = "application/json;odata=minimalmetadata" - APPLICATION_JSON_ODATA_FULLMETADATA = "application/json;odata=fullmetadata" + NO_METADATA = "application/json;odata=nometadata" + """No metadata.""" + MINIMAL_METADATA = "application/json;odata=minimalmetadata" + """Minimal metadata. This is the default and the minimum required for full deserialization.""" + FULL_METADATA = "application/json;odata=fullmetadata" + """Full metadata.""" class ResponseFormat(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """ResponseFormat.""" + """Specifies whether the response should echo the created content.""" RETURN_NO_CONTENT = "return-no-content" + """Do not echo the created content.""" RETURN_CONTENT = "return-content" + """Echo the created content.""" diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/_models.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/_models.py index 391f356e06cf..eed1443438bb 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/_models.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/_models.py @@ -1,71 +1,76 @@ # coding=utf-8 -# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. 
All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=useless-super-delegation import datetime -from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union +from typing import Any, Mapping, Optional, TYPE_CHECKING, Union, overload -from .. import _serialization +from .._utils.model_base import Model as _Model, rest_field if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports from .. import models as _models -class AccessPolicy(_serialization.Model): - """An Access policy. +class AccessPolicy(_Model): + """An access policy. - All required parameters must be populated in order to send to Azure. - - :ivar start: The start datetime from which the policy is active. Required. + :ivar start: The date-time the policy is active. Required. :vartype start: str - :ivar expiry: The datetime that the policy expires. Required. + :ivar expiry: The date-time the policy expires. Required. :vartype expiry: str - :ivar permission: The permissions for the acl policy. Required. + :ivar permission: The permissions for acl the policy. Required. 
:vartype permission: str """ - _validation = { - "start": {"required": True}, - "expiry": {"required": True}, - "permission": {"required": True}, - } - - _attribute_map = { - "start": {"key": "Start", "type": "str", "xml": {"name": "Start"}}, - "expiry": {"key": "Expiry", "type": "str", "xml": {"name": "Expiry"}}, - "permission": {"key": "Permission", "type": "str", "xml": {"name": "Permission"}}, - } - _xml_map = {"name": "AccessPolicy"} + start: str = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Start", "text": False, "unwrapped": False}, + ) + """The date-time the policy is active. Required.""" + expiry: str = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Expiry", "text": False, "unwrapped": False}, + ) + """The date-time the policy expires. Required.""" + permission: str = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Permission", "text": False, "unwrapped": False}, + ) + """The permissions for acl the policy. Required.""" + + _xml = {"attribute": False, "name": "AccessPolicy", "text": False, "unwrapped": False} + + @overload + def __init__( + self, + *, + start: str, + expiry: str, + permission: str, + ) -> None: ... - def __init__(self, *, start: str, expiry: str, permission: str, **kwargs: Any) -> None: + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword start: The start datetime from which the policy is active. Required. - :paramtype start: str - :keyword expiry: The datetime that the policy expires. Required. - :paramtype expiry: str - :keyword permission: The permissions for the acl policy. Required. - :paramtype permission: str + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.start = start - self.expiry = expiry - self.permission = permission + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class CorsRule(_serialization.Model): +class CorsRule(_Model): """CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain. - All required parameters must be populated in order to send to Azure. - :ivar allowed_origins: The origin domains that are permitted to make a request against the service via CORS. The origin domain is the domain from which the request originates. Note that the origin must be an exact case-sensitive match with the origin that the user age sends to the @@ -73,7 +78,7 @@ class CorsRule(_serialization.Model): requests via CORS. Required. :vartype allowed_origins: str :ivar allowed_methods: The methods (HTTP request verbs) that the origin domain may use for a - CORS request. (comma separated). Required. + CORS request. Required. :vartype allowed_methods: str :ivar allowed_headers: The request headers that the origin domain may specify on the CORS request. Required. 
@@ -86,23 +91,44 @@ class CorsRule(_serialization.Model): :vartype max_age_in_seconds: int """ - _validation = { - "allowed_origins": {"required": True}, - "allowed_methods": {"required": True}, - "allowed_headers": {"required": True}, - "exposed_headers": {"required": True}, - "max_age_in_seconds": {"required": True, "minimum": 0}, - } - - _attribute_map = { - "allowed_origins": {"key": "AllowedOrigins", "type": "str", "xml": {"name": "AllowedOrigins"}}, - "allowed_methods": {"key": "AllowedMethods", "type": "str", "xml": {"name": "AllowedMethods"}}, - "allowed_headers": {"key": "AllowedHeaders", "type": "str", "xml": {"name": "AllowedHeaders"}}, - "exposed_headers": {"key": "ExposedHeaders", "type": "str", "xml": {"name": "ExposedHeaders"}}, - "max_age_in_seconds": {"key": "MaxAgeInSeconds", "type": "int", "xml": {"name": "MaxAgeInSeconds"}}, - } - _xml_map = {"name": "CorsRule"} - + allowed_origins: str = rest_field( + name="allowedOrigins", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "AllowedOrigins", "text": False, "unwrapped": False}, + ) + """The origin domains that are permitted to make a request against the service via CORS. The + origin domain is the domain from which the request originates. Note that the origin must be an + exact case-sensitive match with the origin that the user age sends to the service. You can also + use the wildcard character '*' to allow all origin domains to make requests via CORS. Required.""" + allowed_methods: str = rest_field( + name="allowedMethods", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "AllowedMethods", "text": False, "unwrapped": False}, + ) + """The methods (HTTP request verbs) that the origin domain may use for a CORS request. 
Required.""" + allowed_headers: str = rest_field( + name="allowedHeaders", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "AllowedHeaders", "text": False, "unwrapped": False}, + ) + """The request headers that the origin domain may specify on the CORS request. Required.""" + exposed_headers: str = rest_field( + name="exposedHeaders", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "ExposedHeaders", "text": False, "unwrapped": False}, + ) + """The response headers that may be sent in the response to the CORS request and exposed by the + browser to the request issuer. Required.""" + max_age_in_seconds: int = rest_field( + name="maxAgeInSeconds", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "MaxAgeInSeconds", "text": False, "unwrapped": False}, + ) + """The maximum amount time that a browser should cache the preflight OPTIONS request. Required.""" + + _xml = {"attribute": False, "name": "CorsRule", "text": False, "unwrapped": False} + + @overload def __init__( self, *, @@ -111,116 +137,113 @@ def __init__( allowed_headers: str, exposed_headers: str, max_age_in_seconds: int, - **kwargs: Any - ) -> None: + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword allowed_origins: The origin domains that are permitted to make a request against the - service via CORS. The origin domain is the domain from which the request originates. Note that - the origin must be an exact case-sensitive match with the origin that the user age sends to the - service. You can also use the wildcard character '*' to allow all origin domains to make - requests via CORS. Required. - :paramtype allowed_origins: str - :keyword allowed_methods: The methods (HTTP request verbs) that the origin domain may use for a - CORS request. (comma separated). Required. 
- :paramtype allowed_methods: str - :keyword allowed_headers: The request headers that the origin domain may specify on the CORS - request. Required. - :paramtype allowed_headers: str - :keyword exposed_headers: The response headers that may be sent in the response to the CORS - request and exposed by the browser to the request issuer. Required. - :paramtype exposed_headers: str - :keyword max_age_in_seconds: The maximum amount time that a browser should cache the preflight - OPTIONS request. Required. - :paramtype max_age_in_seconds: int + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.allowed_origins = allowed_origins - self.allowed_methods = allowed_methods - self.allowed_headers = allowed_headers - self.exposed_headers = exposed_headers - self.max_age_in_seconds = max_age_in_seconds + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class GeoReplication(_serialization.Model): - """GeoReplication. - All required parameters must be populated in order to send to Azure. +class GeoReplication(_Model): + """Geo-Replication information for the Secondary Storage Service. :ivar status: The status of the secondary location. Required. Known values are: "live", "bootstrap", and "unavailable". - :vartype status: str or ~azure.table.models.GeoReplicationStatusType + :vartype status: str or ~azure.data.tables.models.GeoReplicationStatusType :ivar last_sync_time: A GMT date/time value, to the second. All primary writes preceding this value are guaranteed to be available for read operations at the secondary. Primary writes after this point in time may or may not be available for reads. Required. 
:vartype last_sync_time: ~datetime.datetime """ - _validation = { - "status": {"required": True}, - "last_sync_time": {"required": True}, - } - - _attribute_map = { - "status": {"key": "Status", "type": "str", "xml": {"name": "Status"}}, - "last_sync_time": {"key": "LastSyncTime", "type": "rfc-1123", "xml": {"name": "LastSyncTime"}}, - } - _xml_map = {"name": "GeoReplication"} - + status: Union[str, "_models.GeoReplicationStatusType"] = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Status", "text": False, "unwrapped": False}, + ) + """The status of the secondary location. Required. Known values are: \"live\", \"bootstrap\", and + \"unavailable\".""" + last_sync_time: datetime.datetime = rest_field( + name="lastSyncTime", + visibility=["read", "create", "update", "delete", "query"], + format="rfc7231", + xml={"attribute": False, "name": "LastSyncTime", "text": False, "unwrapped": False}, + ) + """A GMT date/time value, to the second. All primary writes preceding this value are guaranteed to + be available for read operations at the secondary. Primary writes after this point in time may + or may not be available for reads. Required.""" + + _xml = {"attribute": False, "name": "GeoReplication", "text": False, "unwrapped": False} + + @overload def __init__( self, *, status: Union[str, "_models.GeoReplicationStatusType"], last_sync_time: datetime.datetime, - **kwargs: Any - ) -> None: + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword status: The status of the secondary location. Required. Known values are: "live", - "bootstrap", and "unavailable". - :paramtype status: str or ~azure.table.models.GeoReplicationStatusType - :keyword last_sync_time: A GMT date/time value, to the second. All primary writes preceding - this value are guaranteed to be available for read operations at the secondary. 
Primary writes - after this point in time may or may not be available for reads. Required. - :paramtype last_sync_time: ~datetime.datetime + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.status = status - self.last_sync_time = last_sync_time + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class Logging(_serialization.Model): - """Azure Analytics Logging settings. - All required parameters must be populated in order to send to Azure. +class Logging(_Model): + """Azure Analytics Logging settings. - :ivar version: The version of Analytics to configure. Required. + :ivar version: The version of the logging properties. Required. :vartype version: str - :ivar delete: Indicates whether all delete requests should be logged. Required. + :ivar delete: Whether delete operation is logged. Required. :vartype delete: bool - :ivar read: Indicates whether all read requests should be logged. Required. + :ivar read: Whether read operation is logged. Required. :vartype read: bool - :ivar write: Indicates whether all write requests should be logged. Required. + :ivar write: Whether write operation is logged. Required. :vartype write: bool - :ivar retention_policy: The retention policy. Required. - :vartype retention_policy: ~azure.table.models.RetentionPolicy + :ivar retention_policy: The retention policy of the logs. Required. 
+ :vartype retention_policy: ~azure.data.tables._generated.models.RetentionPolicy """ - _validation = { - "version": {"required": True}, - "delete": {"required": True}, - "read": {"required": True}, - "write": {"required": True}, - "retention_policy": {"required": True}, - } - - _attribute_map = { - "version": {"key": "Version", "type": "str", "xml": {"name": "Version"}}, - "delete": {"key": "Delete", "type": "bool", "xml": {"name": "Delete"}}, - "read": {"key": "Read", "type": "bool", "xml": {"name": "Read"}}, - "write": {"key": "Write", "type": "bool", "xml": {"name": "Write"}}, - "retention_policy": {"key": "RetentionPolicy", "type": "RetentionPolicy"}, - } - _xml_map = {"name": "Logging"} - + version: str = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Version", "text": False, "unwrapped": False}, + ) + """The version of the logging properties. Required.""" + delete: bool = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Delete", "text": False, "unwrapped": False}, + ) + """Whether delete operation is logged. Required.""" + read: bool = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Read", "text": False, "unwrapped": False}, + ) + """Whether read operation is logged. Required.""" + write: bool = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Write", "text": False, "unwrapped": False}, + ) + """Whether write operation is logged. Required.""" + retention_policy: "_models.RetentionPolicy" = rest_field( + name="retentionPolicy", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "RetentionPolicy", "text": False, "unwrapped": False}, + ) + """The retention policy of the logs. 
Required.""" + + _xml = {"attribute": False, "name": "Logging", "text": False, "unwrapped": False} + + @overload def __init__( self, *, @@ -229,55 +252,59 @@ def __init__( read: bool, write: bool, retention_policy: "_models.RetentionPolicy", - **kwargs: Any - ) -> None: + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword version: The version of Analytics to configure. Required. - :paramtype version: str - :keyword delete: Indicates whether all delete requests should be logged. Required. - :paramtype delete: bool - :keyword read: Indicates whether all read requests should be logged. Required. - :paramtype read: bool - :keyword write: Indicates whether all write requests should be logged. Required. - :paramtype write: bool - :keyword retention_policy: The retention policy. Required. - :paramtype retention_policy: ~azure.table.models.RetentionPolicy + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.version = version - self.delete = delete - self.read = read - self.write = write - self.retention_policy = retention_policy + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class Metrics(_serialization.Model): - """Metrics. - All required parameters must be populated in order to send to Azure. +class Metrics(_Model): + """The metrics properties. - :ivar version: The version of Analytics to configure. + :ivar version: The version of the metrics properties. :vartype version: str :ivar enabled: Indicates whether metrics are enabled for the Table service. Required. :vartype enabled: bool :ivar include_apis: Indicates whether metrics should generate summary statistics for called API operations. :vartype include_apis: bool - :ivar retention_policy: The retention policy. - :vartype retention_policy: ~azure.table.models.RetentionPolicy + :ivar retention_policy: The retention policy of the metrics. 
+ :vartype retention_policy: ~azure.data.tables._generated.models.RetentionPolicy """ - _validation = { - "enabled": {"required": True}, - } - - _attribute_map = { - "version": {"key": "Version", "type": "str", "xml": {"name": "Version"}}, - "enabled": {"key": "Enabled", "type": "bool", "xml": {"name": "Enabled"}}, - "include_apis": {"key": "IncludeAPIs", "type": "bool", "xml": {"name": "IncludeAPIs"}}, - "retention_policy": {"key": "RetentionPolicy", "type": "RetentionPolicy"}, - } - + version: Optional[str] = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Version", "text": False, "unwrapped": False}, + ) + """The version of the metrics properties.""" + enabled: bool = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Enabled", "text": False, "unwrapped": False}, + ) + """Indicates whether metrics are enabled for the Table service. Required.""" + include_apis: Optional[bool] = rest_field( + name="includeApis", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "IncludeAPIs", "text": False, "unwrapped": False}, + ) + """Indicates whether metrics should generate summary statistics for called API operations.""" + retention_policy: Optional["_models.RetentionPolicy"] = rest_field( + name="retentionPolicy", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "RetentionPolicy", "text": False, "unwrapped": False}, + ) + """The retention policy of the metrics.""" + + _xml = {"attribute": False, "name": "Metrics", "text": False, "unwrapped": False} + + @overload def __init__( self, *, @@ -285,182 +312,177 @@ def __init__( version: Optional[str] = None, include_apis: Optional[bool] = None, retention_policy: Optional["_models.RetentionPolicy"] = None, - **kwargs: Any - ) -> None: + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword version: The version of Analytics to configure. - :paramtype version: str - :keyword enabled: Indicates whether metrics are enabled for the Table service. Required. - :paramtype enabled: bool - :keyword include_apis: Indicates whether metrics should generate summary statistics for called - API operations. - :paramtype include_apis: bool - :keyword retention_policy: The retention policy. - :paramtype retention_policy: ~azure.table.models.RetentionPolicy + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.version = version - self.enabled = enabled - self.include_apis = include_apis - self.retention_policy = retention_policy + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class RetentionPolicy(_serialization.Model): - """The retention policy. - All required parameters must be populated in order to send to Azure. +class RetentionPolicy(_Model): + """The retention policy. - :ivar enabled: Indicates whether a retention policy is enabled for the service. Required. + :ivar enabled: Whether to enable the retention policy. Required. :vartype enabled: bool :ivar days: Indicates the number of days that metrics or logging or soft-deleted data should be retained. All data older than this value will be deleted. :vartype days: int """ - _validation = { - "enabled": {"required": True}, - "days": {"minimum": 1}, - } - - _attribute_map = { - "enabled": {"key": "Enabled", "type": "bool", "xml": {"name": "Enabled"}}, - "days": {"key": "Days", "type": "int", "xml": {"name": "Days"}}, - } - _xml_map = {"name": "RetentionPolicy"} + enabled: bool = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Enabled", "text": False, "unwrapped": False}, + ) + """Whether to enable the retention policy. 
Required.""" + days: Optional[int] = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Days", "text": False, "unwrapped": False}, + ) + """Indicates the number of days that metrics or logging or soft-deleted data should be retained. + All data older than this value will be deleted.""" + + _xml = {"attribute": False, "name": "RetentionPolicy", "text": False, "unwrapped": False} + + @overload + def __init__( + self, + *, + enabled: bool, + days: Optional[int] = None, + ) -> None: ... - def __init__(self, *, enabled: bool, days: Optional[int] = None, **kwargs: Any) -> None: + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword enabled: Indicates whether a retention policy is enabled for the service. Required. - :paramtype enabled: bool - :keyword days: Indicates the number of days that metrics or logging or soft-deleted data should - be retained. All data older than this value will be deleted. - :paramtype days: int + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.enabled = enabled - self.days = days + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class SignedIdentifier(_serialization.Model): - """A signed identifier. - All required parameters must be populated in order to send to Azure. +class SignedIdentifier(_Model): + """The signed identifier. - :ivar id: A unique id. Required. + :ivar id: The unique ID for the signed identifier. Required. :vartype id: str - :ivar access_policy: The access policy. - :vartype access_policy: ~azure.table.models.AccessPolicy + :ivar access_policy: The access policy for the signed identifier. 
+ :vartype access_policy: ~azure.data.tables._generated.models.AccessPolicy """ - _validation = { - "id": {"required": True}, - } - - _attribute_map = { - "id": {"key": "Id", "type": "str", "xml": {"name": "Id"}}, - "access_policy": {"key": "AccessPolicy", "type": "AccessPolicy"}, - } - _xml_map = {"name": "SignedIdentifier"} - + id: str = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Id", "text": False, "unwrapped": False}, + ) + """The unique ID for the signed identifier. Required.""" + access_policy: Optional["_models.AccessPolicy"] = rest_field( + name="accessPolicy", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "AccessPolicy", "text": False, "unwrapped": False}, + ) + """The access policy for the signed identifier.""" + + _xml = {"attribute": False, "name": "SignedIdentifier", "text": False, "unwrapped": False} + + @overload def __init__( self, *, id: str, # pylint: disable=redefined-builtin access_policy: Optional["_models.AccessPolicy"] = None, - **kwargs: Any - ) -> None: + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword id: A unique id. Required. - :paramtype id: str - :keyword access_policy: The access policy. - :paramtype access_policy: ~azure.table.models.AccessPolicy + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.id = id - self.access_policy = access_policy + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class TableEntityQueryResponse(_serialization.Model): - """The properties for the table entity query response. - - :ivar odata_metadata: The metadata response of the table. - :vartype odata_metadata: str - :ivar value: List of table entities. 
- :vartype value: list[dict[str, any]] - """ - - _attribute_map = { - "odata_metadata": {"key": "odata\\.metadata", "type": "str"}, - "value": {"key": "value", "type": "[{object}]"}, - } - def __init__( - self, *, odata_metadata: Optional[str] = None, value: Optional[List[Dict[str, Any]]] = None, **kwargs: Any - ) -> None: - """ - :keyword odata_metadata: The metadata response of the table. - :paramtype odata_metadata: str - :keyword value: List of table entities. - :paramtype value: list[dict[str, any]] - """ - super().__init__(**kwargs) - self.odata_metadata = odata_metadata - self.value = value +class SignedIdentifiers(_Model): + """Table signed identifiers. + :ivar identifiers: An array of signed identifiers. Required. + :vartype identifiers: ~azure.data.tables._generated.models.SignedIdentifier + """ -class TableProperties(_serialization.Model): - """The properties for creating a table. + identifiers: list["_models.SignedIdentifier"] = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={ + "attribute": False, + "itemsName": "SignedIdentifier", + "name": "SignedIdentifier", + "text": False, + "unwrapped": True, + }, + ) + """An array of signed identifiers. Required.""" - :ivar table_name: The name of the table to create. - :vartype table_name: str - """ + _xml = {"attribute": False, "name": "SignedIdentifiers", "text": False, "unwrapped": False} - _attribute_map = { - "table_name": {"key": "TableName", "type": "str"}, - } + @overload + def __init__( + self, + *, + identifiers: list["_models.SignedIdentifier"], + ) -> None: ... - def __init__(self, *, table_name: Optional[str] = None, **kwargs: Any) -> None: + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword table_name: The name of the table to create. - :paramtype table_name: str + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.table_name = table_name + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class TableQueryResponse(_serialization.Model): - """The properties for the table query response. +class TableEntityQueryResponse(_Model): + """The properties for the table entity query response. :ivar odata_metadata: The metadata response of the table. :vartype odata_metadata: str - :ivar value: List of tables. - :vartype value: list[~azure.table.models.TableResponseProperties] + :ivar value: List of table entities. + :vartype value: list[dict[str, any]] """ - _attribute_map = { - "odata_metadata": {"key": "odata\\.metadata", "type": "str"}, - "value": {"key": "value", "type": "[TableResponseProperties]"}, - } + odata_metadata: Optional[str] = rest_field(name="odata.metadata", visibility=["read", "query"]) + """The metadata response of the table.""" + value: Optional[list[dict[str, Any]]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of table entities.""" + @overload def __init__( self, *, odata_metadata: Optional[str] = None, - value: Optional[List["_models.TableResponseProperties"]] = None, - **kwargs: Any - ) -> None: + value: Optional[list[dict[str, Any]]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword odata_metadata: The metadata response of the table. - :paramtype odata_metadata: str - :keyword value: List of tables. - :paramtype value: list[~azure.table.models.TableResponseProperties] + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.odata_metadata = odata_metadata - self.value = value + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class TableResponseProperties(_serialization.Model): +class TableProperties(_Model): """The properties for the table response. :ivar table_name: The name of the table. @@ -473,13 +495,16 @@ class TableResponseProperties(_serialization.Model): :vartype odata_edit_link: str """ - _attribute_map = { - "table_name": {"key": "TableName", "type": "str"}, - "odata_type": {"key": "odata\\.type", "type": "str"}, - "odata_id": {"key": "odata\\.id", "type": "str"}, - "odata_edit_link": {"key": "odata\\.editLink", "type": "str"}, - } + table_name: Optional[str] = rest_field(name="TableName", visibility=["read", "create", "update", "delete", "query"]) + """The name of the table.""" + odata_type: Optional[str] = rest_field(name="odata.type", visibility=["read", "query"]) + """The odata type of the table.""" + odata_id: Optional[str] = rest_field(name="odata.id", visibility=["read", "query"]) + """The id of the table.""" + odata_edit_link: Optional[str] = rest_field(name="odata.editLink", visibility=["read", "query"]) + """The edit link of the table.""" + @overload def __init__( self, *, @@ -487,27 +512,21 @@ def __init__( odata_type: Optional[str] = None, odata_id: Optional[str] = None, odata_edit_link: Optional[str] = None, - **kwargs: Any - ) -> None: + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword table_name: The name of the table. - :paramtype table_name: str - :keyword odata_type: The odata type of the table. - :paramtype odata_type: str - :keyword odata_id: The id of the table. - :paramtype odata_id: str - :keyword odata_edit_link: The edit link of the table. - :paramtype odata_edit_link: str + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.table_name = table_name - self.odata_type = odata_type - self.odata_id = odata_id - self.odata_edit_link = odata_edit_link + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class TableResponse(TableResponseProperties): - """The response for a single table. + +class TableResponse(_Model): + """The table properties as returned in an echo response. :ivar table_name: The name of the table. :vartype table_name: str @@ -521,14 +540,18 @@ class TableResponse(TableResponseProperties): :vartype odata_metadata: str """ - _attribute_map = { - "table_name": {"key": "TableName", "type": "str"}, - "odata_type": {"key": "odata\\.type", "type": "str"}, - "odata_id": {"key": "odata\\.id", "type": "str"}, - "odata_edit_link": {"key": "odata\\.editLink", "type": "str"}, - "odata_metadata": {"key": "odata\\.metadata", "type": "str"}, - } - + table_name: Optional[str] = rest_field(name="TableName", visibility=["read", "create", "update", "delete", "query"]) + """The name of the table.""" + odata_type: Optional[str] = rest_field(name="odata.type", visibility=["read", "query"]) + """The odata type of the table.""" + odata_id: Optional[str] = rest_field(name="odata.id", visibility=["read", "query"]) + """The id of the table.""" + odata_edit_link: Optional[str] = rest_field(name="odata.editLink", visibility=["read", "query"]) + """The edit link of the table.""" + odata_metadata: Optional[str] = rest_field(name="odata.metadata", visibility=["read", "query"]) + """The metadata response of the table.""" + + @overload def __init__( self, *, @@ -537,117 +560,176 @@ def __init__( odata_id: Optional[str] = None, odata_edit_link: Optional[str] = None, odata_metadata: Optional[str] = None, - **kwargs: Any - ) -> None: + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword table_name: The name of the table. 
- :paramtype table_name: str - :keyword odata_type: The odata type of the table. - :paramtype odata_type: str - :keyword odata_id: The id of the table. - :paramtype odata_id: str - :keyword odata_edit_link: The edit link of the table. - :paramtype odata_edit_link: str - :keyword odata_metadata: The metadata response of the table. - :paramtype odata_metadata: str + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] """ - super().__init__( - table_name=table_name, odata_type=odata_type, odata_id=odata_id, odata_edit_link=odata_edit_link, **kwargs - ) - self.odata_metadata = odata_metadata + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class TableServiceError(_serialization.Model): - """Table Service error. + +class TablesError(_Model): + """Table JSON error. :ivar message: The error message. :vartype message: str """ - _attribute_map = { - "message": {"key": "Message", "type": "str", "xml": {"name": "Message"}}, - } + message: Optional[str] = rest_field(name="Message", visibility=["read", "create", "update", "delete", "query"]) + """The error message.""" + + @overload + def __init__( + self, + *, + message: Optional[str] = None, + ) -> None: ... - def __init__(self, *, message: Optional[str] = None, **kwargs: Any) -> None: + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword message: The error message. - :paramtype message: str + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.message = message - - -class TableServiceProperties(_serialization.Model): - """Table Service Properties. - - :ivar logging: Azure Analytics Logging settings. - :vartype logging: ~azure.table.models.Logging - :ivar hour_metrics: A summary of request statistics grouped by API in hourly aggregates for - tables. 
- :vartype hour_metrics: ~azure.table.models.Metrics - :ivar minute_metrics: A summary of request statistics grouped by API in minute aggregates for - tables. - :vartype minute_metrics: ~azure.table.models.Metrics - :ivar cors: The set of CORS rules. - :vartype cors: ~azure.table.models.CorsRule + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TableServiceError(_Model): + """The Tables service XML error. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str """ - _attribute_map = { - "logging": {"key": "Logging", "type": "Logging"}, - "hour_metrics": {"key": "HourMetrics", "type": "Metrics"}, - "minute_metrics": {"key": "MinuteMetrics", "type": "Metrics"}, - "cors": { - "key": "Cors", - "type": "[CorsRule]", - "xml": {"name": "Cors", "wrapped": True, "itemsName": "CorsRule"}, - }, - } - _xml_map = {"name": "StorageServiceProperties"} + code: Optional[str] = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Code", "text": False, "unwrapped": False}, + ) + """The error code.""" + message: Optional[str] = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Message", "text": False, "unwrapped": False}, + ) + """The error message.""" + + _xml = {"attribute": False, "name": "TablesServiceError", "text": False, "unwrapped": False} + + @overload + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + +class TableServiceProperties(_Model): + """The service properties. + + :ivar logging: The logging properties. 
+ :vartype logging: ~azure.data.tables._generated.models.Logging + :ivar hour_metrics: The hour metrics properties. + :vartype hour_metrics: ~azure.data.tables._generated.models.Metrics + :ivar minute_metrics: The minute metrics properties. + :vartype minute_metrics: ~azure.data.tables._generated.models.Metrics + :ivar cors: The CORS properties. + :vartype cors: ~azure.data.tables._generated.models.CorsRule + """ + + logging: Optional["_models.Logging"] = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Logging", "text": False, "unwrapped": False}, + ) + """The logging properties.""" + hour_metrics: Optional["_models.Metrics"] = rest_field( + name="hourMetrics", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "HourMetrics", "text": False, "unwrapped": False}, + ) + """The hour metrics properties.""" + minute_metrics: Optional["_models.Metrics"] = rest_field( + name="minuteMetrics", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "MinuteMetrics", "text": False, "unwrapped": False}, + ) + """The minute metrics properties.""" + cors: Optional[list["_models.CorsRule"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "itemsName": "CorsRule", "name": "Cors", "text": False, "unwrapped": False}, + ) + """The CORS properties.""" + + _xml = {"attribute": False, "name": "StorageServiceProperties", "text": False, "unwrapped": False} + + @overload def __init__( self, *, logging: Optional["_models.Logging"] = None, hour_metrics: Optional["_models.Metrics"] = None, minute_metrics: Optional["_models.Metrics"] = None, - cors: Optional[List["_models.CorsRule"]] = None, - **kwargs: Any - ) -> None: + cors: Optional[list["_models.CorsRule"]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword logging: Azure Analytics Logging settings. - :paramtype logging: ~azure.table.models.Logging - :keyword hour_metrics: A summary of request statistics grouped by API in hourly aggregates for - tables. - :paramtype hour_metrics: ~azure.table.models.Metrics - :keyword minute_metrics: A summary of request statistics grouped by API in minute aggregates - for tables. - :paramtype minute_metrics: ~azure.table.models.Metrics - :keyword cors: The set of CORS rules. - :paramtype cors: ~azure.table.models.CorsRule + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.logging = logging - self.hour_metrics = hour_metrics - self.minute_metrics = minute_metrics - self.cors = cors + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class TableServiceStats(_serialization.Model): - """Stats for the service. +class TableServiceStats(_Model): + """Stats for the table service. :ivar geo_replication: Geo-Replication information for the Secondary Storage Service. 
- :vartype geo_replication: ~azure.table.models.GeoReplication + :vartype geo_replication: ~azure.data.tables._generated.models.GeoReplication """ - _attribute_map = { - "geo_replication": {"key": "GeoReplication", "type": "GeoReplication"}, - } - _xml_map = {"name": "StorageServiceStats"} + geo_replication: Optional["_models.GeoReplication"] = rest_field( + name="geoReplication", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "GeoReplication", "text": False, "unwrapped": False}, + ) + """Geo-Replication information for the Secondary Storage Service.""" + + _xml = {"attribute": False, "name": "StorageServiceStats", "text": False, "unwrapped": False} - def __init__(self, *, geo_replication: Optional["_models.GeoReplication"] = None, **kwargs: Any) -> None: + @overload + def __init__( + self, + *, + geo_replication: Optional["_models.GeoReplication"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword geo_replication: Geo-Replication information for the Secondary Storage Service. - :paramtype geo_replication: ~azure.table.models.GeoReplication + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.geo_replication = geo_replication + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/_patch.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/_patch.py index f7dd32510333..87676c65a8f0 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/_patch.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/models/_patch.py @@ -1,14 +1,15 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. 
-# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/operations/__init__.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/operations/__init__.py index 8467e05a2502..7b004d4708fa 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/operations/__init__.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/operations/__init__.py @@ -2,20 +2,26 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._operations import TableOperations -from ._operations import ServiceOperations +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import TableOperations # type: ignore +from ._operations import ServiceOperations # type: ignore from ._patch import __all__ as _patch_all -from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ "TableOperations", "ServiceOperations", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/operations/_operations.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/operations/_operations.py index 712c2b61bea0..45b800e543fb 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/operations/_operations.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/operations/_operations.py @@ -1,16 +1,18 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload +import json +from typing import Any, Callable, IO, Literal, Optional, TypeVar, Union, overload +import urllib.parse -from azure.core import MatchConditions +from azure.core import MatchConditions, PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -18,23 +20,31 @@ ResourceModifiedError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) +from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from .. import models as _models -from .._serialization import Serializer -from .._vendor import prep_if_match, prep_if_none_match +from .._configuration import AzureTableClientConfiguration +from .._utils.model_base import ( + SdkJSONEncoder, + _deserialize, + _deserialize_xml, + _failsafe_deserialize, + _failsafe_deserialize_xml, + _get_element, +) +from .._utils.serialization import Deserializer, Serializer +from .._utils.utils import prep_if_match, prep_if_none_match -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -53,7 +63,7 @@ def build_table_query_request( _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) data_service_version: Literal["3.0"] = kwargs.pop("data_service_version", _headers.pop("DataServiceVersion", "3.0")) - version: Literal["2019-02-02"] = kwargs.pop("version", _headers.pop("x-ms-version", "2019-02-02")) + api_version: str = kwargs.pop("api_version", _headers.pop("x-ms-version", "2019-02-02")) accept = _headers.pop("Accept", "application/json;odata=minimalmetadata") # Construct URL @@ -63,7 +73,7 @@ def build_table_query_request( if format is not None: _params["$format"] = _SERIALIZER.query("format", format, "str") if top is not None: - _params["$top"] = _SERIALIZER.query("top", top, "int", minimum=0) + _params["$top"] = _SERIALIZER.query("top", top, "int") if select is not None: _params["$select"] = _SERIALIZER.query("select", select, "str") if filter is not None: @@ -72,8 +82,8 @@ def build_table_query_request( _params["NextTableName"] = _SERIALIZER.query("next_table_name", next_table_name, "str") # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") _headers["DataServiceVersion"] = _SERIALIZER.header("data_service_version", data_service_version, "str") + _headers["x-ms-version"] = _SERIALIZER.header("api_version", api_version, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) @@ -81,7 +91,6 @@ def build_table_query_request( def build_table_create_request( *, - json: _models.TableProperties, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, response_preference: Optional[Union[str, _models.ResponseFormat]] = None, **kwargs: Any @@ -89,9 +98,9 @@ def build_table_create_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + content_type: str = kwargs.pop("content_type") data_service_version: Literal["3.0"] = kwargs.pop("data_service_version", 
_headers.pop("DataServiceVersion", "3.0")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2019-02-02"] = kwargs.pop("version", _headers.pop("x-ms-version", "2019-02-02")) + api_version: str = kwargs.pop("api_version", _headers.pop("x-ms-version", "2019-02-02")) accept = _headers.pop("Accept", "application/json;odata=minimalmetadata") # Construct URL @@ -102,21 +111,20 @@ def build_table_create_request( _params["$format"] = _SERIALIZER.query("format", format, "str") # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["DataServiceVersion"] = _SERIALIZER.header("data_service_version", data_service_version, "str") + _headers["x-ms-version"] = _SERIALIZER.header("api_version", api_version, "str") if response_preference is not None: _headers["Prefer"] = _SERIALIZER.header("response_preference", response_preference, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, json=json, **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_table_delete_request(table: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - version: Literal["2019-02-02"] = kwargs.pop("version", _headers.pop("x-ms-version", "2019-02-02")) + api_version: str = kwargs.pop("api_version", _headers.pop("x-ms-version", "2019-02-02")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -128,7 +136,7 @@ def build_table_delete_request(table: str, **kwargs: Any) -> HttpRequest: _url: str = _url.format(**path_format_arguments) # type: ignore # Construct headers - 
_headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["x-ms-version"] = _SERIALIZER.header("api_version", api_version, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="DELETE", url=_url, headers=_headers, **kwargs) @@ -137,11 +145,11 @@ def build_table_delete_request(table: str, **kwargs: Any) -> HttpRequest: def build_table_query_entities_request( table: str, *, - timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, top: Optional[int] = None, select: Optional[str] = None, filter: Optional[str] = None, + timeout: Optional[int] = None, next_partition_key: Optional[str] = None, next_row_key: Optional[str] = None, **kwargs: Any @@ -150,7 +158,7 @@ def build_table_query_entities_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) data_service_version: Literal["3.0"] = kwargs.pop("data_service_version", _headers.pop("DataServiceVersion", "3.0")) - version: Literal["2019-02-02"] = kwargs.pop("version", _headers.pop("x-ms-version", "2019-02-02")) + api_version: str = kwargs.pop("api_version", _headers.pop("x-ms-version", "2019-02-02")) accept = _headers.pop("Accept", "application/json;odata=minimalmetadata") # Construct URL @@ -162,24 +170,24 @@ def build_table_query_entities_request( _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) if format is not None: _params["$format"] = _SERIALIZER.query("format", format, "str") if top is not None: - _params["$top"] = _SERIALIZER.query("top", top, "int", minimum=0) + _params["$top"] = _SERIALIZER.query("top", top, "int") if select is not None: _params["$select"] = _SERIALIZER.query("select", select, "str") if filter is not None: _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if timeout is not None: + _params["timeout"] = 
_SERIALIZER.query("timeout", timeout, "int") if next_partition_key is not None: _params["NextPartitionKey"] = _SERIALIZER.query("next_partition_key", next_partition_key, "str") if next_row_key is not None: _params["NextRowKey"] = _SERIALIZER.query("next_row_key", next_row_key, "str") # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") _headers["DataServiceVersion"] = _SERIALIZER.header("data_service_version", data_service_version, "str") + _headers["x-ms-version"] = _SERIALIZER.header("api_version", api_version, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) @@ -200,7 +208,7 @@ def build_table_query_entity_with_partition_and_row_key_request( # pylint: disa _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) data_service_version: Literal["3.0"] = kwargs.pop("data_service_version", _headers.pop("DataServiceVersion", "3.0")) - version: Literal["2019-02-02"] = kwargs.pop("version", _headers.pop("x-ms-version", "2019-02-02")) + api_version: str = kwargs.pop("api_version", _headers.pop("x-ms-version", "2019-02-02")) accept = _headers.pop("Accept", "application/json;odata=minimalmetadata") # Construct URL @@ -215,7 +223,7 @@ def build_table_query_entity_with_partition_and_row_key_request( # pylint: disa # Construct parameters if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") if format is not None: _params["$format"] = _SERIALIZER.query("format", format, "str") if select is not None: @@ -224,8 +232,8 @@ def build_table_query_entity_with_partition_and_row_key_request( # pylint: disa _params["$filter"] = _SERIALIZER.query("filter", filter, "str") # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") _headers["DataServiceVersion"] = 
_SERIALIZER.header("data_service_version", data_service_version, "str") + _headers["x-ms-version"] = _SERIALIZER.header("api_version", api_version, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) @@ -247,7 +255,7 @@ def build_table_update_entity_request( data_service_version: Literal["3.0"] = kwargs.pop("data_service_version", _headers.pop("DataServiceVersion", "3.0")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2019-02-02"] = kwargs.pop("version", _headers.pop("x-ms-version", "2019-02-02")) + api_version: str = kwargs.pop("api_version", _headers.pop("x-ms-version", "2019-02-02")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -262,13 +270,13 @@ def build_table_update_entity_request( # Construct parameters if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") if format is not None: _params["$format"] = _SERIALIZER.query("format", format, "str") # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") _headers["DataServiceVersion"] = _SERIALIZER.header("data_service_version", data_service_version, "str") + _headers["x-ms-version"] = _SERIALIZER.header("api_version", api_version, "str") if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -298,7 +306,7 @@ def build_table_merge_entity_request( data_service_version: Literal["3.0"] = kwargs.pop("data_service_version", _headers.pop("DataServiceVersion", "3.0")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2019-02-02"] = kwargs.pop("version", _headers.pop("x-ms-version", 
"2019-02-02")) + api_version: str = kwargs.pop("api_version", _headers.pop("x-ms-version", "2019-02-02")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -313,13 +321,13 @@ def build_table_merge_entity_request( # Construct parameters if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") if format is not None: _params["$format"] = _SERIALIZER.query("format", format, "str") # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") _headers["DataServiceVersion"] = _SERIALIZER.header("data_service_version", data_service_version, "str") + _headers["x-ms-version"] = _SERIALIZER.header("api_version", api_version, "str") if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -348,7 +356,7 @@ def build_table_delete_entity_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) data_service_version: Literal["3.0"] = kwargs.pop("data_service_version", _headers.pop("DataServiceVersion", "3.0")) - version: Literal["2019-02-02"] = kwargs.pop("version", _headers.pop("x-ms-version", "2019-02-02")) + api_version: str = kwargs.pop("api_version", _headers.pop("x-ms-version", "2019-02-02")) accept = _headers.pop("Accept", "application/json;odata=minimalmetadata") # Construct URL @@ -363,13 +371,13 @@ def build_table_delete_entity_request( # Construct parameters if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") if format is not None: _params["$format"] = _SERIALIZER.query("format", format, "str") # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") _headers["DataServiceVersion"] = 
_SERIALIZER.header("data_service_version", data_service_version, "str") + _headers["x-ms-version"] = _SERIALIZER.header("api_version", api_version, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") if_match = prep_if_match(etag, match_condition) if if_match is not None: @@ -387,15 +395,14 @@ def build_table_insert_entity_request( timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, response_preference: Optional[Union[str, _models.ResponseFormat]] = None, - json: Optional[Dict[str, Any]] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + content_type: str = kwargs.pop("content_type") data_service_version: Literal["3.0"] = kwargs.pop("data_service_version", _headers.pop("DataServiceVersion", "3.0")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2019-02-02"] = kwargs.pop("version", _headers.pop("x-ms-version", "2019-02-02")) + api_version: str = kwargs.pop("api_version", _headers.pop("x-ms-version", "2019-02-02")) accept = _headers.pop("Accept", "application/json;odata=minimalmetadata") # Construct URL @@ -408,32 +415,30 @@ def build_table_insert_entity_request( # Construct parameters if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") if format is not None: _params["$format"] = _SERIALIZER.query("format", format, "str") # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["DataServiceVersion"] = _SERIALIZER.header("data_service_version", data_service_version, "str") + _headers["x-ms-version"] = _SERIALIZER.header("api_version", api_version, "str") if 
response_preference is not None: _headers["Prefer"] = _SERIALIZER.header("response_preference", response_preference, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, json=json, **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_table_get_access_policy_request(table: str, *, timeout: Optional[int] = None, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) - version: Literal["2019-02-02"] = kwargs.pop("version", _headers.pop("x-ms-version", "2019-02-02")) + api_version: str = kwargs.pop("api_version", _headers.pop("x-ms-version", "2019-02-02")) accept = _headers.pop("Accept", "application/xml") # Construct URL - _url = "/{table}" + _url = "/{table}?comp=acl" path_format_arguments = { "table": _SERIALIZER.url("table", table, "str"), } @@ -442,29 +447,25 @@ def build_table_get_access_policy_request(table: str, *, timeout: Optional[int] # Construct parameters if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - _params["comp"] = _SERIALIZER.query("comp", comp, "str") + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["x-ms-version"] = _SERIALIZER.header("api_version", api_version, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_table_set_access_policy_request( - table: str, *, timeout: Optional[int] = None, content: Any = None, 
**kwargs: Any -) -> HttpRequest: +def build_table_set_access_policy_request(table: str, *, timeout: Optional[int] = None, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2019-02-02"] = kwargs.pop("version", _headers.pop("x-ms-version", "2019-02-02")) + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _headers.pop("x-ms-version", "2019-02-02")) accept = _headers.pop("Accept", "application/xml") # Construct URL - _url = "/{table}" + _url = "/{table}?comp=acl" path_format_arguments = { "table": _SERIALIZER.url("table", table, "str"), } @@ -473,66 +474,55 @@ def build_table_set_access_policy_request( # Construct parameters if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - _params["comp"] = _SERIALIZER.query("comp", comp, "str") + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["x-ms-version"] = _SERIALIZER.header("api_version", api_version, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_service_set_properties_request(*, content: Any, timeout: Optional[int] = None, **kwargs: Any) -> HttpRequest: +def 
build_service_set_properties_request(*, timeout: Optional[int] = None, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2019-02-02"] = kwargs.pop("version", _headers.pop("x-ms-version", "2019-02-02")) + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _headers.pop("x-ms-version", "2019-02-02")) accept = _headers.pop("Accept", "application/xml") # Construct URL - _url = "/" + _url = "/?restype=service&comp=properties" # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["x-ms-version"] = _SERIALIZER.header("api_version", api_version, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_service_get_properties_request(*, timeout: Optional[int] = None, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) 
_params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2019-02-02"] = kwargs.pop("version", _headers.pop("x-ms-version", "2019-02-02")) + api_version: str = kwargs.pop("api_version", _headers.pop("x-ms-version", "2019-02-02")) accept = _headers.pop("Accept", "application/xml") # Construct URL - _url = "/" + _url = "/?restype=service&comp=properties" # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["x-ms-version"] = _SERIALIZER.header("api_version", api_version, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) @@ -542,22 +532,18 @@ def build_service_get_statistics_request(*, timeout: Optional[int] = None, **kwa _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) - version: Literal["2019-02-02"] = kwargs.pop("version", _headers.pop("x-ms-version", "2019-02-02")) + api_version: str = kwargs.pop("api_version", _headers.pop("x-ms-version", "2019-02-02")) accept = _headers.pop("Accept", "application/xml") # Construct URL - _url = "/" + _url = "/?restype=service&comp=stats" # Construct parameters - _params["restype"] = _SERIALIZER.query("restype", 
restype, "str") - _params["comp"] = _SERIALIZER.query("comp", comp, "str") if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["x-ms-version"] = _SERIALIZER.header("api_version", api_version, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) @@ -569,18 +555,16 @@ class TableOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.table.AzureTable`'s + :class:`~azure.data.tables.AzureTableClient`'s :attr:`table` attribute. """ - models = _models - - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AzureTableClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def query( @@ -590,94 +574,92 @@ def query( top: Optional[int] = None, select: Optional[str] = None, filter: Optional[str] = None, - next_table_name: Optional[str] = None, **kwargs: Any - ) -> _models.TableQueryResponse: + ) -> ItemPaged["_models.TableProperties"]: """Queries tables under the given 
account. - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat - :keyword top: Maximum number of records to return. Default value is None. + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat + :keyword top: Specifies the maximum number of records to return. Default value is None. :paramtype top: int :keyword select: Select expression using OData notation. Limits the columns on each record to - just those requested, e.g. "$select=PolicyAssignmentId, ResourceId". Default value is None. + just those requested. Default value is None. :paramtype select: str :keyword filter: OData filter expression. Default value is None. :paramtype filter: str - :keyword next_table_name: A table query continuation token from a previous call. Default value - is None. - :paramtype next_table_name: str - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str - :return: TableQueryResponse - :rtype: ~azure.table.models.TableQueryResponse + :return: An iterator like instance of TableProperties + :rtype: ~azure.core.paging.ItemPaged[~azure.data.tables._generated.models.TableProperties] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} data_service_version: Literal["3.0"] = kwargs.pop( "data_service_version", _headers.pop("DataServiceVersion", "3.0") ) - cls: ClsType[_models.TableQueryResponse] = kwargs.pop("cls", None) + cls: ClsType[list[_models.TableProperties]] = kwargs.pop("cls", None) - request = build_table_query_request( - format=format, - top=top, - select=select, - filter=filter, - next_table_name=next_table_name, - data_service_version=data_service_version, - version=self._config.version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - request.url = self._client.format_url(request.url, **path_format_arguments) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) + error_map.update(kwargs.pop("error_map", {}) or {}) - response = pipeline_response.http_response + def prepare_request(_continuation_token=None): + + _request = build_table_query_request( + format=format, + top=top, + select=select, + filter=filter, + 
next_table_name=_continuation_token, + data_service_version=data_service_version, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + list[_models.TableProperties], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return pipeline_response.http_response.headers.get("x-ms-continuation-NextTableName") or None, iter( + list_of_elem + ) - if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) + def get_next(_continuation_token=None): + _request = prepare_request(_continuation_token) - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-continuation-NextTableName"] = self._deserialize( - "str", response.headers.get("x-ms-continuation-NextTableName") - ) + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response - deserialized = self._deserialize("TableQueryResponse", pipeline_response) + if 
response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) + raise HttpResponseError(response=response, model=error) - if cls: - return cls(pipeline_response, deserialized, response_headers) + return pipeline_response - return deserialized + return ItemPaged(get_next, extract_data) @distributed_trace def create( @@ -690,24 +672,22 @@ def create( ) -> Optional[_models.TableResponse]: """Creates a new table under the given account. - :param table_properties: The Table properties. Required. - :type table_properties: ~azure.table.models.TableProperties - :keyword format: Specifies the media type for the response. Known values are: + :param table_properties: The table properties to create. Required. + :type table_properties: ~azure.data.tables._generated.models.TableProperties + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat - :keyword response_preference: Specifies whether the response should include the inserted entity - in the payload. Possible values are return-no-content and return-content. Known values are: + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat + :keyword response_preference: Specifies whether the response should include the created table + in the + payload. Possible values are return-no-content and return-content. Known values are: "return-no-content" and "return-content". Default value is None. - :paramtype response_preference: str or ~azure.table.models.ResponseFormat - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str - :return: TableResponse or None - :rtype: ~azure.table.models.TableResponse or None + :paramtype response_preference: str or ~azure.data.tables.models.ResponseFormat + :return: TableResponse or None. The TableResponse is compatible with MutableMapping + :rtype: ~azure.data.tables._generated.models.TableResponse or None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -726,71 +706,82 @@ def create( ) cls: ClsType[Optional[_models.TableResponse]] = kwargs.pop("cls", None) - _json = self._serialize.body(table_properties, "TableProperties") + _content = json.dumps(table_properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - request = build_table_create_request( + _request = build_table_create_request( format=format, response_preference=response_preference, - data_service_version=data_service_version, content_type=content_type, - version=self._config.version, - json=_json, + data_service_version=data_service_version, + api_version=self._config.api_version, + content=_content, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [201, 204]: if _stream: - response.read() # Load the body in memory and close the socket + try: + response.read() # Load the body in memory and 
close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) deserialized = None response_headers = {} if response.status_code == 201: + response_headers["Preference-Applied"] = self._deserialize( + "str", response.headers.get("Preference-Applied") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["Preference-Applied"] = self._deserialize( - "str", response.headers.get("Preference-Applied") - ) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - deserialized = self._deserialize("TableResponse", pipeline_response) + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.TableResponse, response.json()) if response.status_code == 204: + response_headers["Preference-Applied"] = self._deserialize( + "str", response.headers.get("Preference-Applied") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", 
response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["Preference-Applied"] = self._deserialize( - "str", response.headers.get("Preference-Applied") - ) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore @distributed_trace def delete(self, table: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements - """Operation permanently deletes the specified table. + """Deletes an existing table. :param table: The name of the table. Required. 
:type table: str @@ -798,7 +789,7 @@ def delete(self, table: str, **kwargs: Any) -> None: # pylint: disable=inconsis :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -811,87 +802,86 @@ def delete(self, table: str, **kwargs: Any) -> None: # pylint: disable=inconsis cls: ClsType[None] = kwargs.pop("cls", None) - request = build_table_delete_request( + _request = build_table_delete_request( table=table, - version=self._config.version, + api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [204]: - if _stream: - response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = 
self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace def query_entities( self, table: str, *, - timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, top: Optional[int] = None, select: Optional[str] = None, filter: Optional[str] = None, + timeout: Optional[int] = None, next_partition_key: Optional[str] = None, next_row_key: Optional[str] = None, **kwargs: Any ) -> _models.TableEntityQueryResponse: - """Queries entities in a table. + """Queries entities under the given table. :param table: The name of the table. Required. :type table: str - :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. - :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat - :keyword top: Maximum number of records to return. Default value is None. + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat + :keyword top: Specifies the maximum number of records to return. Default value is None. :paramtype top: int :keyword select: Select expression using OData notation. Limits the columns on each record to - just those requested, e.g. "$select=PolicyAssignmentId, ResourceId". Default value is None. + just those requested. Default value is None. :paramtype select: str :keyword filter: OData filter expression. 
Default value is None. :paramtype filter: str - :keyword next_partition_key: An entity query continuation token from a previous call. Default - value is None. + :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. + :paramtype timeout: int + :keyword next_partition_key: An entity partition key query continuation token from a previous + call. Default value is None. :paramtype next_partition_key: str - :keyword next_row_key: An entity query continuation token from a previous call. Default value - is None. + :keyword next_row_key: An entity row key query continuation token from a previous call. Default + value is None. :paramtype next_row_key: str - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. - :paramtype data_service_version: str - :return: TableEntityQueryResponse - :rtype: ~azure.table.models.TableEntityQueryResponse + :return: TableEntityQueryResponse. 
The TableEntityQueryResponse is compatible with + MutableMapping + :rtype: ~azure.data.tables._generated.models.TableEntityQueryResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -899,7 +889,7 @@ def query_entities( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} data_service_version: Literal["3.0"] = kwargs.pop( @@ -907,59 +897,70 @@ def query_entities( ) cls: ClsType[_models.TableEntityQueryResponse] = kwargs.pop("cls", None) - request = build_table_query_entities_request( + _request = build_table_query_entities_request( table=table, - timeout=timeout, format=format, top=top, select=select, filter=filter, + timeout=timeout, next_partition_key=next_partition_key, next_row_key=next_row_key, data_service_version=data_service_version, - version=self._config.version, + api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200]: if _stream: - response.read() # Load the body in memory and close the socket + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + 
pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) response_headers["x-ms-continuation-NextPartitionKey"] = self._deserialize( "str", response.headers.get("x-ms-continuation-NextPartitionKey") ) response_headers["x-ms-continuation-NextRowKey"] = self._deserialize( "str", response.headers.get("x-ms-continuation-NextRowKey") ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - deserialized = self._deserialize("TableEntityQueryResponse", pipeline_response) + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.TableEntityQueryResponse, response.json()) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return 
deserialized # type: ignore @distributed_trace def query_entity_with_partition_and_row_key( @@ -973,8 +974,8 @@ def query_entity_with_partition_and_row_key( select: Optional[str] = None, filter: Optional[str] = None, **kwargs: Any - ) -> Dict[str, Any]: - """Queries a single entity in a table. + ) -> dict[str, Any]: + """Retrieve a single entity. :param table: The name of the table. Required. :type table: str @@ -984,23 +985,20 @@ def query_entity_with_partition_and_row_key( :type row_key: str :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword select: Select expression using OData notation. Limits the columns on each record to - just those requested, e.g. "$select=PolicyAssignmentId, ResourceId". Default value is None. + just those requested. Default value is None. :paramtype select: str :keyword filter: OData filter expression. Default value is None. :paramtype filter: str - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str :return: dict mapping str to any :rtype: dict[str, any] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1008,15 +1006,15 @@ def query_entity_with_partition_and_row_key( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} data_service_version: Literal["3.0"] = kwargs.pop( "data_service_version", _headers.pop("DataServiceVersion", "3.0") ) - cls: ClsType[Dict[str, Any]] = kwargs.pop("cls", None) + cls: ClsType[dict[str, Any]] = kwargs.pop("cls", None) - request = build_table_query_entity_with_partition_and_row_key_request( + _request = build_table_query_entity_with_partition_and_row_key_request( table=table, partition_key=partition_key, row_key=row_key, @@ -1025,36 +1023,37 @@ def query_entity_with_partition_and_row_key( select=select, filter=filter, data_service_version=data_service_version, - version=self._config.version, + api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200]: if _stream: - response.read() # Load the body in memory and close the socket + try: + response.read() # Load the 
body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) response_headers["x-ms-continuation-NextPartitionKey"] = self._deserialize( "str", response.headers.get("x-ms-continuation-NextPartitionKey") @@ -1062,21 +1061,31 @@ def query_entity_with_partition_and_row_key( response_headers["x-ms-continuation-NextRowKey"] = self._deserialize( "str", response.headers.get("x-ms-continuation-NextRowKey") ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - deserialized = self._deserialize("{object}", pipeline_response) + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(dict[str, 
Any], response.json()) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore @overload - def update_entity( # pylint: disable=inconsistent-return-statements + def update_entity( self, table: str, partition_key: str, row_key: str, - table_entity_properties: Optional[Dict[str, Any]] = None, + table_entity_properties: Optional[dict[str, Any]] = None, *, timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, @@ -1097,10 +1106,10 @@ def update_entity( # pylint: disable=inconsistent-return-statements :type table_entity_properties: dict[str, any] :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -1109,21 +1118,18 @@ def update_entity( # pylint: disable=inconsistent-return-statements :paramtype etag: str :keyword match_condition: The match condition to use upon the etag. Default value is None. :paramtype match_condition: ~azure.core.MatchConditions - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ @overload - def update_entity( # pylint: disable=inconsistent-return-statements + def update_entity( self, table: str, partition_key: str, row_key: str, - table_entity_properties: Optional[IO] = None, + table_entity_properties: Optional[IO[bytes]] = None, *, timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, @@ -1141,13 +1147,13 @@ def update_entity( # pylint: disable=inconsistent-return-statements :param row_key: The row key of the entity. Required. :type row_key: str :param table_entity_properties: The properties for the table entity. Default value is None. - :type table_entity_properties: IO + :type table_entity_properties: IO[bytes] :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -1156,9 +1162,6 @@ def update_entity( # pylint: disable=inconsistent-return-statements :paramtype etag: str :keyword match_condition: The match condition to use upon the etag. Default value is None. :paramtype match_condition: ~azure.core.MatchConditions - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -1170,7 +1173,7 @@ def update_entity( # pylint: disable=inconsistent-return-statements table: str, partition_key: str, row_key: str, - table_entity_properties: Optional[Union[Dict[str, Any], IO]] = None, + table_entity_properties: Optional[Union[dict[str, Any], IO[bytes]]] = None, *, timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, @@ -1187,30 +1190,24 @@ def update_entity( # pylint: disable=inconsistent-return-statements :param row_key: The row key of the entity. Required. :type row_key: str :param table_entity_properties: The properties for the table entity. Is either a {str: Any} - type or a IO type. Default value is None. - :type table_entity_properties: dict[str, any] or IO + type or a IO[bytes] type. Default value is None. + :type table_entity_properties: dict[str, any] or IO[bytes] :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is None. :paramtype etag: str :keyword match_condition: The match condition to use upon the etag. Default value is None. :paramtype match_condition: ~azure.core.MatchConditions - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1231,20 +1228,20 @@ def update_entity( # pylint: disable=inconsistent-return-statements "data_service_version", _headers.pop("DataServiceVersion", "3.0") ) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if table_entity_properties else None cls: ClsType[None] = kwargs.pop("cls", None) - content_type = content_type or "application/json" - _json = None + content_type = content_type or "application/json" if table_entity_properties else None _content = None if isinstance(table_entity_properties, (IOBase, bytes)): _content = table_entity_properties else: if table_entity_properties is not None: - _json = self._serialize.body(table_entity_properties, "{object}") + _content = json.dumps(table_entity_properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore else: - _json = None + _content = None - request = build_table_update_entity_request( + _request = build_table_update_entity_request( table=table, partition_key=partition_key, row_key=row_key, @@ -1254,8 +1251,7 @@ def update_entity( # pylint: disable=inconsistent-return-statements match_condition=match_condition, data_service_version=data_service_version, content_type=content_type, - version=self._config.version, - json=_json, + api_version=self._config.api_version, content=_content, headers=_headers, params=_params, @@ -1263,41 +1259,42 @@ def update_entity( # pylint: disable=inconsistent-return-statements path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url 
= self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [204]: - if _stream: - response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore @overload - def merge_entity( # pylint: disable=inconsistent-return-statements + def merge_entity( self, table: str, partition_key: str, row_key: str, - table_entity_properties: Optional[Dict[str, Any]] = None, + table_entity_properties: 
Optional[dict[str, Any]] = None, *, timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, @@ -1318,10 +1315,10 @@ def merge_entity( # pylint: disable=inconsistent-return-statements :type table_entity_properties: dict[str, any] :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str @@ -1330,21 +1327,18 @@ def merge_entity( # pylint: disable=inconsistent-return-statements :paramtype etag: str :keyword match_condition: The match condition to use upon the etag. Default value is None. :paramtype match_condition: ~azure.core.MatchConditions - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ @overload - def merge_entity( # pylint: disable=inconsistent-return-statements + def merge_entity( self, table: str, partition_key: str, row_key: str, - table_entity_properties: Optional[IO] = None, + table_entity_properties: Optional[IO[bytes]] = None, *, timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, @@ -1362,13 +1356,13 @@ def merge_entity( # pylint: disable=inconsistent-return-statements :param row_key: The row key of the entity. Required. :type row_key: str :param table_entity_properties: The properties for the table entity. Default value is None. - :type table_entity_properties: IO + :type table_entity_properties: IO[bytes] :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str @@ -1377,9 +1371,6 @@ def merge_entity( # pylint: disable=inconsistent-return-statements :paramtype etag: str :keyword match_condition: The match condition to use upon the etag. Default value is None. :paramtype match_condition: ~azure.core.MatchConditions - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -1391,7 +1382,7 @@ def merge_entity( # pylint: disable=inconsistent-return-statements table: str, partition_key: str, row_key: str, - table_entity_properties: Optional[Union[Dict[str, Any], IO]] = None, + table_entity_properties: Optional[Union[dict[str, Any], IO[bytes]]] = None, *, timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, @@ -1408,30 +1399,24 @@ def merge_entity( # pylint: disable=inconsistent-return-statements :param row_key: The row key of the entity. Required. :type row_key: str :param table_entity_properties: The properties for the table entity. Is either a {str: Any} - type or a IO type. Default value is None. - :type table_entity_properties: dict[str, any] or IO + type or a IO[bytes] type. Default value is None. + :type table_entity_properties: dict[str, any] or IO[bytes] :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is None. :paramtype etag: str :keyword match_condition: The match condition to use upon the etag. Default value is None. :paramtype match_condition: ~azure.core.MatchConditions - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1452,20 +1437,20 @@ def merge_entity( # pylint: disable=inconsistent-return-statements "data_service_version", _headers.pop("DataServiceVersion", "3.0") ) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if table_entity_properties else None cls: ClsType[None] = kwargs.pop("cls", None) - content_type = content_type or "application/json" - _json = None + content_type = content_type or "application/json" if table_entity_properties else None _content = None if isinstance(table_entity_properties, (IOBase, bytes)): _content = table_entity_properties else: if table_entity_properties is not None: - _json = self._serialize.body(table_entity_properties, "{object}") + _content = json.dumps(table_entity_properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore else: - _json = None + _content = None - request = build_table_merge_entity_request( + _request = build_table_merge_entity_request( table=table, partition_key=partition_key, row_key=row_key, @@ -1475,8 +1460,7 @@ def merge_entity( # pylint: disable=inconsistent-return-statements match_condition=match_condition, data_service_version=data_service_version, content_type=content_type, - version=self._config.version, - json=_json, + api_version=self._config.api_version, content=_content, headers=_headers, params=_params, @@ -1484,33 +1468,34 @@ def merge_entity( # pylint: disable=inconsistent-return-statements path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = 
self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [204]: - if _stream: - response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace def delete_entity( # pylint: disable=inconsistent-return-statements @@ -1539,18 +1524,15 @@ def delete_entity( # pylint: disable=inconsistent-return-statements :paramtype match_condition: ~azure.core.MatchConditions :keyword timeout: 
The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. - :paramtype data_service_version: str + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1564,7 +1546,7 @@ def delete_entity( # pylint: disable=inconsistent-return-statements error_map[412] = ResourceExistsError error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} data_service_version: Literal["3.0"] = kwargs.pop( @@ -1572,7 +1554,7 @@ def delete_entity( # pylint: disable=inconsistent-return-statements ) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_table_delete_entity_request( + _request = build_table_delete_entity_request( table=table, partition_key=partition_key, row_key=row_key, @@ -1581,75 +1563,74 @@ def delete_entity( # pylint: disable=inconsistent-return-statements timeout=timeout, format=format, data_service_version=data_service_version, - version=self._config.version, + api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", 
self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [204]: - if _stream: - response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace def insert_entity( self, table: str, - table_entity_properties: Optional[Dict[str, Any]] = None, + table_entity_properties: Optional[dict[str, Any]] = None, *, timeout: Optional[int] = None, format: Optional[Union[str, _models.OdataMetadataFormat]] = None, response_preference: Optional[Union[str, _models.ResponseFormat]] = None, **kwargs: 
Any - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: """Insert entity in a table. :param table: The name of the table. Required. :type table: str - :param table_entity_properties: The properties for the table entity. Default value is None. + :param table_entity_properties: The entity properties to insert. Default value is None. :type table_entity_properties: dict[str, any] :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword format: Specifies the media type for the response. Known values are: + :keyword format: Specifies the metadata format for the response. Known values are: "application/json;odata=nometadata", "application/json;odata=minimalmetadata", and "application/json;odata=fullmetadata". Default value is None. - :paramtype format: str or ~azure.table.models.OdataMetadataFormat + :paramtype format: str or ~azure.data.tables.models.OdataMetadataFormat :keyword response_preference: Specifies whether the response should include the inserted entity - in the payload. Possible values are return-no-content and return-content. Known values are: + in the + payload. Possible values are return-no-content and return-content. Known values are: "return-no-content" and "return-content". Default value is None. - :paramtype response_preference: str or ~azure.table.models.ResponseFormat - :keyword data_service_version: Specifies the data service version. Default value is "3.0". Note - that overriding this default value may result in unsupported behavior. 
- :paramtype data_service_version: str + :paramtype response_preference: str or ~azure.data.tables.models.ResponseFormat :return: dict mapping str to any or None :rtype: dict[str, any] or None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1663,86 +1644,96 @@ def insert_entity( data_service_version: Literal["3.0"] = kwargs.pop( "data_service_version", _headers.pop("DataServiceVersion", "3.0") ) - content_type: str = kwargs.pop( + content_type: Optional[str] = kwargs.pop( "content_type", _headers.pop("Content-Type", "application/json;odata=nometadata") ) - cls: ClsType[Optional[Dict[str, Any]]] = kwargs.pop("cls", None) + content_type = content_type if table_entity_properties else None + cls: ClsType[Optional[dict[str, Any]]] = kwargs.pop("cls", None) if table_entity_properties is not None: - _json = self._serialize.body(table_entity_properties, "{object}") + _content = json.dumps(table_entity_properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore else: - _json = None + _content = None - request = build_table_insert_entity_request( + _request = build_table_insert_entity_request( table=table, timeout=timeout, format=format, response_preference=response_preference, - data_service_version=data_service_version, content_type=content_type, - version=self._config.version, - json=_json, + data_service_version=data_service_version, + api_version=self._config.api_version, + content=_content, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) pipeline_response: 
PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [201, 204]: if _stream: - response.read() # Load the body in memory and close the socket + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize( + _models.TablesError, + response, + ) raise HttpResponseError(response=response, model=error) deserialized = None response_headers = {} if response.status_code == 201: + response_headers["Preference-Applied"] = self._deserialize( + "str", response.headers.get("Preference-Applied") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Preference-Applied"] = self._deserialize( - "str", response.headers.get("Preference-Applied") - ) response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - deserialized = self._deserialize("{object}", pipeline_response) + if _stream: + deserialized = response.iter_bytes() if _decompress 
else response.iter_raw() + else: + deserialized = _deserialize(dict[str, Any], response.json()) if response.status_code == 204: + response_headers["Preference-Applied"] = self._deserialize( + "str", response.headers.get("Preference-Applied") + ) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Preference-Applied"] = self._deserialize( - "str", response.headers.get("Preference-Applied") - ) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore @distributed_trace def get_access_policy( self, table: str, *, timeout: Optional[int] = None, **kwargs: Any - ) -> List[_models.SignedIdentifier]: + ) -> _models.SignedIdentifiers: """Retrieves details about any stored access policies specified on the table that may be used with Shared Access Signatures. @@ -1750,15 +1741,11 @@ def get_access_policy( :type table: str :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword comp: Required query string to handle stored access policies for the table that may be - used with Shared Access Signatures. 
Default value is "acl". Note that overriding this default - value may result in unsupported behavior. - :paramtype comp: str - :return: SignedIdentifier - :rtype: ~azure.table.models.SignedIdentifier + :return: SignedIdentifiers. The SignedIdentifiers is compatible with MutableMapping + :rtype: ~azure.data.tables._generated.models.SignedIdentifiers :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1767,79 +1754,79 @@ def get_access_policy( error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + _params = kwargs.pop("params", {}) or {} - comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) - cls: ClsType[List[_models.SignedIdentifier]] = kwargs.pop("cls", None) + cls: ClsType[_models.SignedIdentifiers] = kwargs.pop("cls", None) - request = build_table_get_access_policy_request( + _request = build_table_get_access_policy_request( table=table, timeout=timeout, - comp=comp, - version=self._config.version, + api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200]: if _stream: - response.read() # Load the body in memory and close the socket + try: + response.read() # 
Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize_xml( + _models.TableServiceError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - deserialized = self._deserialize("[SignedIdentifier]", pipeline_response) + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize_xml(_models.SignedIdentifiers, response.text()) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore @distributed_trace def set_access_policy( # pylint: disable=inconsistent-return-statements - self, - table: str, - table_acl: Optional[List[_models.SignedIdentifier]] = None, - *, - timeout: Optional[int] = None, - **kwargs: Any + self, table: str, table_acl: 
_models.SignedIdentifiers, *, timeout: Optional[int] = None, **kwargs: Any ) -> None: """Sets stored access policies for the table that may be used with Shared Access Signatures. :param table: The name of the table. Required. :type table: str - :param table_acl: The acls for the table. Default value is None. - :type table_acl: ~azure.table.models.SignedIdentifier + :param table_acl: The access control list for the table. Required. + :type table_acl: ~azure.data.tables._generated.models.SignedIdentifiers :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword comp: Required query string to handle stored access policies for the table that may be - used with Shared Access Signatures. Default value is "acl". Note that overriding this default - value may result in unsupported behavior. - :paramtype comp: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1848,26 +1835,18 @@ def set_access_policy( # pylint: disable=inconsistent-return-statements error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + _params = kwargs.pop("params", {}) or {} - comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) cls: ClsType[None] = kwargs.pop("cls", None) - serialization_ctxt = {"xml": {"name": "SignedIdentifiers", "wrapped": True, "itemsName": "SignedIdentifier"}} - if table_acl is not None: - _content = self._serialize.body( - table_acl, "[SignedIdentifier]", is_xml=True, serialization_ctxt=serialization_ctxt - ) - else: - _content = None + _content = _get_element(table_acl) - request = 
build_table_set_access_policy_request( + _request = build_table_set_access_policy_request( table=table, timeout=timeout, - comp=comp, content_type=content_type, - version=self._config.version, + api_version=self._config.api_version, content=_content, headers=_headers, params=_params, @@ -1875,32 +1854,33 @@ def set_access_policy( # pylint: disable=inconsistent-return-statements path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [204]: - if _stream: - response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize_xml( + _models.TableServiceError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = 
self._deserialize("rfc-1123", response.headers.get("Date")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore class ServiceOperations: @@ -1909,18 +1889,16 @@ class ServiceOperations: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.table.AzureTable`'s + :class:`~azure.data.tables.AzureTableClient`'s :attr:`service` attribute. """ - models = _models - - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AzureTableClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def set_properties( # pylint: disable=inconsistent-return-statements @@ -1929,21 +1907,15 @@ def set_properties( # pylint: disable=inconsistent-return-statements """Sets properties for an account's Table service endpoint, including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules. - :param table_service_properties: The Table Service properties. Required. - :type table_service_properties: ~azure.table.models.TableServiceProperties + :param table_service_properties: The table service properties to set. Required. 
+ :type table_service_properties: ~azure.data.tables._generated.models.TableServiceProperties :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword restype: Required query string to set the service properties. Default value is - "service". Note that overriding this default value may result in unsupported behavior. - :paramtype restype: str - :keyword comp: Required query string to set the service properties. Default value is - "properties". Note that overriding this default value may result in unsupported behavior. - :paramtype comp: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1952,21 +1924,17 @@ def set_properties( # pylint: disable=inconsistent-return-statements error_map.update(kwargs.pop("error_map", {}) or {}) _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + _params = kwargs.pop("params", {}) or {} - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml")) cls: ClsType[None] = kwargs.pop("cls", None) - _content = self._serialize.body(table_service_properties, "TableServiceProperties", is_xml=True) + _content = _get_element(table_service_properties) - request = build_service_set_properties_request( + _request = build_service_set_properties_request( timeout=timeout, - restype=restype, - comp=comp, content_type=content_type, - version=self._config.version, + api_version=self._config.api_version, content=_content, headers=_headers, params=_params, @@ -1974,31 +1942,32 @@ def set_properties( # pylint: 
disable=inconsistent-return-statements path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [202]: - if _stream: - response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize_xml( + _models.TableServiceError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) if cls: - return cls(pipeline_response, None, response_headers) + return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace def get_properties(self, *, timeout: Optional[int] = None, **kwargs: Any) -> _models.TableServiceProperties: @@ -2007,17 +1976,11 @@ def get_properties(self, *, timeout: Optional[int] = None, **kwargs: Any) -> _mo :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. 
:paramtype timeout: int - :keyword restype: Required query string to set the service properties. Default value is - "service". Note that overriding this default value may result in unsupported behavior. - :paramtype restype: str - :keyword comp: Required query string to set the service properties. Default value is - "properties". Note that overriding this default value may result in unsupported behavior. - :paramtype comp: str - :return: TableServiceProperties - :rtype: ~azure.table.models.TableServiceProperties + :return: TableServiceProperties. The TableServiceProperties is compatible with MutableMapping + :rtype: ~azure.data.tables._generated.models.TableServiceProperties :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2026,52 +1989,59 @@ def get_properties(self, *, timeout: Optional[int] = None, **kwargs: Any) -> _mo error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + _params = kwargs.pop("params", {}) or {} - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) cls: ClsType[_models.TableServiceProperties] = kwargs.pop("cls", None) - request = build_service_get_properties_request( + _request = build_service_get_properties_request( timeout=timeout, - restype=restype, - comp=comp, - version=self._config.version, + api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _decompress = 
kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200]: if _stream: - response.read() # Load the body in memory and close the socket + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize_xml( + _models.TableServiceError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - deserialized = self._deserialize("TableServiceProperties", pipeline_response) + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize_xml(_models.TableServiceProperties, response.text()) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore @distributed_trace 
def get_statistics(self, *, timeout: Optional[int] = None, **kwargs: Any) -> _models.TableServiceStats: @@ -2081,17 +2051,11 @@ def get_statistics(self, *, timeout: Optional[int] = None, **kwargs: Any) -> _mo :keyword timeout: The timeout parameter is expressed in seconds. Default value is None. :paramtype timeout: int - :keyword restype: Required query string to get service stats. Default value is "service". Note - that overriding this default value may result in unsupported behavior. - :paramtype restype: str - :keyword comp: Required query string to get service stats. Default value is "stats". Note that - overriding this default value may result in unsupported behavior. - :paramtype comp: str - :return: TableServiceStats - :rtype: ~azure.table.models.TableServiceStats + :return: TableServiceStats. The TableServiceStats is compatible with MutableMapping + :rtype: ~azure.data.tables._generated.models.TableServiceStats :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -2100,50 +2064,57 @@ def get_statistics(self, *, timeout: Optional[int] = None, **kwargs: Any) -> _mo error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + _params = kwargs.pop("params", {}) or {} - restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) - comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) cls: ClsType[_models.TableServiceStats] = kwargs.pop("cls", None) - request = build_service_get_statistics_request( + _request = build_service_get_statistics_request( timeout=timeout, - restype=restype, - comp=comp, - version=self._config.version, + api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { "url": self._serialize.url("self._config.url", 
self._config.url, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200]: if _stream: - response.read() # Load the body in memory and close the socket + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.TableServiceError, pipeline_response) + error = _failsafe_deserialize_xml( + _models.TableServiceError, + response, + ) raise HttpResponseError(response=response, model=error) response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) response_headers["x-ms-client-request-id"] = self._deserialize( "str", response.headers.get("x-ms-client-request-id") ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - deserialized = self._deserialize("TableServiceStats", pipeline_response) + 
if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize_xml(_models.TableServiceStats, response.text()) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/operations/_patch.py b/sdk/tables/azure-data-tables/azure/data/tables/_generated/operations/_patch.py index f7dd32510333..87676c65a8f0 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/operations/_patch.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_generated/operations/_patch.py @@ -1,14 +1,15 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_generated/py.typed b/sdk/tables/azure-data-tables/azure/data/tables/_generated/py.typed deleted file mode 100644 index e5aff4f83af8..000000000000 --- a/sdk/tables/azure-data-tables/azure/data/tables/_generated/py.typed +++ /dev/null @@ -1 +0,0 @@ -# Marker file for PEP 561. 
\ No newline at end of file diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_models.py b/sdk/tables/azure-data-tables/azure/data/tables/_models.py index e59d798d8d8e..b010e95fd460 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_models.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_models.py @@ -77,6 +77,7 @@ def __init__(self, **kwargs) -> None: # pylint: disable=super-init-not-called be UTC. :paramtype start: ~datetime.datetime or str """ + self._data = {} self.start = kwargs.get("start") self.expiry = kwargs.get("expiry") self.permission = kwargs.get("permission") @@ -99,6 +100,7 @@ def __init__(self, **kwargs: Any) -> None: # pylint: disable=super-init-not-cal soft-deleted data should be retained. All data older than this value will be deleted. Must be specified if policy is enabled. """ + self._data = {} self.enabled = kwargs.get("enabled", False) self.days = kwargs.get("days") if self.enabled and (self.days is None): @@ -135,6 +137,7 @@ def __init__(self, **kwargs: Any) -> None: # pylint: disable=super-init-not-cal :keyword ~azure.data.tables.TableRetentionPolicy retention_policy: The retention policy for the metrics. Default value is a TableRetentionPolicy object with default settings. """ + self._data = {} self.version = kwargs.get("version", "1.0") self.delete = kwargs.get("delete", False) self.read = kwargs.get("read", False) @@ -179,6 +182,7 @@ def __init__(self, **kwargs: Any) -> None: # pylint: disable=super-init-not-cal :keyword ~azure.data.tables.TableRetentionPolicy retention_policy: The retention policy for the metrics. Default value is a TableRetentionPolicy object with default settings. 
""" + self._data = {} self.version = kwargs.get("version", "1.0") self.enabled = kwargs.get("enabled", False) self.include_apis = kwargs.get("include_apis") @@ -312,8 +316,8 @@ def _get_next_cb(self, continuation_token, **kwargs): # pylint: disable=inconsi def _extract_data_cb(self, get_next_return): self._location_mode, self._response, self._headers = get_next_return - props_list = [TableItem(t.table_name) for t in self._response.value] - return self._headers[NEXT_TABLE_NAME] or None, props_list + props_list = [TableItem(t.get("TableName", "")) for t in self._response.get("value", [])] + return self._headers.get(NEXT_TABLE_NAME) or None, props_list def _extract_continuation_token(continuation_token): diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_table_batch.py b/sdk/tables/azure-data-tables/azure/data/tables/_table_batch.py index 256efbf1fbdf..7c23e37faffb 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_table_batch.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_table_batch.py @@ -19,8 +19,8 @@ build_table_update_entity_request, build_table_delete_entity_request, ) -from ._generated._configuration import AzureTableConfiguration -from ._generated.aio._configuration import AzureTableConfiguration as AsyncAzureTableConfiguration +from ._generated._configuration import AzureTableClientConfiguration +from ._generated.aio._configuration import AzureTableClientConfiguration as AsyncAzureTableClientConfiguration EntityType = Union[TableEntity, Mapping[str, Any]] @@ -48,7 +48,7 @@ class TableBatchOperations(object): def __init__( self, - config: Union[AzureTableConfiguration, AsyncAzureTableConfiguration], + config: Union[AzureTableClientConfiguration, AsyncAzureTableClientConfiguration], endpoint: str, table_name: str, encoder: TableEntityEncoder, @@ -56,8 +56,8 @@ def __init__( ) -> None: """Create TableClient from a Credential. - :param config: An AzureTableConfiguration object. 
- :type config: ~azure.data.tables._generated._configuration.AzureTableConfiguration + :param config: An AzureTableClientConfiguration object. + :type config: ~azure.data.tables._generated._configuration.AzureTableClientConfiguration :param endpoint: The primary account URL. :type endpoint: str :param table_name: The name of the Table to perform operations on. @@ -126,7 +126,11 @@ def create(self, entity: EntityType, **kwargs) -> None: entity_json = self._encoder(entity) self._verify_partition_key(entity_json) request = build_table_insert_entity_request( - table=self.table_name, json=entity_json, version=self._config.version, **kwargs + table=self.table_name, + json=entity_json, + api_version=self._config.api_version, + content_type="application/json;odata=nometadata", + **kwargs, ) request.url = self._base_url + request.url self.requests.append(request) @@ -180,7 +184,7 @@ def update( etag=etag, match_condition=match_condition, json=entity_json, - version=self._config.version, + api_version=self._config.api_version, **kwargs, ) elif mode == UpdateMode.MERGE: @@ -191,7 +195,7 @@ def update( etag=etag, match_condition=match_condition, json=entity_json, - version=self._config.version, + api_version=self._config.api_version, **kwargs, ) if self._is_cosmos_endpoint: @@ -244,7 +248,7 @@ def delete( match_condition=_get_match_condition( etag=etag, match_condition=match_condition or MatchConditions.Unconditionally ), - version=self._config.version, + api_version=self._config.api_version, **kwargs, ) request.url = self._base_url + request.url @@ -280,7 +284,7 @@ def upsert(self, entity: EntityType, mode: Union[str, UpdateMode] = UpdateMode.M partition_key=_prepare_key(partition_key), # type: ignore[arg-type] row_key=_prepare_key(row_key), # type: ignore[arg-type] json=entity_json, - version=self._config.version, + api_version=self._config.api_version, **kwargs, ) elif mode == UpdateMode.MERGE: @@ -289,7 +293,7 @@ def upsert(self, entity: EntityType, mode: Union[str, 
UpdateMode] = UpdateMode.M partition_key=_prepare_key(partition_key), # type: ignore[arg-type] row_key=_prepare_key(row_key), # type: ignore[arg-type] json=entity_json, - version=self._config.version, + api_version=self._config.api_version, **kwargs, ) if self._is_cosmos_endpoint: diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_table_client.py b/sdk/tables/azure-data-tables/azure/data/tables/_table_client.py index 63f8c891038f..6e11596e7af0 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_table_client.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_table_client.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for @@ -26,7 +27,12 @@ _validate_tablename_error, _validate_key_values, ) -from ._generated.models import SignedIdentifier, TableProperties +from ._generated.models import ( + AccessPolicy as _AccessPolicy, + SignedIdentifier, + SignedIdentifiers as _SignedIdentifiers, + TableProperties, +) from ._serialize import ( serialize_iso, _parameter_filter_substitution, @@ -204,7 +210,7 @@ def get_table_access_policy(self, **kwargs) -> Dict[str, Optional[TableAccessPol except HttpResponseError as error: _process_table_error(error, table_name=self.table_name) output: Dict[str, Optional[TableAccessPolicy]] = {} - for identifier in cast(List[SignedIdentifier], identifiers): + for identifier in identifiers.identifiers or []: if identifier.access_policy: output[identifier.id] = TableAccessPolicy( start=deserialize_iso(identifier.access_policy.start), @@ -226,16 +232,35 @@ def set_table_access_policy(self, signed_identifiers: Mapping[str, Optional[Tabl """ identifiers = [] for key, value in signed_identifiers.items(): - payload = None - if value: - payload = TableAccessPolicy( - start=serialize_iso(value.start), - 
expiry=serialize_iso(value.expiry), - permission=value.permission, + if value is not None: + payload = _AccessPolicy( + start=serialize_iso(value.start), # type: ignore[arg-type] + expiry=serialize_iso(value.expiry), # type: ignore[arg-type] + permission=value.permission, # type: ignore[arg-type] ) + else: + payload = None identifiers.append(SignedIdentifier(id=key, access_policy=payload)) try: - self._client.table.set_access_policy(table=self.table_name, table_acl=identifiers or None, **kwargs) + if identifiers: + self._client.table.set_access_policy( + table=self.table_name, + table_acl=_SignedIdentifiers(identifiers=identifiers), + **kwargs, + ) + else: + from ._generated.operations._operations import build_table_set_access_policy_request + + _request = build_table_set_access_policy_request( + table=self.table_name, + content_type="application/xml", + api_version=self._client._config.api_version, # pylint: disable=protected-access + ) + _request.url = self._client._config.url + _request.url # pylint: disable=protected-access + pipeline_response = self._client.table._client.send_request( # pylint: disable=protected-access + _request, stream=False, **kwargs + ) + pipeline_response.raise_for_status() except HttpResponseError as error: try: _process_table_error(error, table_name=self.table_name) diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_table_service_client.py b/sdk/tables/azure-data-tables/azure/data/tables/_table_service_client.py index cf09329bcd40..2c3509d4ad46 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_table_service_client.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_table_service_client.py @@ -4,7 +4,6 @@ # license information. 
# -------------------------------------------------------------------------- -import functools from typing import Optional, Any, Dict, List from azure.core.exceptions import HttpResponseError, ResourceExistsError from azure.core.paging import ItemPaged @@ -18,7 +17,6 @@ TableCorsRule, TableMetrics, TableAnalyticsLogging, - TablePropertiesPaged, service_stats_deserialize, service_properties_deserialize, ) @@ -28,6 +26,26 @@ from ._serialize import _parameter_filter_substitution +class _TableItemPaged: + """Wrapper around ItemPaged that re-processes table-specific errors.""" + + def __init__(self, paged): + self._paged = paged + + def __iter__(self): + return self + + def __next__(self): + try: + return next(self._paged) + except HttpResponseError as error: + _process_table_error(error) + raise # _process_table_error always raises, but this satisfies pylint/mypy + + def by_page(self, *args, **kwargs): + return self._paged.by_page(*args, **kwargs) + + class TableServiceClient(TablesBaseClient): """A client to interact with the Table Service at the account level. 
@@ -269,13 +287,14 @@ def query_tables( """ query_filter = _parameter_filter_substitution(parameters, query_filter) - command = functools.partial(self._client.table.query, **kwargs) - return ItemPaged( - command, - results_per_page=results_per_page, - filter=query_filter, - page_iterator_class=TablePropertiesPaged, - ) + return _TableItemPaged( + self._client.table.query( + top=results_per_page, + filter=query_filter, + cls=lambda items: [TableItem(i.table_name) for i in items], + **kwargs, + ) + ) # type: ignore[return-value] @distributed_trace def list_tables(self, *, results_per_page: Optional[int] = None, **kwargs) -> ItemPaged[TableItem]: @@ -295,12 +314,13 @@ def list_tables(self, *, results_per_page: Optional[int] = None, **kwargs) -> It :dedent: 16 :caption: Listing all tables in a storage account """ - command = functools.partial(self._client.table.query, **kwargs) - return ItemPaged( - command, - results_per_page=results_per_page, - page_iterator_class=TablePropertiesPaged, - ) + return _TableItemPaged( + self._client.table.query( + top=results_per_page, + cls=lambda items: [TableItem(i.table_name) for i in items], + **kwargs, + ) + ) # type: ignore[return-value] def get_table_client(self, table_name: str, **kwargs: Any) -> TableClient: """Get a client to interact with the specified table. diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_types.py b/sdk/tables/azure-data-tables/azure/data/tables/_types.py new file mode 100644 index 000000000000..88452d41f501 --- /dev/null +++ b/sdk/tables/azure-data-tables/azure/data/tables/_types.py @@ -0,0 +1,11 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import Union + +EntityValueType = Union[str, int, float, bool] diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_version.py b/sdk/tables/azure-data-tables/azure/data/tables/_version.py index 91671987784d..2c3e0feeddce 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/_version.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/_version.py @@ -1,7 +1,9 @@ -# ------------------------------------------------------------------------- +# coding=utf-8 +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- VERSION = "12.8.0b1" diff --git a/sdk/tables/azure-data-tables/azure/data/tables/aio/_base_client_async.py b/sdk/tables/azure-data-tables/azure/data/tables/aio/_base_client_async.py index e5b0307a1dc2..8f7ea53413b2 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/aio/_base_client_async.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/aio/_base_client_async.py @@ -23,14 +23,22 @@ RequestIdPolicy, CustomHookPolicy, NetworkTraceLoggingPolicy, + SansIOHTTPPolicy, ) from azure.core.rest import HttpRequest from ._authentication_async import _configure_credential from .._common_conversion import _is_cosmos_endpoint, _get_account from .._constants import DEFAULT_STORAGE_ENDPOINT_SUFFIX -from .._generated.aio import AzureTable -from .._base_client import extract_batch_part_metadata, parse_query, format_query_string, get_api_version, AudienceType +from .._generated.aio import AzureTableClient as _AzureTableClient +from .._base_client import ( + extract_batch_part_metadata, + parse_query, + format_query_string, + get_api_version, + AudienceType, + _NoOpCredential, +) from .._error import ( RequestTooLargeError, TableTransactionError, @@ -124,10 +132,16 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential if self._cosmos_endpoint: self._policies.insert(0, CosmosPatchTransformPolicy()) - self._client = AzureTable(self.url, policies=kwargs.pop("policies", self._policies), **kwargs) + self._client = _AzureTableClient( + self.url, + credential=credential or _NoOpCredential(), # type: ignore[arg-type] + policies=kwargs.pop("policies", self._policies), + authentication_policy=kwargs.pop("authentication_policy", SansIOHTTPPolicy()), + **kwargs, + ) # Incompatible assignment when assigning a str value to a Literal type variable - self._client._config.version = get_api_version( - api_version, self._client._config.version + self._client._config.api_version = 
get_api_version( + api_version, self._client._config.api_version ) # type: ignore[assignment] @property @@ -194,7 +208,7 @@ def api_version(self) -> str: :return: The Storage API version. :type: str """ - return self._client._config.version # pylint: disable=protected-access + return self._client._config.api_version # pylint: disable=protected-access async def __aenter__(self) -> Self: await self._client.__aenter__() diff --git a/sdk/tables/azure-data-tables/azure/data/tables/aio/_models.py b/sdk/tables/azure-data-tables/azure/data/tables/aio/_models.py index fd82e55d5ae8..66ad6bacf137 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/aio/_models.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/aio/_models.py @@ -50,8 +50,8 @@ async def _get_next_cb(self, continuation_token, **kwargs): async def _extract_data_cb(self, get_next_return): self._location_mode, self._response, self._headers = get_next_return - props_list = [TableItem(t.table_name) for t in self._response.value] - return self._headers[NEXT_TABLE_NAME] or None, props_list + props_list = [TableItem(t.get("TableName", "")) for t in self._response.get("value", [])] + return self._headers.get(NEXT_TABLE_NAME) or None, props_list class TableEntityPropertiesPaged(AsyncPageIterator): diff --git a/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_client_async.py b/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_client_async.py index d8d69f8f1a01..025d2458420c 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_client_async.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_client_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for @@ -21,7 +22,12 @@ from .._encoder import TableEntityEncoder, EncoderMapType from .._entity import TableEntity from .._decoder import TableEntityDecoder, deserialize_iso, DecoderMapType -from .._generated.models import SignedIdentifier, TableProperties +from .._generated.models import ( + AccessPolicy as _AccessPolicy, + SignedIdentifier, + SignedIdentifiers as _SignedIdentifiers, + TableProperties, +) from .._models import TableAccessPolicy, TableItem, UpdateMode from .._serialize import ( serialize_iso, @@ -205,7 +211,7 @@ async def get_table_access_policy(self, **kwargs) -> Mapping[str, Optional[Table except HttpResponseError as error: _process_table_error(error, table_name=self.table_name) output = {} # type: Dict[str, Optional[TableAccessPolicy]] - for identifier in cast(List[SignedIdentifier], identifiers): + for identifier in identifiers.identifiers or []: if identifier.access_policy: output[identifier.id] = TableAccessPolicy( start=deserialize_iso(identifier.access_policy.start), @@ -229,16 +235,35 @@ async def set_table_access_policy( """ identifiers = [] for key, value in signed_identifiers.items(): - payload = None - if value: - payload = TableAccessPolicy( - start=serialize_iso(value.start), - expiry=serialize_iso(value.expiry), - permission=value.permission, + if value is not None: + payload = _AccessPolicy( + start=serialize_iso(value.start), # type: ignore[arg-type] + expiry=serialize_iso(value.expiry), # type: ignore[arg-type] + permission=value.permission, # type: ignore[arg-type] ) + else: + payload = None identifiers.append(SignedIdentifier(id=key, access_policy=payload)) try: - await self._client.table.set_access_policy(table=self.table_name, table_acl=identifiers or None, **kwargs) + if identifiers: + await self._client.table.set_access_policy( + table=self.table_name, + table_acl=_SignedIdentifiers(identifiers=identifiers), + **kwargs, + ) + else: + from .._generated.operations._operations import 
build_table_set_access_policy_request + + _request = build_table_set_access_policy_request( + table=self.table_name, + content_type="application/xml", + api_version=self._client._config.api_version, # pylint: disable=protected-access + ) + _request.url = self._client._config.url + _request.url # pylint: disable=protected-access + pipeline_response = await self._client.table._client.send_request( # pylint: disable=protected-access + _request, stream=False, **kwargs + ) + pipeline_response.raise_for_status() except HttpResponseError as error: try: _process_table_error(error, table_name=self.table_name) diff --git a/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_service_client_async.py b/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_service_client_async.py index c8e632491218..f059e5387bc6 100644 --- a/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_service_client_async.py +++ b/sdk/tables/azure-data-tables/azure/data/tables/aio/_table_service_client_async.py @@ -3,7 +3,6 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -import functools from typing import Optional, Any, Dict, List from azure.core.async_paging import AsyncItemPaged @@ -27,7 +26,26 @@ from .._serialize import _parameter_filter_substitution from ._table_client_async import TableClient from ._base_client_async import AsyncTablesBaseClient, AsyncTransportWrapper -from ._models import TablePropertiesPaged + + +class _AsyncTableItemPaged: + """Wrapper around AsyncItemPaged that re-processes table-specific errors.""" + + def __init__(self, paged): + self._paged = paged + + def __aiter__(self): + return self + + async def __anext__(self): + try: + return await self._paged.__anext__() + except HttpResponseError as error: + _process_table_error(error) + raise # _process_table_error always raises, but this satisfies pylint/mypy + + def by_page(self, *args, **kwargs): + return self._paged.by_page(*args, **kwargs) class TableServiceClient(AsyncTablesBaseClient): @@ -259,12 +277,13 @@ def list_tables(self, *, results_per_page: Optional[int] = None, **kwargs) -> As :dedent: 16 :caption: Listing all tables in an account """ - command = functools.partial(self._client.table.query, **kwargs) - return AsyncItemPaged( - command, - results_per_page=results_per_page, - page_iterator_class=TablePropertiesPaged, - ) + return _AsyncTableItemPaged( + self._client.table.query( + top=results_per_page, + cls=lambda items: [TableItem(i.table_name) for i in items], + **kwargs, + ) + ) # type: ignore[return-value] @distributed_trace def query_tables( @@ -295,13 +314,14 @@ def query_tables( :caption: Querying tables in an account given specific parameters """ query_filter = _parameter_filter_substitution(parameters, query_filter) - command = functools.partial(self._client.table.query, **kwargs) - return AsyncItemPaged( - command, - results_per_page=results_per_page, - filter=query_filter, - page_iterator_class=TablePropertiesPaged, - ) + return _AsyncTableItemPaged( + 
self._client.table.query( + top=results_per_page, + filter=query_filter, + cls=lambda items: [TableItem(i.table_name) for i in items], + **kwargs, + ) + ) # type: ignore[return-value] def get_table_client(self, table_name: str, **kwargs: Any) -> TableClient: """Get a client to interact with the specified table. diff --git a/sdk/tables/azure-data-tables/pyproject.toml b/sdk/tables/azure-data-tables/pyproject.toml index e9b975a7d2b3..030c2bdd2332 100644 --- a/sdk/tables/azure-data-tables/pyproject.toml +++ b/sdk/tables/azure-data-tables/pyproject.toml @@ -1,3 +1,65 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +[build-system] +requires = ["setuptools>=77.0.3", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "azure-data-tables" +authors = [ + { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, +] +description = "Microsoft Corporation Azure Data Tables Client Library for Python" +license = "MIT" +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +requires-python = ">=3.9" +keywords = ["azure", "azure sdk"] + +dependencies = [ + "isodate>=0.6.1", + "azure-core>=1.37.0", + "typing-extensions>=4.6.0", +] +dynamic = [ +"version", "readme" +] + +[project.urls] +repository = 
"https://github.com/Azure/azure-sdk-for-python" + +[tool.setuptools.dynamic] +version = {attr = "azure.data.tables._version.VERSION"} +readme = {file = ["README.md", "CHANGELOG.md"], content-type = "text/markdown"} + +[tool.setuptools.packages.find] +exclude = [ + "tests*", + "generated_tests*", + "samples*", + "generated_samples*", + "doc*", + "azure", + "azure.data", +] + +[tool.setuptools.package-data] +pytyped = ["py.typed"] + [tool.azure-sdk-build] pyright = false black = true diff --git a/sdk/tables/azure-data-tables/samples/async_samples/sample_authentication_async.py b/sdk/tables/azure-data-tables/samples/async_samples/sample_authentication_async.py index 8700228cd07d..ddecf23d1943 100644 --- a/sdk/tables/azure-data-tables/samples/async_samples/sample_authentication_async.py +++ b/sdk/tables/azure-data-tables/samples/async_samples/sample_authentication_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/async_samples/sample_batching_async.py b/sdk/tables/azure-data-tables/samples/async_samples/sample_batching_async.py index 3c0d09b16b76..1ce5df93a8c4 100644 --- a/sdk/tables/azure-data-tables/samples/async_samples/sample_batching_async.py +++ b/sdk/tables/azure-data-tables/samples/async_samples/sample_batching_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/async_samples/sample_conditional_update_async.py b/sdk/tables/azure-data-tables/samples/async_samples/sample_conditional_update_async.py index d3920cd8c17b..7fd5418a5a69 100644 --- a/sdk/tables/azure-data-tables/samples/async_samples/sample_conditional_update_async.py +++ b/sdk/tables/azure-data-tables/samples/async_samples/sample_conditional_update_async.py @@ -1,3 +1,4 
@@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/async_samples/sample_copy_table_async.py b/sdk/tables/azure-data-tables/samples/async_samples/sample_copy_table_async.py index 13e812675435..e57bff8cd144 100644 --- a/sdk/tables/azure-data-tables/samples/async_samples/sample_copy_table_async.py +++ b/sdk/tables/azure-data-tables/samples/async_samples/sample_copy_table_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/async_samples/sample_create_client_async.py b/sdk/tables/azure-data-tables/samples/async_samples/sample_create_client_async.py index e7705a3f715c..964fdfe16b9c 100644 --- a/sdk/tables/azure-data-tables/samples/async_samples/sample_create_client_async.py +++ b/sdk/tables/azure-data-tables/samples/async_samples/sample_create_client_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/async_samples/sample_create_delete_table_async.py b/sdk/tables/azure-data-tables/samples/async_samples/sample_create_delete_table_async.py index 753891adbdea..ebe982201761 100644 --- a/sdk/tables/azure-data-tables/samples/async_samples/sample_create_delete_table_async.py +++ b/sdk/tables/azure-data-tables/samples/async_samples/sample_create_delete_table_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/async_samples/sample_encode_dataclass_model_async.py 
b/sdk/tables/azure-data-tables/samples/async_samples/sample_encode_dataclass_model_async.py index 4d2a80e24be8..c8f6f5ee9501 100644 --- a/sdk/tables/azure-data-tables/samples/async_samples/sample_encode_dataclass_model_async.py +++ b/sdk/tables/azure-data-tables/samples/async_samples/sample_encode_dataclass_model_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/async_samples/sample_encode_pydantic_model_async.py b/sdk/tables/azure-data-tables/samples/async_samples/sample_encode_pydantic_model_async.py index b7d151a0be37..d130d8b92b60 100644 --- a/sdk/tables/azure-data-tables/samples/async_samples/sample_encode_pydantic_model_async.py +++ b/sdk/tables/azure-data-tables/samples/async_samples/sample_encode_pydantic_model_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/async_samples/sample_insert_delete_entities_async.py b/sdk/tables/azure-data-tables/samples/async_samples/sample_insert_delete_entities_async.py index 88bec6726acb..2a213efc4cf1 100644 --- a/sdk/tables/azure-data-tables/samples/async_samples/sample_insert_delete_entities_async.py +++ b/sdk/tables/azure-data-tables/samples/async_samples/sample_insert_delete_entities_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/async_samples/sample_query_table_async.py b/sdk/tables/azure-data-tables/samples/async_samples/sample_query_table_async.py index 18368a589341..0eb99fd7addc 100644 --- a/sdk/tables/azure-data-tables/samples/async_samples/sample_query_table_async.py +++ 
b/sdk/tables/azure-data-tables/samples/async_samples/sample_query_table_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/async_samples/sample_query_tables_async.py b/sdk/tables/azure-data-tables/samples/async_samples/sample_query_tables_async.py index d2b14bd0af90..7b4b9610c7a5 100644 --- a/sdk/tables/azure-data-tables/samples/async_samples/sample_query_tables_async.py +++ b/sdk/tables/azure-data-tables/samples/async_samples/sample_query_tables_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/async_samples/sample_update_upsert_merge_entities_async.py b/sdk/tables/azure-data-tables/samples/async_samples/sample_update_upsert_merge_entities_async.py index dbc0cc2dfe76..63166b0e587c 100644 --- a/sdk/tables/azure-data-tables/samples/async_samples/sample_update_upsert_merge_entities_async.py +++ b/sdk/tables/azure-data-tables/samples/async_samples/sample_update_upsert_merge_entities_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/sample_authentication.py b/sdk/tables/azure-data-tables/samples/sample_authentication.py index a5514e89c243..c046f7a33748 100644 --- a/sdk/tables/azure-data-tables/samples/sample_authentication.py +++ b/sdk/tables/azure-data-tables/samples/sample_authentication.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/sample_batching.py 
b/sdk/tables/azure-data-tables/samples/sample_batching.py index 28d8c69f1d54..0710bf871aae 100644 --- a/sdk/tables/azure-data-tables/samples/sample_batching.py +++ b/sdk/tables/azure-data-tables/samples/sample_batching.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/sample_conditional_update.py b/sdk/tables/azure-data-tables/samples/sample_conditional_update.py index fa0d4f51d7b7..2f4492e0b786 100644 --- a/sdk/tables/azure-data-tables/samples/sample_conditional_update.py +++ b/sdk/tables/azure-data-tables/samples/sample_conditional_update.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/sample_copy_table.py b/sdk/tables/azure-data-tables/samples/sample_copy_table.py index ead1d5790fc9..879ebd9e5f02 100644 --- a/sdk/tables/azure-data-tables/samples/sample_copy_table.py +++ b/sdk/tables/azure-data-tables/samples/sample_copy_table.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/sample_create_client.py b/sdk/tables/azure-data-tables/samples/sample_create_client.py index 5e1b75207eef..c69ae7dde330 100644 --- a/sdk/tables/azure-data-tables/samples/sample_create_client.py +++ b/sdk/tables/azure-data-tables/samples/sample_create_client.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/sample_create_delete_table.py b/sdk/tables/azure-data-tables/samples/sample_create_delete_table.py index 377f0c5e7df1..ebcbf693fdee 100644 --- 
a/sdk/tables/azure-data-tables/samples/sample_create_delete_table.py +++ b/sdk/tables/azure-data-tables/samples/sample_create_delete_table.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/sample_encode_dataclass_model.py b/sdk/tables/azure-data-tables/samples/sample_encode_dataclass_model.py index 4263f8a87929..50766318c658 100644 --- a/sdk/tables/azure-data-tables/samples/sample_encode_dataclass_model.py +++ b/sdk/tables/azure-data-tables/samples/sample_encode_dataclass_model.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/sample_encode_pydantic_model.py b/sdk/tables/azure-data-tables/samples/sample_encode_pydantic_model.py index 6170bc808cc2..78356f9fbdbb 100644 --- a/sdk/tables/azure-data-tables/samples/sample_encode_pydantic_model.py +++ b/sdk/tables/azure-data-tables/samples/sample_encode_pydantic_model.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License in the project root for diff --git a/sdk/tables/azure-data-tables/samples/sample_insert_delete_entities.py b/sdk/tables/azure-data-tables/samples/sample_insert_delete_entities.py index 199a0181822d..d76e507889de 100644 --- a/sdk/tables/azure-data-tables/samples/sample_insert_delete_entities.py +++ b/sdk/tables/azure-data-tables/samples/sample_insert_delete_entities.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/sample_query_table.py b/sdk/tables/azure-data-tables/samples/sample_query_table.py index eedf56f62034..2c0aa67bd704 100644 --- a/sdk/tables/azure-data-tables/samples/sample_query_table.py +++ b/sdk/tables/azure-data-tables/samples/sample_query_table.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/sample_query_tables.py b/sdk/tables/azure-data-tables/samples/sample_query_tables.py index a2310009a424..696dbb6c81f3 100644 --- a/sdk/tables/azure-data-tables/samples/sample_query_tables.py +++ b/sdk/tables/azure-data-tables/samples/sample_query_tables.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/samples/sample_update_upsert_merge_entities.py b/sdk/tables/azure-data-tables/samples/sample_update_upsert_merge_entities.py index b309b84d36e7..859c497af43a 100644 --- a/sdk/tables/azure-data-tables/samples/sample_update_upsert_merge_entities.py +++ b/sdk/tables/azure-data-tables/samples/sample_update_upsert_merge_entities.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # 
------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/setup.py b/sdk/tables/azure-data-tables/setup.py deleted file mode 100644 index 0cbd57f6ea12..000000000000 --- a/sdk/tables/azure-data-tables/setup.py +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env python - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -import re -import os.path -from io import open -from setuptools import find_packages, setup - -# Change the PACKAGE_NAME only to change folder and different name -PACKAGE_NAME = "azure-data-tables" -PACKAGE_PPRINT_NAME = "Azure Data Tables" - -# a-b-c => a/b/c -package_folder_path = PACKAGE_NAME.replace("-", "/") -# a-b-c => a.b.c -namespace_name = PACKAGE_NAME.replace("-", ".") - -# Version extraction inspired from 'requests' -with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: - version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) - -if not version: - raise RuntimeError("Cannot find version information") - -with open("README.md", encoding="utf-8") as f: - readme = f.read() -with open("CHANGELOG.md", encoding="utf-8") as f: - changelog = f.read() - -setup( - name=PACKAGE_NAME, - version=version, - include_package_data=True, - description="Microsoft Azure {} Client Library for Python".format(PACKAGE_PPRINT_NAME), - long_description=readme + "\n\n" + changelog, - long_description_content_type="text/markdown", - license="MIT License", - author="Microsoft Corporation", - author_email="ascl@microsoft.com", - url="https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/tables/azure-data-tables", - keywords="azure, azure sdk", - classifiers=[ - "Development Status :: 4 - Beta", - 
"Programming Language :: Python", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "License :: OSI Approved :: MIT License", - ], - zip_safe=False, - packages=find_packages( - exclude=[ - # Exclude packages that will be covered by PEP420 or nspkg - "azure", - "tests", - "azure.data", - ] - ), - python_requires=">=3.9", - install_requires=[ - "azure-core>=1.29.4", - "yarl>=1.0", - "isodate>=0.6.1", - "typing-extensions>=4.3.0", - ], -) diff --git a/sdk/tables/azure-data-tables/tests/_shared/testcase.py b/sdk/tables/azure-data-tables/tests/_shared/testcase.py index 9d38ed317cb9..a644f5195819 100644 --- a/sdk/tables/azure-data-tables/tests/_shared/testcase.py +++ b/sdk/tables/azure-data-tables/tests/_shared/testcase.py @@ -483,11 +483,11 @@ def _assert_stats_unavailable(self, stats): @staticmethod def override_response_body_with_unavailable_status(response): - response.http_response.text = lambda _: SERVICE_UNAVAILABLE_RESP_BODY + response.http_response.text = lambda *_: SERVICE_UNAVAILABLE_RESP_BODY @staticmethod def override_response_body_with_live_status(response): - response.http_response.text = lambda _: SERVICE_LIVE_RESP_BODY + response.http_response.text = lambda *_: SERVICE_LIVE_RESP_BODY class ResponseCallback(object): diff --git a/sdk/tables/azure-data-tables/tests/perfstress_tests/_base.py b/sdk/tables/azure-data-tables/tests/perfstress_tests/_base.py index 8985c983da68..9c4b25e81cd4 100644 --- a/sdk/tables/azure-data-tables/tests/perfstress_tests/_base.py +++ b/sdk/tables/azure-data-tables/tests/perfstress_tests/_base.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # -------------------------------------------------------------------------------------------- # Copyright (c) 
Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. diff --git a/sdk/tables/azure-data-tables/tests/test_encoder.py b/sdk/tables/azure-data-tables/tests/test_encoder.py index 8a1754a9cc52..4b30ad951d37 100644 --- a/sdk/tables/azure-data-tables/tests/test_encoder.py +++ b/sdk/tables/azure-data-tables/tests/test_encoder.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/tests/test_encoder_async.py b/sdk/tables/azure-data-tables/tests/test_encoder_async.py index 26f70107572d..f03dc69ed79b 100644 --- a/sdk/tables/azure-data-tables/tests/test_encoder_async.py +++ b/sdk/tables/azure-data-tables/tests/test_encoder_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/tests/test_encoder_cosmos.py b/sdk/tables/azure-data-tables/tests/test_encoder_cosmos.py index 5369fdbb2306..6536de1642c4 100644 --- a/sdk/tables/azure-data-tables/tests/test_encoder_cosmos.py +++ b/sdk/tables/azure-data-tables/tests/test_encoder_cosmos.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/tests/test_encoder_cosmos_async.py b/sdk/tables/azure-data-tables/tests/test_encoder_cosmos_async.py index 6fad38e25f57..a89e382c6333 100644 --- a/sdk/tables/azure-data-tables/tests/test_encoder_cosmos_async.py +++ b/sdk/tables/azure-data-tables/tests/test_encoder_cosmos_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding: utf-8 # 
------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/tests/test_retry.py b/sdk/tables/azure-data-tables/tests/test_retry.py index 16d47a766d51..4bb830116ddf 100644 --- a/sdk/tables/azure-data-tables/tests/test_retry.py +++ b/sdk/tables/azure-data-tables/tests/test_retry.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for diff --git a/sdk/tables/azure-data-tables/tests/test_retry_cosmos.py b/sdk/tables/azure-data-tables/tests/test_retry_cosmos.py index 3b160469f9cb..f1ed2ba90ebf 100644 --- a/sdk/tables/azure-data-tables/tests/test_retry_cosmos.py +++ b/sdk/tables/azure-data-tables/tests/test_retry_cosmos.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for diff --git a/sdk/tables/azure-data-tables/tests/test_table.py b/sdk/tables/azure-data-tables/tests/test_table.py index d63f498c6ec9..89c4bf092f20 100644 --- a/sdk/tables/azure-data-tables/tests/test_table.py +++ b/sdk/tables/azure-data-tables/tests/test_table.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- @@ -241,8 +242,8 @@ def test_query_tables_with_marker(self, tables_storage_account_name, tables_prim generator2 = ts.list_tables(results_per_page=2).by_page(continuation_token=generator1.continuation_token) next(generator2) - tables1 = generator1._current_page - tables2 = generator2._current_page + tables1 = list(generator1._current_page) + tables2 = list(generator2._current_page) # Assert assert len(tables1) == 2 diff --git a/sdk/tables/azure-data-tables/tests/test_table_async.py b/sdk/tables/azure-data-tables/tests/test_table_async.py index 6554b1895f4d..17e9a7e37e36 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_async.py +++ b/sdk/tables/azure-data-tables/tests/test_table_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for diff --git a/sdk/tables/azure-data-tables/tests/test_table_batch.py b/sdk/tables/azure-data-tables/tests/test_table_batch.py index d657ba4b701f..ad28c2c6b312 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_batch.py +++ b/sdk/tables/azure-data-tables/tests/test_table_batch.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/tests/test_table_batch_async.py b/sdk/tables/azure-data-tables/tests/test_table_batch_async.py index 0a9f0f70d109..a1e923322279 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_batch_async.py +++ b/sdk/tables/azure-data-tables/tests/test_table_batch_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos.py b/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos.py index 24661328fedb..cc8663be4312 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos.py +++ b/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos_async.py b/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos_async.py index 43e7bf718e88..e731e799fb8b 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos_async.py +++ b/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- diff --git 
a/sdk/tables/azure-data-tables/tests/test_table_client.py b/sdk/tables/azure-data-tables/tests/test_table_client.py index f4e11d11ab22..46b3aa7b7e4f 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_client.py +++ b/sdk/tables/azure-data-tables/tests/test_table_client.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for @@ -925,14 +926,14 @@ def test_create_service_client_with_custom_audience(self, client_class): def test_create_client_with_api_version(self): url = self.account_url(self.tables_storage_account_name, "table") client = TableServiceClient(url, credential=self.credential) - assert client._client._config.version == "2019-02-02" + assert client._client._config.api_version == "2019-02-02" table = client.get_table_client("tablename") - assert table._client._config.version == "2019-02-02" + assert table._client._config.api_version == "2019-02-02" client = TableServiceClient(url, credential=self.credential, api_version="2019-07-07") - assert client._client._config.version == "2019-07-07" + assert client._client._config.api_version == "2019-07-07" table = client.get_table_client("tablename") - assert table._client._config.version == "2019-07-07" + assert table._client._config.api_version == "2019-07-07" with pytest.raises(ValueError): TableServiceClient(url, credential=self.credential, api_version="foo") diff --git a/sdk/tables/azure-data-tables/tests/test_table_client_async.py b/sdk/tables/azure-data-tables/tests/test_table_client_async.py index 52cf51514c97..b32e873fb5ea 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_client_async.py +++ b/sdk/tables/azure-data-tables/tests/test_table_client_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines # 
------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for @@ -965,14 +966,14 @@ async def test_closing_pipeline_client_simple_async(self): async def test_create_client_with_api_version(self): url = self.account_url(self.tables_storage_account_name, "table") client = TableServiceClient(url, credential=self.credential) - assert client._client._config.version == "2019-02-02" + assert client._client._config.api_version == "2019-02-02" table = client.get_table_client("tablename") - assert table._client._config.version == "2019-02-02" + assert table._client._config.api_version == "2019-02-02" client = TableServiceClient(url, credential=self.credential, api_version="2019-07-07") - assert client._client._config.version == "2019-07-07" + assert client._client._config.api_version == "2019-07-07" table = client.get_table_client("tablename") - assert table._client._config.version == "2019-07-07" + assert table._client._config.api_version == "2019-07-07" with pytest.raises(ValueError): TableServiceClient(url, credential=self.credential, api_version="foo") diff --git a/sdk/tables/azure-data-tables/tests/test_table_client_cosmos.py b/sdk/tables/azure-data-tables/tests/test_table_client_cosmos.py index f45b9cb0a80c..a1187d74e31f 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_client_cosmos.py +++ b/sdk/tables/azure-data-tables/tests/test_table_client_cosmos.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for diff --git a/sdk/tables/azure-data-tables/tests/test_table_client_cosmos_async.py b/sdk/tables/azure-data-tables/tests/test_table_client_cosmos_async.py index 0e14849e0bff..5566ce0371b0 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_client_cosmos_async.py +++ b/sdk/tables/azure-data-tables/tests/test_table_client_cosmos_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for diff --git a/sdk/tables/azure-data-tables/tests/test_table_cosmos.py b/sdk/tables/azure-data-tables/tests/test_table_cosmos.py index 0f749b44e779..bcf93a94b4f9 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_cosmos.py +++ b/sdk/tables/azure-data-tables/tests/test_table_cosmos.py @@ -168,8 +168,8 @@ def test_query_tables_with_marker(self, tables_cosmos_account_name, tables_prima generator2 = ts.list_tables(results_per_page=2).by_page(continuation_token=generator1.continuation_token) next(generator2) - tables1 = generator1._current_page - tables2 = generator2._current_page + tables1 = list(generator1._current_page) + tables2 = list(generator2._current_page) # Assert assert len(tables1) == 2 diff --git a/sdk/tables/azure-data-tables/tests/test_table_entity.py b/sdk/tables/azure-data-tables/tests/test_table_entity.py index 572b449a82f4..58ecdc6a48a7 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_entity.py +++ b/sdk/tables/azure-data-tables/tests/test_table_entity.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/tests/test_table_entity_async.py b/sdk/tables/azure-data-tables/tests/test_table_entity_async.py index 
503dc9a857cd..5ae047a0c3eb 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_entity_async.py +++ b/sdk/tables/azure-data-tables/tests/test_table_entity_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos.py b/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos.py index 3fb42485a2e7..b9c808997245 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos.py +++ b/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding: utf-8 # ------------------------------------------------------------------------- diff --git a/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos_async.py b/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos_async.py index 303086dec8ec..f1f936c7c967 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos_async.py +++ b/sdk/tables/azure-data-tables/tests/test_table_entity_cosmos_async.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding: utf-8 # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
diff --git a/sdk/tables/azure-data-tables/tests/test_table_service_stats_async.py b/sdk/tables/azure-data-tables/tests/test_table_service_stats_async.py index 5cf30f4e3830..9ac653485a84 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_service_stats_async.py +++ b/sdk/tables/azure-data-tables/tests/test_table_service_stats_async.py @@ -27,11 +27,11 @@ class TestTableServiceStatsAsync(AzureRecordedTestCase, AsyncTableTestCase): @staticmethod def override_response_body_with_unavailable_status(response): - response.http_response.text = lambda _: SERVICE_UNAVAILABLE_RESP_BODY + response.http_response.text = lambda *_: SERVICE_UNAVAILABLE_RESP_BODY @staticmethod def override_response_body_with_live_status(response): - response.http_response.text = lambda _: SERVICE_LIVE_RESP_BODY + response.http_response.text = lambda *_: SERVICE_LIVE_RESP_BODY # --Test cases per service --------------------------------------- @tables_decorator_async diff --git a/sdk/tables/azure-data-tables/tests/test_table_service_stats_cosmos.py b/sdk/tables/azure-data-tables/tests/test_table_service_stats_cosmos.py index bfff2874b4a0..f5ebd36fe1ce 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_service_stats_cosmos.py +++ b/sdk/tables/azure-data-tables/tests/test_table_service_stats_cosmos.py @@ -28,11 +28,11 @@ class TestTableServiceStatsCosmos(AzureRecordedTestCase, TableTestCase): @staticmethod def override_response_body_with_unavailable_status(response): - response.http_response.text = lambda _: SERVICE_UNAVAILABLE_RESP_BODY + response.http_response.text = lambda *_: SERVICE_UNAVAILABLE_RESP_BODY @staticmethod def override_response_body_with_live_status(response): - response.http_response.text = lambda _: SERVICE_LIVE_RESP_BODY + response.http_response.text = lambda *_: SERVICE_LIVE_RESP_BODY # TODO: Should we remove these both from cosmos sync/async? 
# --Test cases per service --------------------------------------- diff --git a/sdk/tables/azure-data-tables/tests/test_table_service_stats_cosmos_async.py b/sdk/tables/azure-data-tables/tests/test_table_service_stats_cosmos_async.py index 77a56445fcc1..1f70e40dfa36 100644 --- a/sdk/tables/azure-data-tables/tests/test_table_service_stats_cosmos_async.py +++ b/sdk/tables/azure-data-tables/tests/test_table_service_stats_cosmos_async.py @@ -29,11 +29,11 @@ class TestTableServiceStatsCosmosAsync(AzureRecordedTestCase, AsyncTableTestCase): @staticmethod def override_response_body_with_unavailable_status(response): - response.http_response.text = lambda _: SERVICE_UNAVAILABLE_RESP_BODY + response.http_response.text = lambda *_: SERVICE_UNAVAILABLE_RESP_BODY @staticmethod def override_response_body_with_live_status(response): - response.http_response.text = lambda _: SERVICE_LIVE_RESP_BODY + response.http_response.text = lambda *_: SERVICE_LIVE_RESP_BODY # --Test cases per service --------------------------------------- @pytest.mark.skip("JSON is invalid for cosmos") diff --git a/sdk/tables/azure-data-tables/tsp-location.yaml b/sdk/tables/azure-data-tables/tsp-location.yaml new file mode 100644 index 000000000000..5f865298b925 --- /dev/null +++ b/sdk/tables/azure-data-tables/tsp-location.yaml @@ -0,0 +1,4 @@ +directory: specification/cosmos-db/data-plane/Tables +commit: 623ace17cc16f11c09e2b5264e5e2d8c066c0ae9 +repo: Azure/azure-rest-api-specs +additionalDirectories: []