diff --git a/sdk/redis/azure-mgmt-redis/_meta.json b/sdk/redis/azure-mgmt-redis/_meta.json index 5887deead513..31730d942a67 100644 --- a/sdk/redis/azure-mgmt-redis/_meta.json +++ b/sdk/redis/azure-mgmt-redis/_meta.json @@ -1,11 +1,11 @@ { - "commit": "3aef2f96202eec656fca4abc4df0fbfe10b89ac0", + "commit": "b2cbd5328106a020f1aca87eea78d9308763338a", "repository_url": "https://github.com/Azure/azure-rest-api-specs", "autorest": "3.10.2", "use": [ - "@autorest/python@6.13.19", + "@autorest/python@6.19.0", "@autorest/modelerfour@4.27.0" ], - "autorest_command": "autorest specification/redis/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --tag=package-2024-03 --use=@autorest/python@6.13.19 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False", + "autorest_command": "autorest specification/redis/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.19.0 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False", "readme": "specification/redis/resource-manager/readme.md" } \ No newline at end of file diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_configuration.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_configuration.py index c7e98f894b0a..06436645fa9b 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_configuration.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_configuration.py @@ -28,13 +28,13 @@ class RedisManagementClientConfiguration: # pylint: disable=too-many-instance-a :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str - :keyword api_version: Api Version. Default value is "2024-03-01". Note that overriding this + :keyword api_version: Api Version. Default value is "2024-11-01". Note that overriding this default value may result in unsupported behavior. :paramtype api_version: str """ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None: - api_version: str = kwargs.pop("api_version", "2024-03-01") + api_version: str = kwargs.pop("api_version", "2024-11-01") if credential is None: raise ValueError("Parameter 'credential' must not be None.") diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_redis_management_client.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_redis_management_client.py index 85f943a596e8..313868b2df3e 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_redis_management_client.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_redis_management_client.py @@ -8,6 +8,7 @@ from copy import deepcopy from typing import Any, TYPE_CHECKING +from typing_extensions import Self from azure.core.pipeline import policies from azure.core.rest import HttpRequest, HttpResponse @@ -65,7 +66,7 @@ class RedisManagementClient: # pylint: disable=client-accepts-api-version-keywo :type subscription_id: str :param base_url: Service URL. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: Api Version. Default value is "2024-03-01". Note that overriding this + :keyword api_version: Api Version. Default value is "2024-11-01". Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no @@ -150,7 +151,7 @@ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: def close(self) -> None: self._client.close() - def __enter__(self) -> "RedisManagementClient": + def __enter__(self) -> Self: self._client.__enter__() return self diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_serialization.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_serialization.py index f0c6180722c8..8139854b97bb 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_serialization.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_serialization.py @@ -144,6 +144,8 @@ def _json_attemp(data): # context otherwise. _LOGGER.critical("Wasn't XML not JSON, failing") raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) @classmethod diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_vendor.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_vendor.py deleted file mode 100644 index 0dafe0e287ff..000000000000 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_vendor.py +++ /dev/null @@ -1,16 +0,0 @@ -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.core.pipeline.transport import HttpRequest - - -def _convert_request(request, files=None): - data = request.content if not files else None - request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) - if files: - request.set_formdata_body(files) - return request diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_version.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_version.py index 87c8699d573d..dc6920075e88 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_version.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "14.4.0" +VERSION = "12.0.0b1" diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/_configuration.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/_configuration.py index c608af027a55..d75622d8456a 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/_configuration.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/_configuration.py @@ -28,13 +28,13 @@ class RedisManagementClientConfiguration: # pylint: disable=too-many-instance-a :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str - :keyword api_version: Api Version. Default value is "2024-03-01". Note that overriding this + :keyword api_version: Api Version. Default value is "2024-11-01". Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str """ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None: - api_version: str = kwargs.pop("api_version", "2024-03-01") + api_version: str = kwargs.pop("api_version", "2024-11-01") if credential is None: raise ValueError("Parameter 'credential' must not be None.") diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/_redis_management_client.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/_redis_management_client.py index bed1e9897324..1311c8066c8b 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/_redis_management_client.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/_redis_management_client.py @@ -8,6 +8,7 @@ from copy import deepcopy from typing import Any, Awaitable, TYPE_CHECKING +from typing_extensions import Self from azure.core.pipeline import policies from azure.core.rest import AsyncHttpResponse, HttpRequest @@ -66,7 +67,7 @@ class RedisManagementClient: # pylint: disable=client-accepts-api-version-keywo :type subscription_id: str :param base_url: Service URL. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: Api Version. Default value is "2024-03-01". Note that overriding this + :keyword api_version: Api Version. Default value is "2024-11-01". Note that overriding this default value may result in unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no @@ -153,7 +154,7 @@ def _send_request( async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "RedisManagementClient": + async def __aenter__(self) -> Self: await self._client.__aenter__() return self diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_access_policy_assignment_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_access_policy_assignment_operations.py index 4ae84456d05a..025b2cd4e5d8 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_access_policy_assignment_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_access_policy_assignment_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._access_policy_assignment_operations import ( build_create_update_request, build_delete_request, @@ -73,7 +73,7 @@ async def _create_update_initial( access_policy_assignment_name: str, parameters: Union[_models.RedisCacheAccessPolicyAssignment, IO[bytes]], **kwargs: Any - ) -> _models.RedisCacheAccessPolicyAssignment: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -87,7 +87,7 @@ async def _create_update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.RedisCacheAccessPolicyAssignment] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -109,10 +109,10 @@ async def _create_update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -120,15 +120,15 @@ async def _create_update_initial( response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("RedisCacheAccessPolicyAssignment", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("RedisCacheAccessPolicyAssignment", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -250,10 +250,11 @@ async def begin_create_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("RedisCacheAccessPolicyAssignment", pipeline_response) + deserialized = self._deserialize("RedisCacheAccessPolicyAssignment", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -275,9 +276,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_initial( # pylint: disable=inconsistent-return-statements + async def _delete_initial( self, resource_group_name: str, cache_name: str, access_policy_assignment_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -290,7 +291,7 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: 
str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -301,10 +302,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -312,6 +313,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -320,8 +325,12 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements if response.status_code == 202: response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete( @@ -349,7 +358,7 @@ async def begin_delete( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, cache_name=cache_name, access_policy_assignment_name=access_policy_assignment_name, @@ -359,6 +368,7 @@ async def begin_delete( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -422,7 +432,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -437,7 +446,7 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisCacheAccessPolicyAssignment", pipeline_response) + deserialized = self._deserialize("RedisCacheAccessPolicyAssignment", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -486,7 +495,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -502,7 +510,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), 
params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_access_policy_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_access_policy_operations.py index b8cc7a7e7922..a16046d34b36 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_access_policy_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_access_policy_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._access_policy_operations import ( build_create_update_request, build_delete_request, @@ -73,7 +73,7 @@ async def _create_update_initial( access_policy_name: str, parameters: Union[_models.RedisCacheAccessPolicy, IO[bytes]], **kwargs: Any - ) -> _models.RedisCacheAccessPolicy: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -87,7 +87,7 @@ async def _create_update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.RedisCacheAccessPolicy] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -109,10 +109,10 @@ async def _create_update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -120,15 +120,15 @@ async def _create_update_initial( response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) 
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("RedisCacheAccessPolicy", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("RedisCacheAccessPolicy", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -248,10 +248,11 @@ async def begin_create_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("RedisCacheAccessPolicy", pipeline_response) + deserialized = self._deserialize("RedisCacheAccessPolicy", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -273,9 +274,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_initial( # pylint: disable=inconsistent-return-statements + async def _delete_initial( self, resource_group_name: str, cache_name: str, access_policy_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -288,7 +289,7 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -299,10 +300,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -310,6 +311,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -318,8 +323,12 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements if response.status_code == 202: response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete( @@ -348,7 +357,7 @@ async def begin_delete( lro_delay = 
kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, cache_name=cache_name, access_policy_name=access_policy_name, @@ -358,6 +367,7 @@ async def begin_delete( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -422,7 +432,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -437,7 +446,7 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisCacheAccessPolicy", pipeline_response) + deserialized = self._deserialize("RedisCacheAccessPolicy", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -486,7 +495,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -502,7 +510,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_async_operation_status_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_async_operation_status_operations.py index 90d28e8c9c66..da1c61caa98d 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_async_operation_status_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_async_operation_status_operations.py @@ -18,14 +18,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._async_operation_status_operations import build_get_request if sys.version_info >= (3, 9): @@ -89,7 +87,6 @@ async def get(self, location: str, operation_id: str, **kwargs: Any) -> _models. headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -104,7 +101,7 @@ async def get(self, location: str, operation_id: str, **kwargs: Any) -> _models. 
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("OperationStatus", pipeline_response) + deserialized = self._deserialize("OperationStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_firewall_rules_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_firewall_rules_operations.py index 9f433f75410f..415e9b40c698 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_firewall_rules_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_firewall_rules_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._firewall_rules_operations import ( build_create_or_update_request, build_delete_request, @@ -104,7 +102,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -120,7 +117,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -272,7 +268,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -287,11 +282,7 @@ async def create_or_update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("RedisFirewallRule", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("RedisFirewallRule", pipeline_response) + deserialized = self._deserialize("RedisFirewallRule", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -338,7 +329,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -353,7 +343,7 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisFirewallRule", pipeline_response) + deserialized = self._deserialize("RedisFirewallRule", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -400,7 +390,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = 
_convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_linked_server_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_linked_server_operations.py index 88b910718e7e..4c6a0886b394 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_linked_server_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_linked_server_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._linked_server_operations import ( build_create_request, build_delete_request, @@ -73,7 +73,7 @@ async def _create_initial( linked_server_name: str, parameters: Union[_models.RedisLinkedServerCreateParameters, IO[bytes]], **kwargs: Any - ) -> _models.RedisLinkedServerWithProperties: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -87,7 +87,7 @@ async def _create_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.RedisLinkedServerWithProperties] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -109,10 +109,10 @@ async def _create_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -120,15 +120,15 @@ async def _create_initial( response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("RedisLinkedServerWithProperties", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("RedisLinkedServerWithProperties", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -251,10 +251,11 @@ async def begin_create( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("RedisLinkedServerWithProperties", pipeline_response) + deserialized = self._deserialize("RedisLinkedServerWithProperties", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -276,9 +277,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_initial( # pylint: disable=inconsistent-return-statements + async def _delete_initial( self, resource_group_name: str, name: str, linked_server_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -291,7 +292,7 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -302,10 +303,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -313,6 +314,10 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -321,8 +326,12 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements if response.status_code == 202: response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete( @@ -351,7 +360,7 @@ async def begin_delete( lro_delay = kwargs.pop("polling_interval", 
self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, name=name, linked_server_name=linked_server_name, @@ -361,6 +370,7 @@ async def begin_delete( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -422,7 +432,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -437,7 +446,7 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisLinkedServerWithProperties", pipeline_response) + deserialized = self._deserialize("RedisLinkedServerWithProperties", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -486,7 +495,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -502,7 +510,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_operations.py index d1dd176991de..d9726efbfb26 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._operations import build_list_request if sys.version_info >= (3, 9): @@ -87,7 +85,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -103,7 +100,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_patch_schedules_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_patch_schedules_operations.py index cc5674bd8203..c9258a17d84d 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_patch_schedules_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_patch_schedules_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._patch_schedules_operations import ( build_create_or_update_request, build_delete_request, @@ -104,7 +102,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -120,7 +117,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -273,7 +269,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -288,11 +283,7 @@ async def create_or_update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("RedisPatchSchedule", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("RedisPatchSchedule", pipeline_response) + deserialized = self._deserialize("RedisPatchSchedule", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -340,7 +331,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -399,7 +389,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -414,7 +403,7 @@ async def get( error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisPatchSchedule", pipeline_response) + deserialized = self._deserialize("RedisPatchSchedule", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_private_endpoint_connections_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_private_endpoint_connections_operations.py index 1db4cc0e1165..bc9e9fd60024 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_private_endpoint_connections_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_private_endpoint_connections_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._private_endpoint_connections_operations import ( build_delete_request, build_get_request, @@ -108,7 +108,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -124,7 +123,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -195,7 +193,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -210,7 +207,7 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -224,7 +221,7 @@ async def _put_initial( private_endpoint_connection_name: str, properties: Union[_models.PrivateEndpointConnection, IO[bytes]], **kwargs: Any - ) -> _models.PrivateEndpointConnection: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -238,7 +235,7 @@ async def _put_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -260,10 +257,10 @@ async def _put_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -271,15 +268,15 @@ async def _put_initial( response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -399,10 +396,11 @@ async def begin_put( params=_params, **kwargs ) + await raw_result.http_response.read() # type: 
ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -465,7 +463,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_private_link_resources_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_private_link_resources_operations.py index 9756719c239c..8623d16d0c41 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_private_link_resources_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_private_link_resources_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._private_link_resources_operations import build_list_by_redis_cache_request if sys.version_info >= (3, 9): @@ -97,7 +95,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -113,7 +110,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_redis_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_redis_operations.py index fdfd1f604754..0bd9521d6b8f 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_redis_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/aio/operations/_redis_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from 
azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._redis_operations import ( build_check_name_availability_request, build_create_request, @@ -160,7 +160,6 @@ async def check_name_availability( # pylint: disable=inconsistent-return-statem headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -221,7 +220,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -237,7 +235,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -273,7 +270,7 @@ async def _create_initial( name: str, parameters: Union[_models.RedisCreateParameters, IO[bytes]], **kwargs: Any - ) -> _models.RedisResource: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -287,7 +284,7 @@ async def _create_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.RedisResource] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -308,10 +305,10 @@ async def _create_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -319,15 +316,15 @@ async def _create_initial( response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("RedisResource", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("RedisResource", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -434,10 +431,11 @@ async def begin_create( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("RedisResource", pipeline_response) + deserialized = self._deserialize("RedisResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return 
deserialized @@ -465,7 +463,7 @@ async def _update_initial( name: str, parameters: Union[_models.RedisUpdateParameters, IO[bytes]], **kwargs: Any - ) -> _models.RedisResource: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -479,7 +477,7 @@ async def _update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.RedisResource] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -500,10 +498,10 @@ async def _update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -511,15 +509,15 @@ async def _update_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("RedisResource", pipeline_response) - - if response.status_code == 202: - deserialized = self._deserialize("RedisResource", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -626,10 +624,11 @@ async def begin_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("RedisResource", pipeline_response) + deserialized = self._deserialize("RedisResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -651,9 +650,7 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, name: str, **kwargs: Any - ) -> None: + async def _delete_initial(self, resource_group_name: str, name: str, **kwargs: Any) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -666,7 +663,7 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -676,10 +673,10 @@ async def _delete_initial( # pylint: 
disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -687,12 +684,20 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_delete(self, resource_group_name: str, name: str, **kwargs: Any) -> AsyncLROPoller[None]: @@ -716,7 +721,7 @@ async def begin_delete(self, resource_group_name: str, name: str, **kwargs: Any) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, name=name, api_version=api_version, @@ -725,6 +730,7 @@ async def begin_delete(self, resource_group_name: str, name: str, **kwargs: Any) params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -781,7 +787,6 @@ async def get(self, resource_group_name: str, name: str, **kwargs: Any) -> _mode headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -796,7 +801,7 @@ async def get(self, resource_group_name: str, name: str, **kwargs: Any) -> _mode error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisResource", pipeline_response) + deserialized = self._deserialize("RedisResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -838,7 +843,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -854,7 +858,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -915,7 +918,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = 
self._client.format_url(_request.url) else: @@ -931,7 +933,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -997,7 +998,6 @@ async def list_keys(self, resource_group_name: str, name: str, **kwargs: Any) -> headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1012,7 +1012,7 @@ async def list_keys(self, resource_group_name: str, name: str, **kwargs: Any) -> error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisAccessKeys", pipeline_response) + deserialized = self._deserialize("RedisAccessKeys", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1132,7 +1132,6 @@ async def regenerate_key( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1147,7 +1146,7 @@ async def regenerate_key( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisAccessKeys", pipeline_response) + deserialized = self._deserialize("RedisAccessKeys", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1267,7 +1266,6 @@ async def force_reboot( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1282,20 +1280,20 @@ async def force_reboot( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisForceRebootResponse", pipeline_response) + deserialized = self._deserialize("RedisForceRebootResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - async def _import_data_initial( # pylint: disable=inconsistent-return-statements + async def _import_data_initial( self, resource_group_name: str, name: str, parameters: Union[_models.ImportRDBParameters, IO[bytes]], **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1309,7 +1307,7 @@ async def _import_data_initial( # pylint: disable=inconsistent-return-statement api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1330,10 +1328,10 @@ async def _import_data_initial( # pylint: disable=inconsistent-return-statement headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream 
= False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1341,12 +1339,20 @@ async def _import_data_initial( # pylint: disable=inconsistent-return-statement response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @overload async def begin_import_data( @@ -1434,7 +1440,7 @@ async def begin_import_data( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._import_data_initial( # type: ignore + raw_result = await self._import_data_initial( resource_group_name=resource_group_name, name=name, parameters=parameters, @@ -1445,6 +1451,7 @@ async def begin_import_data( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1466,13 +1473,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - async def _export_data_initial( # pylint: disable=inconsistent-return-statements + async def _export_data_initial( self, resource_group_name: str, name: str, parameters: Union[_models.ExportRDBParameters, IO[bytes]], **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1486,7 +1493,7 @@ async def _export_data_initial( # pylint: disable=inconsistent-return-statement api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1507,10 +1514,10 @@ async def _export_data_initial( # pylint: disable=inconsistent-return-statement headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1518,12 +1525,20 @@ async def _export_data_initial( # pylint: disable=inconsistent-return-statement response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory 
and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @overload async def begin_export_data( @@ -1611,7 +1626,7 @@ async def begin_export_data( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._export_data_initial( # type: ignore + raw_result = await self._export_data_initial( resource_group_name=resource_group_name, name=name, parameters=parameters, @@ -1622,6 +1637,7 @@ async def begin_export_data( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1645,7 +1661,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- async def _flush_cache_initial( self, resource_group_name: str, cache_name: str, **kwargs: Any - ) -> Optional[_models.OperationStatusResult]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1658,7 +1674,7 @@ async def _flush_cache_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.OperationStatusResult]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_flush_cache_request( resource_group_name=resource_group_name, @@ -1668,10 +1684,10 @@ async def _flush_cache_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1679,25 +1695,21 @@ async def _flush_cache_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) - response_headers["Azure-AsyncOperation"] = self._deserialize( - "str", response.headers.get("Azure-AsyncOperation") - ) - - deserialized = self._deserialize("OperationStatusResult", pipeline_response) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + 
response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) - if response.status_code == 202: - response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) - response_headers["Azure-AsyncOperation"] = self._deserialize( - "str", response.headers.get("Azure-AsyncOperation") - ) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -1738,6 +1750,7 @@ async def begin_flush_cache( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): @@ -1748,7 +1761,7 @@ def get_long_running_output(pipeline_response): "str", response.headers.get("Azure-AsyncOperation") ) - deserialized = self._deserialize("OperationStatusResult", pipeline_response) + deserialized = self._deserialize("OperationStatusResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/models/__init__.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/models/__init__.py index f0b603c2a559..d649dedeb15c 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/models/__init__.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/models/__init__.py @@ -79,6 +79,7 @@ from ._redis_management_client_enums import SkuName from ._redis_management_client_enums import TlsVersion from ._redis_management_client_enums import UpdateChannel +from ._redis_management_client_enums import ZonalAllocationPolicy from ._patch import __all__ as _patch_all from ._patch import * # pylint: disable=unused-wildcard-import from ._patch import patch_sdk as _patch_sdk @@ -156,6 +157,7 @@ "SkuName", "TlsVersion", "UpdateChannel", + "ZonalAllocationPolicy", ] __all__.extend([p for p in _patch_all if p not in __all__]) _patch_sdk() diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/models/_models_py3.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/models/_models_py3.py index 7f751dcb1b27..2a87d343d01c 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/models/_models_py3.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/models/_models_py3.py @@ -1156,6 +1156,15 @@ class RedisCommonProperties(_serialization.Model): # pylint: disable=too-many-i :ivar disable_access_key_authentication: Authentication to Redis through access keys is disabled when set as true. Default value is false. :vartype disable_access_key_authentication: bool + :ivar zonal_allocation_policy: Optional: Specifies how availability zones are allocated to the + Redis cache. 'Automatic' enables zone redundancy and Azure will automatically select zones + based on regional availability and capacity. 'UserDefined' will select availability zones + passed in by you using the 'zones' parameter. 'NoZones' will produce a non-zonal cache. If + 'zonalAllocationPolicy' is not passed, it will be set to 'UserDefined' when zones are passed + in, otherwise, it will be set to 'Automatic' in regions where zones are supported and 'NoZones' + in regions where zones are not supported. Known values are: "Automatic", "UserDefined", and + "NoZones". 
+ :vartype zonal_allocation_policy: str or ~azure.mgmt.redis.models.ZonalAllocationPolicy """ _attribute_map = { @@ -1170,6 +1179,7 @@ class RedisCommonProperties(_serialization.Model): # pylint: disable=too-many-i "public_network_access": {"key": "publicNetworkAccess", "type": "str"}, "update_channel": {"key": "updateChannel", "type": "str"}, "disable_access_key_authentication": {"key": "disableAccessKeyAuthentication", "type": "bool"}, + "zonal_allocation_policy": {"key": "zonalAllocationPolicy", "type": "str"}, } def __init__( @@ -1186,6 +1196,7 @@ def __init__( public_network_access: Union[str, "_models.PublicNetworkAccess"] = "Enabled", update_channel: Optional[Union[str, "_models.UpdateChannel"]] = None, disable_access_key_authentication: bool = False, + zonal_allocation_policy: Optional[Union[str, "_models.ZonalAllocationPolicy"]] = None, **kwargs: Any ) -> None: """ @@ -1226,6 +1237,15 @@ def __init__( :keyword disable_access_key_authentication: Authentication to Redis through access keys is disabled when set as true. Default value is false. :paramtype disable_access_key_authentication: bool + :keyword zonal_allocation_policy: Optional: Specifies how availability zones are allocated to + the Redis cache. 'Automatic' enables zone redundancy and Azure will automatically select zones + based on regional availability and capacity. 'UserDefined' will select availability zones + passed in by you using the 'zones' parameter. 'NoZones' will produce a non-zonal cache. If + 'zonalAllocationPolicy' is not passed, it will be set to 'UserDefined' when zones are passed + in, otherwise, it will be set to 'Automatic' in regions where zones are supported and 'NoZones' + in regions where zones are not supported. Known values are: "Automatic", "UserDefined", and + "NoZones". + :paramtype zonal_allocation_policy: str or ~azure.mgmt.redis.models.ZonalAllocationPolicy """ super().__init__(**kwargs) self.redis_configuration = redis_configuration @@ -1239,6 +1259,7 @@ def __init__( self.public_network_access = public_network_access self.update_channel = update_channel self.disable_access_key_authentication = disable_access_key_authentication + self.zonal_allocation_policy = zonal_allocation_policy class RedisCommonPropertiesRedisConfiguration(_serialization.Model): # pylint: disable=too-many-instance-attributes @@ -1472,6 +1493,15 @@ class RedisCreateParameters(_serialization.Model): # pylint: disable=too-many-i :ivar disable_access_key_authentication: Authentication to Redis through access keys is disabled when set as true. Default value is false. :vartype disable_access_key_authentication: bool + :ivar zonal_allocation_policy: Optional: Specifies how availability zones are allocated to the + Redis cache. 'Automatic' enables zone redundancy and Azure will automatically select zones + based on regional availability and capacity. 'UserDefined' will select availability zones + passed in by you using the 'zones' parameter. 'NoZones' will produce a non-zonal cache. If + 'zonalAllocationPolicy' is not passed, it will be set to 'UserDefined' when zones are passed + in, otherwise, it will be set to 'Automatic' in regions where zones are supported and 'NoZones' + in regions where zones are not supported. Known values are: "Automatic", "UserDefined", and + "NoZones". + :vartype zonal_allocation_policy: str or ~azure.mgmt.redis.models.ZonalAllocationPolicy :ivar sku: The SKU of the Redis cache to deploy. Required. 
:vartype sku: ~azure.mgmt.redis.models.Sku :ivar subnet_id: The full resource ID of a subnet in a virtual network to deploy the Redis @@ -1511,6 +1541,7 @@ class RedisCreateParameters(_serialization.Model): # pylint: disable=too-many-i "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, "update_channel": {"key": "properties.updateChannel", "type": "str"}, "disable_access_key_authentication": {"key": "properties.disableAccessKeyAuthentication", "type": "bool"}, + "zonal_allocation_policy": {"key": "properties.zonalAllocationPolicy", "type": "str"}, "sku": {"key": "properties.sku", "type": "Sku"}, "subnet_id": {"key": "properties.subnetId", "type": "str"}, "static_ip": {"key": "properties.staticIP", "type": "str"}, @@ -1535,6 +1566,7 @@ def __init__( public_network_access: Union[str, "_models.PublicNetworkAccess"] = "Enabled", update_channel: Optional[Union[str, "_models.UpdateChannel"]] = None, disable_access_key_authentication: bool = False, + zonal_allocation_policy: Optional[Union[str, "_models.ZonalAllocationPolicy"]] = None, subnet_id: Optional[str] = None, static_ip: Optional[str] = None, **kwargs: Any @@ -1585,6 +1617,15 @@ def __init__( :keyword disable_access_key_authentication: Authentication to Redis through access keys is disabled when set as true. Default value is false. :paramtype disable_access_key_authentication: bool + :keyword zonal_allocation_policy: Optional: Specifies how availability zones are allocated to + the Redis cache. 'Automatic' enables zone redundancy and Azure will automatically select zones + based on regional availability and capacity. 'UserDefined' will select availability zones + passed in by you using the 'zones' parameter. 'NoZones' will produce a non-zonal cache. If + 'zonalAllocationPolicy' is not passed, it will be set to 'UserDefined' when zones are passed + in, otherwise, it will be set to 'Automatic' in regions where zones are supported and 'NoZones' + in regions where zones are not supported. Known values are: "Automatic", "UserDefined", and + "NoZones". + :paramtype zonal_allocation_policy: str or ~azure.mgmt.redis.models.ZonalAllocationPolicy :keyword sku: The SKU of the Redis cache to deploy. Required. :paramtype sku: ~azure.mgmt.redis.models.Sku :keyword subnet_id: The full resource ID of a subnet in a virtual network to deploy the Redis @@ -1611,6 +1652,7 @@ def __init__( self.public_network_access = public_network_access self.update_channel = update_channel self.disable_access_key_authentication = disable_access_key_authentication + self.zonal_allocation_policy = zonal_allocation_policy self.sku = sku self.subnet_id = subnet_id self.static_ip = static_ip @@ -1656,6 +1698,15 @@ class RedisCreateProperties(RedisCommonProperties): # pylint: disable=too-many- :ivar disable_access_key_authentication: Authentication to Redis through access keys is disabled when set as true. Default value is false. :vartype disable_access_key_authentication: bool + :ivar zonal_allocation_policy: Optional: Specifies how availability zones are allocated to the + Redis cache. 'Automatic' enables zone redundancy and Azure will automatically select zones + based on regional availability and capacity. 'UserDefined' will select availability zones + passed in by you using the 'zones' parameter. 'NoZones' will produce a non-zonal cache. 
If + 'zonalAllocationPolicy' is not passed, it will be set to 'UserDefined' when zones are passed + in, otherwise, it will be set to 'Automatic' in regions where zones are supported and 'NoZones' + in regions where zones are not supported. Known values are: "Automatic", "UserDefined", and + "NoZones". + :vartype zonal_allocation_policy: str or ~azure.mgmt.redis.models.ZonalAllocationPolicy :ivar sku: The SKU of the Redis cache to deploy. Required. :vartype sku: ~azure.mgmt.redis.models.Sku :ivar subnet_id: The full resource ID of a subnet in a virtual network to deploy the Redis @@ -1687,6 +1738,7 @@ class RedisCreateProperties(RedisCommonProperties): # pylint: disable=too-many- "public_network_access": {"key": "publicNetworkAccess", "type": "str"}, "update_channel": {"key": "updateChannel", "type": "str"}, "disable_access_key_authentication": {"key": "disableAccessKeyAuthentication", "type": "bool"}, + "zonal_allocation_policy": {"key": "zonalAllocationPolicy", "type": "str"}, "sku": {"key": "sku", "type": "Sku"}, "subnet_id": {"key": "subnetId", "type": "str"}, "static_ip": {"key": "staticIP", "type": "str"}, @@ -1707,6 +1759,7 @@ def __init__( public_network_access: Union[str, "_models.PublicNetworkAccess"] = "Enabled", update_channel: Optional[Union[str, "_models.UpdateChannel"]] = None, disable_access_key_authentication: bool = False, + zonal_allocation_policy: Optional[Union[str, "_models.ZonalAllocationPolicy"]] = None, subnet_id: Optional[str] = None, static_ip: Optional[str] = None, **kwargs: Any @@ -1749,6 +1802,15 @@ def __init__( :keyword disable_access_key_authentication: Authentication to Redis through access keys is disabled when set as true. Default value is false. :paramtype disable_access_key_authentication: bool + :keyword zonal_allocation_policy: Optional: Specifies how availability zones are allocated to + the Redis cache. 'Automatic' enables zone redundancy and Azure will automatically select zones + based on regional availability and capacity. 'UserDefined' will select availability zones + passed in by you using the 'zones' parameter. 'NoZones' will produce a non-zonal cache. If + 'zonalAllocationPolicy' is not passed, it will be set to 'UserDefined' when zones are passed + in, otherwise, it will be set to 'Automatic' in regions where zones are supported and 'NoZones' + in regions where zones are not supported. Known values are: "Automatic", "UserDefined", and + "NoZones". + :paramtype zonal_allocation_policy: str or ~azure.mgmt.redis.models.ZonalAllocationPolicy :keyword sku: The SKU of the Redis cache to deploy. Required. :paramtype sku: ~azure.mgmt.redis.models.Sku :keyword subnet_id: The full resource ID of a subnet in a virtual network to deploy the Redis @@ -1771,6 +1833,7 @@ def __init__( public_network_access=public_network_access, update_channel=update_channel, disable_access_key_authentication=disable_access_key_authentication, + zonal_allocation_policy=zonal_allocation_policy, **kwargs ) self.sku = sku @@ -2426,6 +2489,15 @@ class RedisProperties(RedisCreateProperties): # pylint: disable=too-many-instan :ivar disable_access_key_authentication: Authentication to Redis through access keys is disabled when set as true. Default value is false. :vartype disable_access_key_authentication: bool + :ivar zonal_allocation_policy: Optional: Specifies how availability zones are allocated to the + Redis cache. 'Automatic' enables zone redundancy and Azure will automatically select zones + based on regional availability and capacity. 
'UserDefined' will select availability zones + passed in by you using the 'zones' parameter. 'NoZones' will produce a non-zonal cache. If + 'zonalAllocationPolicy' is not passed, it will be set to 'UserDefined' when zones are passed + in, otherwise, it will be set to 'Automatic' in regions where zones are supported and 'NoZones' + in regions where zones are not supported. Known values are: "Automatic", "UserDefined", and + "NoZones". + :vartype zonal_allocation_policy: str or ~azure.mgmt.redis.models.ZonalAllocationPolicy :ivar sku: The SKU of the Redis cache to deploy. Required. :vartype sku: ~azure.mgmt.redis.models.Sku :ivar subnet_id: The full resource ID of a subnet in a virtual network to deploy the Redis @@ -2485,6 +2557,7 @@ class RedisProperties(RedisCreateProperties): # pylint: disable=too-many-instan "public_network_access": {"key": "publicNetworkAccess", "type": "str"}, "update_channel": {"key": "updateChannel", "type": "str"}, "disable_access_key_authentication": {"key": "disableAccessKeyAuthentication", "type": "bool"}, + "zonal_allocation_policy": {"key": "zonalAllocationPolicy", "type": "str"}, "sku": {"key": "sku", "type": "Sku"}, "subnet_id": {"key": "subnetId", "type": "str"}, "static_ip": {"key": "staticIP", "type": "str"}, @@ -2513,6 +2586,7 @@ def __init__( public_network_access: Union[str, "_models.PublicNetworkAccess"] = "Enabled", update_channel: Optional[Union[str, "_models.UpdateChannel"]] = None, disable_access_key_authentication: bool = False, + zonal_allocation_policy: Optional[Union[str, "_models.ZonalAllocationPolicy"]] = None, subnet_id: Optional[str] = None, static_ip: Optional[str] = None, **kwargs: Any @@ -2555,6 +2629,15 @@ def __init__( :keyword disable_access_key_authentication: Authentication to Redis through access keys is disabled when set as true. Default value is false. :paramtype disable_access_key_authentication: bool + :keyword zonal_allocation_policy: Optional: Specifies how availability zones are allocated to + the Redis cache. 'Automatic' enables zone redundancy and Azure will automatically select zones + based on regional availability and capacity. 'UserDefined' will select availability zones + passed in by you using the 'zones' parameter. 'NoZones' will produce a non-zonal cache. If + 'zonalAllocationPolicy' is not passed, it will be set to 'UserDefined' when zones are passed + in, otherwise, it will be set to 'Automatic' in regions where zones are supported and 'NoZones' + in regions where zones are not supported. Known values are: "Automatic", "UserDefined", and + "NoZones". + :paramtype zonal_allocation_policy: str or ~azure.mgmt.redis.models.ZonalAllocationPolicy :keyword sku: The SKU of the Redis cache to deploy. Required. :paramtype sku: ~azure.mgmt.redis.models.Sku :keyword subnet_id: The full resource ID of a subnet in a virtual network to deploy the Redis @@ -2577,6 +2660,7 @@ def __init__( public_network_access=public_network_access, update_channel=update_channel, disable_access_key_authentication=disable_access_key_authentication, + zonal_allocation_policy=zonal_allocation_policy, sku=sku, subnet_id=subnet_id, static_ip=static_ip, @@ -2770,6 +2854,15 @@ class RedisResource(TrackedResource): # pylint: disable=too-many-instance-attri :ivar disable_access_key_authentication: Authentication to Redis through access keys is disabled when set as true. Default value is false. :vartype disable_access_key_authentication: bool + :ivar zonal_allocation_policy: Optional: Specifies how availability zones are allocated to the + Redis cache. 
'Automatic' enables zone redundancy and Azure will automatically select zones + based on regional availability and capacity. 'UserDefined' will select availability zones + passed in by you using the 'zones' parameter. 'NoZones' will produce a non-zonal cache. If + 'zonalAllocationPolicy' is not passed, it will be set to 'UserDefined' when zones are passed + in, otherwise, it will be set to 'Automatic' in regions where zones are supported and 'NoZones' + in regions where zones are not supported. Known values are: "Automatic", "UserDefined", and + "NoZones". + :vartype zonal_allocation_policy: str or ~azure.mgmt.redis.models.ZonalAllocationPolicy :ivar sku: The SKU of the Redis cache to deploy. Required. :vartype sku: ~azure.mgmt.redis.models.Sku :ivar subnet_id: The full resource ID of a subnet in a virtual network to deploy the Redis @@ -2843,6 +2936,7 @@ class RedisResource(TrackedResource): # pylint: disable=too-many-instance-attri "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, "update_channel": {"key": "properties.updateChannel", "type": "str"}, "disable_access_key_authentication": {"key": "properties.disableAccessKeyAuthentication", "type": "bool"}, + "zonal_allocation_policy": {"key": "properties.zonalAllocationPolicy", "type": "str"}, "sku": {"key": "properties.sku", "type": "Sku"}, "subnet_id": {"key": "properties.subnetId", "type": "str"}, "static_ip": {"key": "properties.staticIP", "type": "str"}, @@ -2878,6 +2972,7 @@ def __init__( # pylint: disable=too-many-locals public_network_access: Union[str, "_models.PublicNetworkAccess"] = "Enabled", update_channel: Optional[Union[str, "_models.UpdateChannel"]] = None, disable_access_key_authentication: bool = False, + zonal_allocation_policy: Optional[Union[str, "_models.ZonalAllocationPolicy"]] = None, subnet_id: Optional[str] = None, static_ip: Optional[str] = None, **kwargs: Any @@ -2928,6 +3023,15 @@ def __init__( # pylint: disable=too-many-locals :keyword disable_access_key_authentication: Authentication to Redis through access keys is disabled when set as true. Default value is false. :paramtype disable_access_key_authentication: bool + :keyword zonal_allocation_policy: Optional: Specifies how availability zones are allocated to + the Redis cache. 'Automatic' enables zone redundancy and Azure will automatically select zones + based on regional availability and capacity. 'UserDefined' will select availability zones + passed in by you using the 'zones' parameter. 'NoZones' will produce a non-zonal cache. If + 'zonalAllocationPolicy' is not passed, it will be set to 'UserDefined' when zones are passed + in, otherwise, it will be set to 'Automatic' in regions where zones are supported and 'NoZones' + in regions where zones are not supported. Known values are: "Automatic", "UserDefined", and + "NoZones". + :paramtype zonal_allocation_policy: str or ~azure.mgmt.redis.models.ZonalAllocationPolicy :keyword sku: The SKU of the Redis cache to deploy. Required. 
:paramtype sku: ~azure.mgmt.redis.models.Sku :keyword subnet_id: The full resource ID of a subnet in a virtual network to deploy the Redis @@ -2952,6 +3056,7 @@ def __init__( # pylint: disable=too-many-locals self.public_network_access = public_network_access self.update_channel = update_channel self.disable_access_key_authentication = disable_access_key_authentication + self.zonal_allocation_policy = zonal_allocation_policy self.sku = sku self.subnet_id = subnet_id self.static_ip = static_ip @@ -3007,6 +3112,15 @@ class RedisUpdateParameters(_serialization.Model): # pylint: disable=too-many-i :ivar disable_access_key_authentication: Authentication to Redis through access keys is disabled when set as true. Default value is false. :vartype disable_access_key_authentication: bool + :ivar zonal_allocation_policy: Optional: Specifies how availability zones are allocated to the + Redis cache. 'Automatic' enables zone redundancy and Azure will automatically select zones + based on regional availability and capacity. 'UserDefined' will select availability zones + passed in by you using the 'zones' parameter. 'NoZones' will produce a non-zonal cache. If + 'zonalAllocationPolicy' is not passed, it will be set to 'UserDefined' when zones are passed + in, otherwise, it will be set to 'Automatic' in regions where zones are supported and 'NoZones' + in regions where zones are not supported. Known values are: "Automatic", "UserDefined", and + "NoZones". + :vartype zonal_allocation_policy: str or ~azure.mgmt.redis.models.ZonalAllocationPolicy :ivar sku: The SKU of the Redis cache to deploy. :vartype sku: ~azure.mgmt.redis.models.Sku """ @@ -3028,6 +3142,7 @@ class RedisUpdateParameters(_serialization.Model): # pylint: disable=too-many-i "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, "update_channel": {"key": "properties.updateChannel", "type": "str"}, "disable_access_key_authentication": {"key": "properties.disableAccessKeyAuthentication", "type": "bool"}, + "zonal_allocation_policy": {"key": "properties.zonalAllocationPolicy", "type": "str"}, "sku": {"key": "properties.sku", "type": "Sku"}, } @@ -3047,6 +3162,7 @@ def __init__( public_network_access: Union[str, "_models.PublicNetworkAccess"] = "Enabled", update_channel: Optional[Union[str, "_models.UpdateChannel"]] = None, disable_access_key_authentication: bool = False, + zonal_allocation_policy: Optional[Union[str, "_models.ZonalAllocationPolicy"]] = None, sku: Optional["_models.Sku"] = None, **kwargs: Any ) -> None: @@ -3092,6 +3208,15 @@ def __init__( :keyword disable_access_key_authentication: Authentication to Redis through access keys is disabled when set as true. Default value is false. :paramtype disable_access_key_authentication: bool + :keyword zonal_allocation_policy: Optional: Specifies how availability zones are allocated to + the Redis cache. 'Automatic' enables zone redundancy and Azure will automatically select zones + based on regional availability and capacity. 'UserDefined' will select availability zones + passed in by you using the 'zones' parameter. 'NoZones' will produce a non-zonal cache. If + 'zonalAllocationPolicy' is not passed, it will be set to 'UserDefined' when zones are passed + in, otherwise, it will be set to 'Automatic' in regions where zones are supported and 'NoZones' + in regions where zones are not supported. Known values are: "Automatic", "UserDefined", and + "NoZones". 
+ :paramtype zonal_allocation_policy: str or ~azure.mgmt.redis.models.ZonalAllocationPolicy :keyword sku: The SKU of the Redis cache to deploy. :paramtype sku: ~azure.mgmt.redis.models.Sku """ @@ -3109,6 +3234,7 @@ def __init__( self.public_network_access = public_network_access self.update_channel = update_channel self.disable_access_key_authentication = disable_access_key_authentication + self.zonal_allocation_policy = zonal_allocation_policy self.sku = sku @@ -3150,6 +3276,15 @@ class RedisUpdateProperties(RedisCommonProperties): # pylint: disable=too-many- :ivar disable_access_key_authentication: Authentication to Redis through access keys is disabled when set as true. Default value is false. :vartype disable_access_key_authentication: bool + :ivar zonal_allocation_policy: Optional: Specifies how availability zones are allocated to the + Redis cache. 'Automatic' enables zone redundancy and Azure will automatically select zones + based on regional availability and capacity. 'UserDefined' will select availability zones + passed in by you using the 'zones' parameter. 'NoZones' will produce a non-zonal cache. If + 'zonalAllocationPolicy' is not passed, it will be set to 'UserDefined' when zones are passed + in, otherwise, it will be set to 'Automatic' in regions where zones are supported and 'NoZones' + in regions where zones are not supported. Known values are: "Automatic", "UserDefined", and + "NoZones". + :vartype zonal_allocation_policy: str or ~azure.mgmt.redis.models.ZonalAllocationPolicy :ivar sku: The SKU of the Redis cache to deploy. :vartype sku: ~azure.mgmt.redis.models.Sku """ @@ -3166,6 +3301,7 @@ class RedisUpdateProperties(RedisCommonProperties): # pylint: disable=too-many- "public_network_access": {"key": "publicNetworkAccess", "type": "str"}, "update_channel": {"key": "updateChannel", "type": "str"}, "disable_access_key_authentication": {"key": "disableAccessKeyAuthentication", "type": "bool"}, + "zonal_allocation_policy": {"key": "zonalAllocationPolicy", "type": "str"}, "sku": {"key": "sku", "type": "Sku"}, } @@ -3183,6 +3319,7 @@ def __init__( public_network_access: Union[str, "_models.PublicNetworkAccess"] = "Enabled", update_channel: Optional[Union[str, "_models.UpdateChannel"]] = None, disable_access_key_authentication: bool = False, + zonal_allocation_policy: Optional[Union[str, "_models.ZonalAllocationPolicy"]] = None, sku: Optional["_models.Sku"] = None, **kwargs: Any ) -> None: @@ -3224,6 +3361,15 @@ def __init__( :keyword disable_access_key_authentication: Authentication to Redis through access keys is disabled when set as true. Default value is false. :paramtype disable_access_key_authentication: bool + :keyword zonal_allocation_policy: Optional: Specifies how availability zones are allocated to + the Redis cache. 'Automatic' enables zone redundancy and Azure will automatically select zones + based on regional availability and capacity. 'UserDefined' will select availability zones + passed in by you using the 'zones' parameter. 'NoZones' will produce a non-zonal cache. If + 'zonalAllocationPolicy' is not passed, it will be set to 'UserDefined' when zones are passed + in, otherwise, it will be set to 'Automatic' in regions where zones are supported and 'NoZones' + in regions where zones are not supported. Known values are: "Automatic", "UserDefined", and + "NoZones". + :paramtype zonal_allocation_policy: str or ~azure.mgmt.redis.models.ZonalAllocationPolicy :keyword sku: The SKU of the Redis cache to deploy. 
:paramtype sku: ~azure.mgmt.redis.models.Sku """ @@ -3239,6 +3385,7 @@ def __init__( public_network_access=public_network_access, update_channel=update_channel, disable_access_key_authentication=disable_access_key_authentication, + zonal_allocation_policy=zonal_allocation_policy, **kwargs ) self.sku = sku diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/models/_redis_management_client_enums.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/models/_redis_management_client_enums.py index 5aafc27fb0b6..e192fc22d960 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/models/_redis_management_client_enums.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/models/_redis_management_client_enums.py @@ -170,3 +170,18 @@ class UpdateChannel(str, Enum, metaclass=CaseInsensitiveEnumMeta): STABLE = "Stable" PREVIEW = "Preview" + + +class ZonalAllocationPolicy(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Optional: Specifies how availability zones are allocated to the Redis cache. 'Automatic' + enables zone redundancy and Azure will automatically select zones based on regional + availability and capacity. 'UserDefined' will select availability zones passed in by you using + the 'zones' parameter. 'NoZones' will produce a non-zonal cache. If 'zonalAllocationPolicy' is + not passed, it will be set to 'UserDefined' when zones are passed in, otherwise, it will be set + to 'Automatic' in regions where zones are supported and 'NoZones' in regions where zones are + not supported. + """ + + AUTOMATIC = "Automatic" + USER_DEFINED = "UserDefined" + NO_ZONES = "NoZones" diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_access_policy_assignment_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_access_policy_assignment_operations.py index b6bf569fd8b1..edc885c0f67d 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_access_policy_assignment_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_access_policy_assignment_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -50,7 +50,7 @@ def build_create_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -96,7 +96,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -139,7 +139,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -180,7 +180,7 @@ def build_list_request(resource_group_name: str, cache_name: str, subscription_i _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -235,7 +235,7 @@ def _create_update_initial( access_policy_assignment_name: str, parameters: Union[_models.RedisCacheAccessPolicyAssignment, IO[bytes]], **kwargs: Any - ) -> _models.RedisCacheAccessPolicyAssignment: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -249,7 +249,7 @@ def _create_update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.RedisCacheAccessPolicyAssignment] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -271,10 +271,10 @@ def _create_update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -282,15 +282,15 @@ def _create_update_initial( response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) 
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("RedisCacheAccessPolicyAssignment", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("RedisCacheAccessPolicyAssignment", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -412,10 +412,11 @@ def begin_create_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("RedisCacheAccessPolicyAssignment", pipeline_response) + deserialized = self._deserialize("RedisCacheAccessPolicyAssignment", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -437,9 +438,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_initial( # pylint: disable=inconsistent-return-statements + def _delete_initial( self, resource_group_name: str, cache_name: str, access_policy_assignment_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -452,7 +453,7 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -463,10 +464,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -474,6 +475,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -482,8 +487,12 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements if response.status_code == 202: response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete( 
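
The model and enum hunks above introduce the new `zonalAllocationPolicy` property alongside the bump to api-version 2024-11-01, while the regenerated `*_initial` helpers now stream the initial LRO response and the `begin_*` pollers deserialize the final result from `pipeline_response.http_response`; the public client surface is unchanged. A minimal usage sketch, not part of the generated diff (subscription ID, resource group, cache name, and region are placeholders, and the credential assumes azure-identity is installed):

from azure.identity import DefaultAzureCredential
from azure.mgmt.redis import RedisManagementClient
from azure.mgmt.redis.models import RedisCreateParameters, Sku, ZonalAllocationPolicy

# Placeholders: substitute a real subscription ID, resource group, cache name, and region.
client = RedisManagementClient(DefaultAzureCredential(), subscription_id="<subscription-id>")

poller = client.redis.begin_create(
    resource_group_name="<resource-group>",
    name="<cache-name>",
    parameters=RedisCreateParameters(
        location="<region>",
        sku=Sku(name="Premium", family="P", capacity=1),
        # "Automatic" lets Azure pick zones; "UserDefined" honors an explicit 'zones' list;
        # "NoZones" creates a non-zonal cache (see the property docstrings above).
        zonal_allocation_policy=ZonalAllocationPolicy.AUTOMATIC,
    ),
)
cache = poller.result()  # begin_create still returns an LROPoller[RedisResource]
print(cache.zonal_allocation_policy)

Per the docstrings above, omitting the policy while passing an explicit zones list is treated as "UserDefined"; omitting both lets the service choose "Automatic" or "NoZones" based on regional zone support.
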
@@ -511,7 +520,7 @@ def begin_delete( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, cache_name=cache_name, access_policy_assignment_name=access_policy_assignment_name, @@ -521,6 +530,7 @@ def begin_delete( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -584,7 +594,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -599,7 +608,7 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisCacheAccessPolicyAssignment", pipeline_response) + deserialized = self._deserialize("RedisCacheAccessPolicyAssignment", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -647,7 +656,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -663,7 +671,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_access_policy_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_access_policy_operations.py index cb3d5c0c857e..a1d973e1ce5c 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_access_policy_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_access_policy_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -50,7 +50,7 @@ def build_create_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -96,7 +96,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -139,7 +139,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -180,7 +180,7 @@ def build_list_request(resource_group_name: str, cache_name: str, subscription_i _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -235,7 +235,7 @@ def _create_update_initial( access_policy_name: str, parameters: Union[_models.RedisCacheAccessPolicy, IO[bytes]], **kwargs: Any - ) -> _models.RedisCacheAccessPolicy: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -249,7 +249,7 @@ def _create_update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.RedisCacheAccessPolicy] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -271,10 +271,10 @@ def _create_update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -282,15 +282,15 @@ def _create_update_initial( response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("RedisCacheAccessPolicy", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("RedisCacheAccessPolicy", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -410,10 +410,11 @@ def begin_create_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("RedisCacheAccessPolicy", pipeline_response) + deserialized = self._deserialize("RedisCacheAccessPolicy", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -435,9 +436,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_initial( # pylint: disable=inconsistent-return-statements + def _delete_initial( self, resource_group_name: str, cache_name: str, access_policy_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -450,7 +451,7 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -461,10 +462,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -472,6 +473,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -480,8 +485,12 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements if response.status_code == 202: response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete( @@ -510,7 +519,7 @@ def begin_delete( lro_delay = 
kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, cache_name=cache_name, access_policy_name=access_policy_name, @@ -520,6 +529,7 @@ def begin_delete( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -584,7 +594,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -599,7 +608,7 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisCacheAccessPolicy", pipeline_response) + deserialized = self._deserialize("RedisCacheAccessPolicy", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -647,7 +656,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -663,7 +671,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_async_operation_status_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_async_operation_status_operations.py index 89eab597477b..f756c208349b 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_async_operation_status_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_async_operation_status_operations.py @@ -18,15 +18,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -43,7 +41,7 @@ def build_get_request(location: str, operation_id: str, subscription_id: str, ** _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -121,7 +119,6 @@ def get(self, location: str, operation_id: str, **kwargs: Any) -> _models.Operat headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -136,7 +133,7 @@ def get(self, location: str, operation_id: str, **kwargs: Any) -> _models.Operat error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("OperationStatus", pipeline_response) + deserialized = self._deserialize("OperationStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_firewall_rules_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_firewall_rules_operations.py index 61940ada46e2..062417a21836 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_firewall_rules_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_firewall_rules_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -46,7 +44,7 @@ def build_list_request(resource_group_name: str, cache_name: str, subscription_i _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -81,7 +79,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -120,7 +118,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -156,7 +154,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -243,7 +241,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -259,7 +256,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -411,7 +407,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -426,11 +421,7 @@ def create_or_update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("RedisFirewallRule", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("RedisFirewallRule", pipeline_response) + deserialized = self._deserialize("RedisFirewallRule", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -477,7 +468,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -492,7 +482,7 @@ def get( error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisFirewallRule", pipeline_response) + deserialized = self._deserialize("RedisFirewallRule", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -539,7 +529,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_linked_server_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_linked_server_operations.py index bc0d800a0542..8663132f9ac3 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_linked_server_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_linked_server_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -50,7 +50,7 @@ def build_create_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -87,7 +87,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -121,7 +121,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -153,7 +153,7 @@ def build_list_request(resource_group_name: str, name: str, subscription_id: str _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -206,7 +206,7 @@ def _create_initial( linked_server_name: str, parameters: Union[_models.RedisLinkedServerCreateParameters, IO[bytes]], **kwargs: Any - ) -> _models.RedisLinkedServerWithProperties: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -220,7 +220,7 @@ def _create_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.RedisLinkedServerWithProperties] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -242,10 +242,10 @@ def _create_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -253,15 +253,15 @@ def _create_initial( response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("RedisLinkedServerWithProperties", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("RedisLinkedServerWithProperties", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -381,10 +381,11 @@ def begin_create( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("RedisLinkedServerWithProperties", pipeline_response) + deserialized = self._deserialize("RedisLinkedServerWithProperties", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -406,9 +407,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_initial( # pylint: disable=inconsistent-return-statements + def _delete_initial( self, resource_group_name: str, name: str, linked_server_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -421,7 +422,7 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -432,10 +433,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -443,6 +444,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -451,8 +456,12 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements if response.status_code == 202: response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, response_headers) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete( @@ -481,7 +490,7 @@ def begin_delete( 
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, name=name, linked_server_name=linked_server_name, @@ -491,6 +500,7 @@ def begin_delete( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -552,7 +562,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -567,7 +576,7 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisLinkedServerWithProperties", pipeline_response) + deserialized = self._deserialize("RedisLinkedServerWithProperties", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -615,7 +624,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -631,7 +639,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_operations.py index b7b018c6ad16..91868fb5c254 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -45,7 +43,7 @@ def build_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -109,7 +107,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -125,7 +122,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_patch_schedules_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_patch_schedules_operations.py index 83d0d3c0fa9b..c20e8acfa4fb 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_patch_schedules_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_patch_schedules_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -48,7 +46,7 @@ def build_list_by_redis_resource_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -83,7 +81,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -120,7 +118,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -154,7 +152,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -241,7 +239,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -257,7 +254,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -410,7 +406,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -425,11 +420,7 @@ def create_or_update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("RedisPatchSchedule", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("RedisPatchSchedule", pipeline_response) + deserialized = self._deserialize("RedisPatchSchedule", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -477,7 +468,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -536,7 +526,6 @@ def get( headers=_headers, params=_params, ) - _request = 
_convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -551,7 +540,7 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisPatchSchedule", pipeline_response) + deserialized = self._deserialize("RedisPatchSchedule", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_private_endpoint_connections_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_private_endpoint_connections_operations.py index 4db65b8699c1..baabb4632a92 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_private_endpoint_connections_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_private_endpoint_connections_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -48,7 +48,7 @@ def build_list_request(resource_group_name: str, cache_name: str, subscription_i _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -87,7 +87,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -129,7 +129,7 @@ def build_put_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -174,7 +174,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -266,7 +266,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -282,7 +281,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -353,7 +351,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -368,7 +365,7 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -382,7 +379,7 @@ def _put_initial( private_endpoint_connection_name: str, properties: Union[_models.PrivateEndpointConnection, IO[bytes]], **kwargs: Any - ) -> _models.PrivateEndpointConnection: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -396,7 +393,7 @@ def _put_initial( api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -418,10 +415,10 @@ def _put_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -429,15 +426,15 @@ def _put_initial( response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -557,10 +554,11 @@ def begin_put( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -623,7 +621,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_private_link_resources_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_private_link_resources_operations.py index 86bb76d14ee6..a4957e1fb7e4 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_private_link_resources_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_private_link_resources_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -47,7 +45,7 @@ def build_list_by_redis_cache_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -135,7 +133,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -151,7 +148,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_redis_operations.py b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_redis_operations.py index f5e98cb5d0ac..eddd9df48cf6 100644 --- a/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_redis_operations.py +++ b/sdk/redis/azure-mgmt-redis/azure/mgmt/redis/operations/_redis_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -48,7 +48,7 @@ def build_check_name_availability_request(subscription_id: str, **kwargs: Any) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -77,7 +77,7 @@ def build_list_upgrade_notifications_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -109,7 +109,7 @@ def build_create_request(resource_group_name: str, name: str, subscription_id: s _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -143,7 +143,7 @@ def build_update_request(resource_group_name: str, name: str, subscription_id: s _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -177,7 +177,7 @@ def build_delete_request(resource_group_name: str, name: str, subscription_id: s _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -208,7 +208,7 @@ def build_get_request(resource_group_name: str, name: str, subscription_id: str, _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -239,7 +239,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + 
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -269,7 +269,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -293,7 +293,7 @@ def build_list_keys_request(resource_group_name: str, name: str, subscription_id _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -326,7 +326,7 @@ def build_regenerate_key_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -360,7 +360,7 @@ def build_force_reboot_request(resource_group_name: str, name: str, subscription _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -394,7 +394,7 @@ def build_import_data_request(resource_group_name: str, name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -428,7 +428,7 @@ def build_export_data_request(resource_group_name: str, name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -464,7 +464,7 @@ def build_flush_cache_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-03-01")) + 
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -596,7 +596,6 @@ def check_name_availability( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -657,7 +656,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -673,7 +671,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -709,7 +706,7 @@ def _create_initial( name: str, parameters: Union[_models.RedisCreateParameters, IO[bytes]], **kwargs: Any - ) -> _models.RedisResource: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -723,7 +720,7 @@ def _create_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.RedisResource] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -744,10 +741,10 @@ def _create_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -755,15 +752,15 @@ def _create_initial( response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("RedisResource", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("RedisResource", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -870,10 +867,11 @@ def begin_create( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("RedisResource", pipeline_response) + deserialized = self._deserialize("RedisResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -901,7 +899,7 @@ def _update_initial( name: str, parameters: Union[_models.RedisUpdateParameters, IO[bytes]], **kwargs: Any - ) -> _models.RedisResource: + ) -> Iterator[bytes]: error_map: MutableMapping[int, 
Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -915,7 +913,7 @@ def _update_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.RedisResource] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -936,10 +934,10 @@ def _update_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -947,15 +945,15 @@ def _update_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("RedisResource", pipeline_response) - - if response.status_code == 202: - deserialized = self._deserialize("RedisResource", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1062,10 +1060,11 @@ def begin_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("RedisResource", pipeline_response) + deserialized = self._deserialize("RedisResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1087,9 +1086,7 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _delete_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, name: str, **kwargs: Any - ) -> None: + def _delete_initial(self, resource_group_name: str, name: str, **kwargs: Any) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1102,7 +1099,7 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_delete_request( resource_group_name=resource_group_name, @@ -1112,10 +1109,10 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( 
# pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1123,12 +1120,20 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_delete(self, resource_group_name: str, name: str, **kwargs: Any) -> LROPoller[None]: @@ -1152,7 +1157,7 @@ def begin_delete(self, resource_group_name: str, name: str, **kwargs: Any) -> LR lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, name=name, api_version=api_version, @@ -1161,6 +1166,7 @@ def begin_delete(self, resource_group_name: str, name: str, **kwargs: Any) -> LR params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1217,7 +1223,6 @@ def get(self, resource_group_name: str, name: str, **kwargs: Any) -> _models.Red headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1232,7 +1237,7 @@ def get(self, resource_group_name: str, name: str, **kwargs: Any) -> _models.Red error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisResource", pipeline_response) + deserialized = self._deserialize("RedisResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1274,7 +1279,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -1290,7 +1294,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1351,7 +1354,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -1367,7 +1369,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1433,7 +1434,6 @@ def 
list_keys(self, resource_group_name: str, name: str, **kwargs: Any) -> _mode headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1448,7 +1448,7 @@ def list_keys(self, resource_group_name: str, name: str, **kwargs: Any) -> _mode error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisAccessKeys", pipeline_response) + deserialized = self._deserialize("RedisAccessKeys", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1568,7 +1568,6 @@ def regenerate_key( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1583,7 +1582,7 @@ def regenerate_key( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisAccessKeys", pipeline_response) + deserialized = self._deserialize("RedisAccessKeys", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1703,7 +1702,6 @@ def force_reboot( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1718,20 +1716,20 @@ def force_reboot( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("RedisForceRebootResponse", pipeline_response) + deserialized = self._deserialize("RedisForceRebootResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - def _import_data_initial( # pylint: disable=inconsistent-return-statements + def _import_data_initial( self, resource_group_name: str, name: str, parameters: Union[_models.ImportRDBParameters, IO[bytes]], **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1745,7 +1743,7 @@ def _import_data_initial( # pylint: disable=inconsistent-return-statements api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1766,10 +1764,10 @@ def _import_data_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1777,12 +1775,20 @@ def _import_data_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load 
the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @overload def begin_import_data( @@ -1870,7 +1876,7 @@ def begin_import_data( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._import_data_initial( # type: ignore + raw_result = self._import_data_initial( resource_group_name=resource_group_name, name=name, parameters=parameters, @@ -1881,6 +1887,7 @@ def begin_import_data( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1902,13 +1909,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - def _export_data_initial( # pylint: disable=inconsistent-return-statements + def _export_data_initial( self, resource_group_name: str, name: str, parameters: Union[_models.ExportRDBParameters, IO[bytes]], **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1922,7 +1929,7 @@ def _export_data_initial( # pylint: disable=inconsistent-return-statements api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1943,10 +1950,10 @@ def _export_data_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1954,12 +1961,20 @@ def _export_data_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, 
deserialized, {}) # type: ignore + + return deserialized # type: ignore @overload def begin_export_data( @@ -2047,7 +2062,7 @@ def begin_export_data( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._export_data_initial( # type: ignore + raw_result = self._export_data_initial( resource_group_name=resource_group_name, name=name, parameters=parameters, @@ -2058,6 +2073,7 @@ def begin_export_data( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -2079,9 +2095,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - def _flush_cache_initial( - self, resource_group_name: str, cache_name: str, **kwargs: Any - ) -> Optional[_models.OperationStatusResult]: + def _flush_cache_initial(self, resource_group_name: str, cache_name: str, **kwargs: Any) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -2094,7 +2108,7 @@ def _flush_cache_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.OperationStatusResult]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_flush_cache_request( resource_group_name=resource_group_name, @@ -2104,10 +2118,10 @@ def _flush_cache_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2115,25 +2129,21 @@ def _flush_cache_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) - response_headers["Azure-AsyncOperation"] = self._deserialize( - "str", response.headers.get("Azure-AsyncOperation") - ) - - deserialized = self._deserialize("OperationStatusResult", pipeline_response) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) - if response.status_code == 202: - response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) - response_headers["Azure-AsyncOperation"] = self._deserialize( - "str", response.headers.get("Azure-AsyncOperation") - ) + deserialized = 
response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -2174,6 +2184,7 @@ def begin_flush_cache( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): @@ -2184,7 +2195,7 @@ def get_long_running_output(pipeline_response): "str", response.headers.get("Azure-AsyncOperation") ) - deserialized = self._deserialize("OperationStatusResult", pipeline_response) + deserialized = self._deserialize("OperationStatusResult", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_create_update.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_create_update.py index 71cfad9402d5..163aef7b963a 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_create_update.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient @@ -47,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheAccessPolicyAssignmentCreateUpdate.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheAccessPolicyAssignmentCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_delete.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_delete.py index 681657307d6f..491edc10b1a0 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_delete.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheAccessPolicyAssignmentDelete.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheAccessPolicyAssignmentDelete.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_get.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_get.py index 18f74602bebc..0b3244ea513d 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_get.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheAccessPolicyAssignmentGet.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheAccessPolicyAssignmentGet.json if __name__ == "__main__": main() diff --git 
a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_list.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_list.py index ddeb853f4476..8d8dc173b96f 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_list.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_assignment_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheAccessPolicyAssignmentList.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheAccessPolicyAssignmentList.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_create_update.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_create_update.py index 8fbb008b434e..2f03da9e34fd 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_create_update.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_create_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient @@ -41,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheAccessPolicyCreateUpdate.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheAccessPolicyCreateUpdate.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_delete.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_delete.py index 12dd2fae2393..d14e747a6e02 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_delete.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheAccessPolicyDelete.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheAccessPolicyDelete.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_get.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_get.py index d1f19d3306c6..62a1d2a3e6b0 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_get.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheAccessPolicyGet.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheAccessPolicyGet.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_list.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_list.py index 4f8697d8b8c3..bf7464cdba7a 
100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_list.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_access_policy_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheAccessPolicyList.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheAccessPolicyList.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_async_operation_status.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_async_operation_status.py index a70d50a3768d..75b7f4df16a0 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_async_operation_status.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_async_operation_status.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheAsyncOperationStatus.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheAsyncOperationStatus.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_check_name_availability.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_check_name_availability.py index 40936245f6ba..2c4ba0f0b857 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_check_name_availability.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_check_name_availability.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient @@ -37,6 +35,6 @@ def main(): ) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheCheckNameAvailability.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheCheckNameAvailability.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create.py index b73a63587745..ec29f4f18716 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient @@ -36,7 +34,7 @@ def main(): resource_group_name="rg1", name="cache1", parameters={ - "location": "West US", + "location": "East US", "properties": { "enableNonSslPort": True, "minimumTlsVersion": "1.2", @@ -54,6 +52,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheCreate.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheCreate.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_automatic_zonal_allocation_policy.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_automatic_zonal_allocation_policy.py new file mode 100644 index 000000000000..1de2aff2dced --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_automatic_zonal_allocation_policy.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.redis import RedisManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-redis +# USAGE + python redis_cache_create_automatic_zonal_allocation_policy.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = RedisManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.redis.begin_create( + resource_group_name="rg1", + name="cache1", + parameters={ + "location": "East US", + "properties": { + "enableNonSslPort": True, + "minimumTlsVersion": "1.2", + "redisConfiguration": {"maxmemory-policy": "allkeys-lru"}, + "replicasPerPrimary": 2, + "shardCount": 2, + "sku": {"capacity": 1, "family": "P", "name": "Premium"}, + "staticIP": "192.168.0.5", + "subnetId": "/subscriptions/subid/resourceGroups/rg2/providers/Microsoft.Network/virtualNetworks/network1/subnets/subnet1", + "zonalAllocationPolicy": "Automatic", + }, + }, + ).result() + print(response) + + +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheCreateAutomaticZonalAllocationPolicy.json +if __name__ == "__main__": + main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_default_version.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_default_version.py index 0f062b504355..72b80752bd1f 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_default_version.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_default_version.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient @@ -36,7 +34,7 @@ def main(): resource_group_name="rg1", name="cache1", parameters={ - "location": "West US", + "location": "East US", "properties": { "enableNonSslPort": True, "minimumTlsVersion": "1.2", @@ -53,6 +51,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheCreateDefaultVersion.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheCreateDefaultVersion.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_latest_version.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_latest_version.py index 59b7b2e9d2c7..4074e4ed61a3 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_latest_version.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_latest_version.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient @@ -36,7 +34,7 @@ def main(): resource_group_name="rg1", name="cache1", parameters={ - "location": "West US", + "location": "East US", "properties": { "enableNonSslPort": True, "minimumTlsVersion": "1.2", @@ -54,6 +52,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheCreateLatestVersion.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheCreateLatestVersion.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_no_zones_zonal_allocation_policy.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_no_zones_zonal_allocation_policy.py new file mode 100644 index 000000000000..7ddc2ee613cc --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_no_zones_zonal_allocation_policy.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.redis import RedisManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-redis +# USAGE + python redis_cache_create_no_zones_zonal_allocation_policy.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = RedisManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.redis.begin_create( + resource_group_name="rg1", + name="cache1", + parameters={ + "location": "East US", + "properties": { + "enableNonSslPort": True, + "minimumTlsVersion": "1.2", + "redisConfiguration": {"maxmemory-policy": "allkeys-lru"}, + "replicasPerPrimary": 2, + "shardCount": 2, + "sku": {"capacity": 1, "family": "P", "name": "Premium"}, + "staticIP": "192.168.0.5", + "subnetId": "/subscriptions/subid/resourceGroups/rg2/providers/Microsoft.Network/virtualNetworks/network1/subnets/subnet1", + "zonalAllocationPolicy": "NoZones", + }, + }, + ).result() + print(response) + + +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheCreateNoZonesZonalAllocationPolicy.json +if __name__ == "__main__": + main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_user_defined_zonal_allocation_policy.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_user_defined_zonal_allocation_policy.py new file mode 100644 index 000000000000..7525f8158bd1 --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_create_user_defined_zonal_allocation_policy.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.redis import RedisManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-redis +# USAGE + python redis_cache_create_user_defined_zonal_allocation_policy.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = RedisManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.redis.begin_create( + resource_group_name="rg1", + name="cache1", + parameters={ + "location": "East US", + "properties": { + "enableNonSslPort": True, + "minimumTlsVersion": "1.2", + "redisConfiguration": {"maxmemory-policy": "allkeys-lru"}, + "redisVersion": "Latest", + "replicasPerPrimary": 2, + "shardCount": 2, + "sku": {"capacity": 1, "family": "P", "name": "Premium"}, + "staticIP": "192.168.0.5", + "subnetId": "/subscriptions/subid/resourceGroups/rg2/providers/Microsoft.Network/virtualNetworks/network1/subnets/subnet1", + "zonalAllocationPolicy": "UserDefined", + }, + "zones": ["1"], + }, + ).result() + print(response) + + +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheCreateUserDefinedZonalAllocationPolicy.json +if __name__ == "__main__": + main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_delete.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_delete.py index f6933ef73b2d..576e653c58e6 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_delete.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_delete.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheDelete.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheDelete.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_delete_private_endpoint_connection.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_delete_private_endpoint_connection.py index 0996ea17454f..6cc0e6dc98d9 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_delete_private_endpoint_connection.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_delete_private_endpoint_connection.py @@ -37,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheDeletePrivateEndpointConnection.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheDeletePrivateEndpointConnection.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_export.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_export.py index a684c3c3c63c..13cbd18133f2 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_export.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_export.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient @@ -44,6 +42,6 @@ def main(): ).result() -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheExport.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheExport.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rule_create.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rule_create.py index b8467187f8c1..3bbe55d66819 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rule_create.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rule_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient @@ -41,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheFirewallRuleCreate.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheFirewallRuleCreate.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rule_delete.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rule_delete.py index 5b84631722b3..242395d69b59 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rule_delete.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rule_delete.py @@ -37,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheFirewallRuleDelete.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheFirewallRuleDelete.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rule_get.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rule_get.py index fbcaa5f05f2c..e84f85f9c2ab 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rule_get.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rule_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheFirewallRuleGet.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheFirewallRuleGet.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rules_list.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rules_list.py index b2172d09f8b9..32d85f657a89 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rules_list.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_firewall_rules_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: 
specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheFirewallRulesList.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheFirewallRulesList.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_flush.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_flush.py index da642d2966d8..1f0e8ee414f3 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_flush.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_flush.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheFlush.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheFlush.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_force_reboot.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_force_reboot.py index 669a0c456233..b9c04a12b484 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_force_reboot.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_force_reboot.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient @@ -40,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheForceReboot.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheForceReboot.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_get.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_get.py index 3b3395658015..373f3f9dd103 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_get.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheGet.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheGet.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_get_private_endpoint_connection.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_get_private_endpoint_connection.py index 4aa56493620d..c783efb5a18a 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_get_private_endpoint_connection.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_get_private_endpoint_connection.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheGetPrivateEndpointConnection.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheGetPrivateEndpointConnection.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_import.py 
b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_import.py index f5735e206dd6..ec9e533722b2 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_import.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_import.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient @@ -43,6 +41,6 @@ def main(): ).result() -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheImport.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheImport.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_create.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_create.py index a50748bebc89..cdc45766023b 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_create.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient @@ -47,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheLinkedServer_Create.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheLinkedServer_Create.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_delete.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_delete.py index 13415e16bcd5..21e5e4b27090 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_delete.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheLinkedServer_Delete.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheLinkedServer_Delete.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_get.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_get.py index cbb859596948..7b01e275de67 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_get.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheLinkedServer_Get.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheLinkedServer_Get.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_list.py 
b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_list.py index 87d228c0a35d..61c031524bb5 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_list.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_linked_server_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheLinkedServer_List.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheLinkedServer_List.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list.py index ebdbad271316..47ba6ba53680 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list.py @@ -35,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheList.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheList.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_by_resource_group.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_by_resource_group.py index c40ea36064c8..e222ad6fe66f 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_by_resource_group.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_by_resource_group.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheListByResourceGroup.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheListByResourceGroup.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_keys.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_keys.py index da1c92946a49..813bf7027360 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_keys.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_keys.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheListKeys.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheListKeys.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_private_endpoint_connections.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_private_endpoint_connections.py index b0be0f717747..9e936f3be251 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_private_endpoint_connections.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_private_endpoint_connections.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheListPrivateEndpointConnections.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheListPrivateEndpointConnections.json if __name__ == "__main__": main() diff --git 
a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_private_link_resources.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_private_link_resources.py index 6185084859db..acce4f021a0b 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_private_link_resources.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_private_link_resources.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheListPrivateLinkResources.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheListPrivateLinkResources.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_upgrade_notifications.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_upgrade_notifications.py index aa5384df8a79..14e8c93d837e 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_upgrade_notifications.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_list_upgrade_notifications.py @@ -39,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheListUpgradeNotifications.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheListUpgradeNotifications.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_operations.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_operations.py index 6a2444d70734..b80288dc2bed 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_operations.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_operations.py @@ -35,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheOperations.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheOperations.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_create_or_update.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_create_or_update.py index 92dce38210e1..6bb35edd15d7 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_create_or_update.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_create_or_update.py @@ -6,15 +6,10 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, TYPE_CHECKING, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from .. 
import models as _models """ # PREREQUISITES pip install azure-identity @@ -51,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCachePatchSchedulesCreateOrUpdate.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCachePatchSchedulesCreateOrUpdate.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_delete.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_delete.py index b50d55b6f02a..648444cddaa8 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_delete.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_delete.py @@ -6,15 +6,10 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from .. import models as _models """ # PREREQUISITES pip install azure-identity @@ -42,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCachePatchSchedulesDelete.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCachePatchSchedulesDelete.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_get.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_get.py index 49293679d19c..f929dc8aa5ff 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_get.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_get.py @@ -6,15 +6,10 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from .. 
import models as _models """ # PREREQUISITES pip install azure-identity @@ -43,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCachePatchSchedulesGet.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCachePatchSchedulesGet.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_list.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_list.py index e0f37773c98d..6e2dd23e532f 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_list.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_patch_schedules_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCachePatchSchedulesList.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCachePatchSchedulesList.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_put_private_endpoint_connection.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_put_private_endpoint_connection.py index 99d16d9fa93f..8c149ffc38fa 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_put_private_endpoint_connection.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_put_private_endpoint_connection.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient @@ -43,6 +41,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCachePutPrivateEndpointConnection.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCachePutPrivateEndpointConnection.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_regenerate_key.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_regenerate_key.py index 5e688d4d86c5..7e436d040bdd 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_regenerate_key.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_regenerate_key.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient @@ -40,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheRegenerateKey.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheRegenerateKey.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_update.py b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_update.py index 9c78683060e4..e2c5a2248f9f 100644 --- a/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_update.py +++ b/sdk/redis/azure-mgmt-redis/generated_samples/redis_cache_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.redis import RedisManagementClient @@ -40,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-03-01/examples/RedisCacheUpdate.json +# x-ms-original-file: specification/redis/resource-manager/Microsoft.Cache/stable/2024-11-01/examples/RedisCacheUpdate.json if __name__ == "__main__": main() diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/conftest.py b/sdk/redis/azure-mgmt-redis/generated_tests/conftest.py new file mode 100644 index 000000000000..892795ddb3b3 --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/conftest.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import os +import pytest +from dotenv import load_dotenv +from devtools_testutils import ( + test_proxy, + add_general_regex_sanitizer, + add_body_key_sanitizer, + add_header_regex_sanitizer, +) + +load_dotenv() + + +# avoid recording sensitive identity information in recordings +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy): + redismanagement_subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") + redismanagement_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000") + redismanagement_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + redismanagement_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=redismanagement_subscription_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=redismanagement_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=redismanagement_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=redismanagement_client_secret, value="00000000-0000-0000-0000-000000000000") + + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") + add_header_regex_sanitizer(key="Cookie", value="cookie;") + add_body_key_sanitizer(json_path="$..access_token", value="access_token") diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_access_policy_assignment_operations.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_access_policy_assignment_operations.py new file mode 100644 index 000000000000..997783fa0dc8 --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_access_policy_assignment_operations.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementAccessPolicyAssignmentOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update(self, resource_group): + response = self.client.access_policy_assignment.begin_create_update( + resource_group_name=resource_group.name, + cache_name="str", + access_policy_assignment_name="str", + parameters={ + "accessPolicyName": "str", + "id": "str", + "name": "str", + "objectId": "str", + "objectIdAlias": "str", + "provisioningState": "str", + "type": "str", + }, + api_version="2024-11-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ...
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.access_policy_assignment.begin_delete( + resource_group_name=resource_group.name, + cache_name="str", + access_policy_assignment_name="str", + api_version="2024-11-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.access_policy_assignment.get( + resource_group_name=resource_group.name, + cache_name="str", + access_policy_assignment_name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.access_policy_assignment.list( + resource_group_name=resource_group.name, + cache_name="str", + api_version="2024-11-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_access_policy_assignment_operations_async.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_access_policy_assignment_operations_async.py new file mode 100644 index 000000000000..0b89ddfb4612 --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_access_policy_assignment_operations_async.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis.aio import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementAccessPolicyAssignmentOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update(self, resource_group): + response = await ( + await self.client.access_policy_assignment.begin_create_update( + resource_group_name=resource_group.name, + cache_name="str", + access_policy_assignment_name="str", + parameters={ + "accessPolicyName": "str", + "id": "str", + "name": "str", + "objectId": "str", + "objectIdAlias": "str", + "provisioningState": "str", + "type": "str", + }, + api_version="2024-11-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.access_policy_assignment.begin_delete( + resource_group_name=resource_group.name, + cache_name="str", + access_policy_assignment_name="str", + api_version="2024-11-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.access_policy_assignment.get( + resource_group_name=resource_group.name, + cache_name="str", + access_policy_assignment_name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.access_policy_assignment.list( + resource_group_name=resource_group.name, + cache_name="str", + api_version="2024-11-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_access_policy_operations.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_access_policy_operations.py new file mode 100644 index 000000000000..da49c57209ab --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_access_policy_operations.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementAccessPolicyOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_update(self, resource_group): + response = self.client.access_policy.begin_create_update( + resource_group_name=resource_group.name, + cache_name="str", + access_policy_name="str", + parameters={"id": "str", "name": "str", "permissions": "str", "provisioningState": "str", "type": "str"}, + api_version="2024-11-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.access_policy.begin_delete( + resource_group_name=resource_group.name, + cache_name="str", + access_policy_name="str", + api_version="2024-11-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.access_policy.get( + resource_group_name=resource_group.name, + cache_name="str", + access_policy_name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.access_policy.list( + resource_group_name=resource_group.name, + cache_name="str", + api_version="2024-11-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_access_policy_operations_async.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_access_policy_operations_async.py new file mode 100644 index 000000000000..d6970638d67a --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_access_policy_operations_async.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis.aio import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementAccessPolicyOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_update(self, resource_group): + response = await ( + await self.client.access_policy.begin_create_update( + resource_group_name=resource_group.name, + cache_name="str", + access_policy_name="str", + parameters={ + "id": "str", + "name": "str", + "permissions": "str", + "provisioningState": "str", + "type": "str", + }, + api_version="2024-11-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.access_policy.begin_delete( + resource_group_name=resource_group.name, + cache_name="str", + access_policy_name="str", + api_version="2024-11-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.access_policy.get( + resource_group_name=resource_group.name, + cache_name="str", + access_policy_name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.access_policy.list( + resource_group_name=resource_group.name, + cache_name="str", + api_version="2024-11-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_async_operation_status_operations.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_async_operation_status_operations.py new file mode 100644 index 000000000000..6d40c674be9a --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_async_operation_status_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementAsyncOperationStatusOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.async_operation_status.get( + location="str", + operation_id="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_async_operation_status_operations_async.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_async_operation_status_operations_async.py new file mode 100644 index 000000000000..496b54fb0987 --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_async_operation_status_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis.aio import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementAsyncOperationStatusOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.async_operation_status.get( + location="str", + operation_id="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_firewall_rules_operations.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_firewall_rules_operations.py new file mode 100644 index 000000000000..255829e27b5d --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_firewall_rules_operations.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementFirewallRulesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.firewall_rules.list( + resource_group_name=resource_group.name, + cache_name="str", + api_version="2024-11-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.firewall_rules.create_or_update( + resource_group_name=resource_group.name, + cache_name="str", + rule_name="str", + parameters={"endIP": "str", "startIP": "str", "id": "str", "name": "str", "type": "str"}, + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.firewall_rules.get( + resource_group_name=resource_group.name, + cache_name="str", + rule_name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.firewall_rules.delete( + resource_group_name=resource_group.name, + cache_name="str", + rule_name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_firewall_rules_operations_async.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_firewall_rules_operations_async.py new file mode 100644 index 000000000000..beaeb0166f78 --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_firewall_rules_operations_async.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis.aio import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementFirewallRulesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.firewall_rules.list( + resource_group_name=resource_group.name, + cache_name="str", + api_version="2024-11-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.firewall_rules.create_or_update( + resource_group_name=resource_group.name, + cache_name="str", + rule_name="str", + parameters={"endIP": "str", "startIP": "str", "id": "str", "name": "str", "type": "str"}, + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.firewall_rules.get( + resource_group_name=resource_group.name, + cache_name="str", + rule_name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.firewall_rules.delete( + resource_group_name=resource_group.name, + cache_name="str", + rule_name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_linked_server_operations.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_linked_server_operations.py new file mode 100644 index 000000000000..7eb018a84463 --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_linked_server_operations.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementLinkedServerOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create(self, resource_group): + response = self.client.linked_server.begin_create( + resource_group_name=resource_group.name, + name="str", + linked_server_name="str", + parameters={ + "linkedRedisCacheId": "str", + "linkedRedisCacheLocation": "str", + "serverRole": "str", + "geoReplicatedPrimaryHostName": "str", + "primaryHostName": "str", + }, + api_version="2024-11-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.linked_server.begin_delete( + resource_group_name=resource_group.name, + name="str", + linked_server_name="str", + api_version="2024-11-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.linked_server.get( + resource_group_name=resource_group.name, + name="str", + linked_server_name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.linked_server.list( + resource_group_name=resource_group.name, + name="str", + api_version="2024-11-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_linked_server_operations_async.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_linked_server_operations_async.py new file mode 100644 index 000000000000..25a3fef4cb14 --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_linked_server_operations_async.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis.aio import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementLinkedServerOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create(self, resource_group): + response = await ( + await self.client.linked_server.begin_create( + resource_group_name=resource_group.name, + name="str", + linked_server_name="str", + parameters={ + "linkedRedisCacheId": "str", + "linkedRedisCacheLocation": "str", + "serverRole": "str", + "geoReplicatedPrimaryHostName": "str", + "primaryHostName": "str", + }, + api_version="2024-11-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.linked_server.begin_delete( + resource_group_name=resource_group.name, + name="str", + linked_server_name="str", + api_version="2024-11-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.linked_server.get( + resource_group_name=resource_group.name, + name="str", + linked_server_name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.linked_server.list( + resource_group_name=resource_group.name, + name="str", + api_version="2024-11-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_operations.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_operations.py new file mode 100644 index 000000000000..4107a335d20b --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_operations.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.operations.list( + api_version="2024-11-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_operations_async.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_operations_async.py new file mode 100644 index 000000000000..97de9971168a --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_operations_async.py @@ -0,0 +1,30 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis.aio import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.operations.list( + api_version="2024-11-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_patch_schedules_operations.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_patch_schedules_operations.py new file mode 100644 index 000000000000..86d1ab5d529d --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_patch_schedules_operations.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementPatchSchedulesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_redis_resource(self, resource_group): + response = self.client.patch_schedules.list_by_redis_resource( + resource_group_name=resource_group.name, + cache_name="str", + api_version="2024-11-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.patch_schedules.create_or_update( + resource_group_name=resource_group.name, + name="str", + default="str", + parameters={ + "scheduleEntries": [{"dayOfWeek": "str", "startHourUtc": 0, "maintenanceWindow": "1 day, 0:00:00"}], + "id": "str", + "location": "str", + "name": "str", + "type": "str", + }, + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.patch_schedules.delete( + resource_group_name=resource_group.name, + name="str", + default="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.patch_schedules.get( + resource_group_name=resource_group.name, + name="str", + default="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_patch_schedules_operations_async.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_patch_schedules_operations_async.py new file mode 100644 index 000000000000..669b567ad7f9 --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_patch_schedules_operations_async.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis.aio import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementPatchSchedulesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_redis_resource(self, resource_group): + response = self.client.patch_schedules.list_by_redis_resource( + resource_group_name=resource_group.name, + cache_name="str", + api_version="2024-11-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.patch_schedules.create_or_update( + resource_group_name=resource_group.name, + name="str", + default="str", + parameters={ + "scheduleEntries": [{"dayOfWeek": "str", "startHourUtc": 0, "maintenanceWindow": "1 day, 0:00:00"}], + "id": "str", + "location": "str", + "name": "str", + "type": "str", + }, + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.patch_schedules.delete( + resource_group_name=resource_group.name, + name="str", + default="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.patch_schedules.get( + resource_group_name=resource_group.name, + name="str", + default="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_private_endpoint_connections_operations.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_private_endpoint_connections_operations.py new file mode 100644 index 000000000000..f9a4d024bc1a --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_private_endpoint_connections_operations.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementPrivateEndpointConnectionsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.private_endpoint_connections.list( + resource_group_name=resource_group.name, + cache_name="str", + api_version="2024-11-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.private_endpoint_connections.get( + resource_group_name=resource_group.name, + cache_name="str", + private_endpoint_connection_name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_put(self, resource_group): + response = self.client.private_endpoint_connections.begin_put( + resource_group_name=resource_group.name, + cache_name="str", + private_endpoint_connection_name="str", + properties={ + "id": "str", + "name": "str", + "privateEndpoint": {"id": "str"}, + "privateLinkServiceConnectionState": {"actionsRequired": "str", "description": "str", "status": "str"}, + "provisioningState": "str", + "type": "str", + }, + api_version="2024-11-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.private_endpoint_connections.delete( + resource_group_name=resource_group.name, + cache_name="str", + private_endpoint_connection_name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_private_endpoint_connections_operations_async.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_private_endpoint_connections_operations_async.py new file mode 100644 index 000000000000..edfad87b6aca --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_private_endpoint_connections_operations_async.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis.aio import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementPrivateEndpointConnectionsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.private_endpoint_connections.list( + resource_group_name=resource_group.name, + cache_name="str", + api_version="2024-11-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.private_endpoint_connections.get( + resource_group_name=resource_group.name, + cache_name="str", + private_endpoint_connection_name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_put(self, resource_group): + response = await ( + await self.client.private_endpoint_connections.begin_put( + resource_group_name=resource_group.name, + cache_name="str", + private_endpoint_connection_name="str", + properties={ + "id": "str", + "name": "str", + "privateEndpoint": {"id": "str"}, + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "provisioningState": "str", + "type": "str", + }, + api_version="2024-11-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.private_endpoint_connections.delete( + resource_group_name=resource_group.name, + cache_name="str", + private_endpoint_connection_name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_private_link_resources_operations.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_private_link_resources_operations.py new file mode 100644 index 000000000000..0d9c5ea18dc2 --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_private_link_resources_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementPrivateLinkResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_redis_cache(self, resource_group): + response = self.client.private_link_resources.list_by_redis_cache( + resource_group_name=resource_group.name, + cache_name="str", + api_version="2024-11-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_private_link_resources_operations_async.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_private_link_resources_operations_async.py new file mode 100644 index 000000000000..c1ba472b1abd --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_private_link_resources_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis.aio import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementPrivateLinkResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_redis_cache(self, resource_group): + response = self.client.private_link_resources.list_by_redis_cache( + resource_group_name=resource_group.name, + cache_name="str", + api_version="2024-11-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_redis_operations.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_redis_operations.py new file mode 100644 index 000000000000..d287a4757344 --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_redis_operations.py @@ -0,0 +1,287 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementRedisOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_check_name_availability(self, resource_group): + response = self.client.redis.check_name_availability( + parameters={"name": "str", "type": "str"}, + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_upgrade_notifications(self, resource_group): + response = self.client.redis.list_upgrade_notifications( + resource_group_name=resource_group.name, + name="str", + history=0.0, + api_version="2024-11-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create(self, resource_group): + response = self.client.redis.begin_create( + resource_group_name=resource_group.name, + name="str", + parameters={ + "location": "str", + "sku": {"capacity": 0, "family": "str", "name": "str"}, + "disableAccessKeyAuthentication": False, + "enableNonSslPort": False, + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "minimumTlsVersion": "str", + "publicNetworkAccess": "Enabled", + "redisConfiguration": { + "aad-enabled": "str", + "aof-backup-enabled": "str", + "aof-storage-connection-string-0": "str", + "aof-storage-connection-string-1": "str", + "authnotrequired": "str", + "maxclients": "str", + "maxfragmentationmemory-reserved": "str", + "maxmemory-delta": "str", + "maxmemory-policy": "str", + "maxmemory-reserved": "str", + "notify-keyspace-events": "str", + "preferred-data-archive-auth-method": "str", + "preferred-data-persistence-auth-method": "str", + "rdb-backup-enabled": "str", + "rdb-backup-frequency": "str", + "rdb-backup-max-snapshot-count": "str", + "rdb-storage-connection-string": "str", + "storage-subscription-id": "str", + "zonal-configuration": "str", + }, + "redisVersion": "str", + "replicasPerMaster": 0, + "replicasPerPrimary": 0, + "shardCount": 0, + "staticIP": "str", + "subnetId": "str", + "tags": {"str": "str"}, + "tenantSettings": {"str": "str"}, + "updateChannel": "str", + "zonalAllocationPolicy": "str", + "zones": ["str"], + }, + api_version="2024-11-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update(self, resource_group): + response = self.client.redis.begin_update( + resource_group_name=resource_group.name, + name="str", + parameters={ + "disableAccessKeyAuthentication": False, + "enableNonSslPort": False, + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "minimumTlsVersion": "str", + "publicNetworkAccess": "Enabled", + "redisConfiguration": { + "aad-enabled": "str", + "aof-backup-enabled": "str", + "aof-storage-connection-string-0": "str", + "aof-storage-connection-string-1": "str", + "authnotrequired": "str", + "maxclients": "str", + "maxfragmentationmemory-reserved": "str", + "maxmemory-delta": "str", + "maxmemory-policy": "str", + "maxmemory-reserved": "str", + "notify-keyspace-events": "str", + "preferred-data-archive-auth-method": "str", + "preferred-data-persistence-auth-method": "str", + "rdb-backup-enabled": "str", + "rdb-backup-frequency": "str", + "rdb-backup-max-snapshot-count": "str", + "rdb-storage-connection-string": "str", + "storage-subscription-id": "str", + "zonal-configuration": "str", + }, + "redisVersion": "str", + "replicasPerMaster": 0, + "replicasPerPrimary": 0, + "shardCount": 0, + "sku": {"capacity": 0, "family": "str", "name": "str"}, + "tags": {"str": "str"}, + "tenantSettings": {"str": "str"}, + "updateChannel": "str", + "zonalAllocationPolicy": "str", + }, + api_version="2024-11-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.redis.begin_delete( + resource_group_name=resource_group.name, + name="str", + api_version="2024-11-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.redis.get( + resource_group_name=resource_group.name, + name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_resource_group(self, resource_group): + response = self.client.redis.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2024-11-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_subscription(self, resource_group): + response = self.client.redis.list_by_subscription( + api_version="2024-11-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_keys(self, resource_group): + response = self.client.redis.list_keys( + resource_group_name=resource_group.name, + name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_regenerate_key(self, resource_group): + response = self.client.redis.regenerate_key( + resource_group_name=resource_group.name, + name="str", + parameters={"keyType": "str"}, + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_force_reboot(self, resource_group): + response = self.client.redis.force_reboot( + resource_group_name=resource_group.name, + name="str", + parameters={"ports": [0], "rebootType": "str", "shardId": 0}, + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_import_data(self, resource_group): + response = self.client.redis.begin_import_data( + resource_group_name=resource_group.name, + name="str", + parameters={ + "files": ["str"], + "format": "str", + "preferred-data-archive-auth-method": "str", + "storage-subscription-id": "str", + }, + api_version="2024-11-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_export_data(self, resource_group): + response = self.client.redis.begin_export_data( + resource_group_name=resource_group.name, + name="str", + parameters={ + "container": "str", + "prefix": "str", + "format": "str", + "preferred-data-archive-auth-method": "str", + "storage-subscription-id": "str", + }, + api_version="2024-11-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_flush_cache(self, resource_group): + response = self.client.redis.begin_flush_cache( + resource_group_name=resource_group.name, + cache_name="str", + api_version="2024-11-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_redis_operations_async.py b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_redis_operations_async.py new file mode 100644 index 000000000000..1255bf8f8447 --- /dev/null +++ b/sdk/redis/azure-mgmt-redis/generated_tests/test_redis_management_redis_operations_async.py @@ -0,0 +1,300 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.redis.aio import RedisManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestRedisManagementRedisOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(RedisManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_check_name_availability(self, resource_group): + response = await self.client.redis.check_name_availability( + parameters={"name": "str", "type": "str"}, + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_upgrade_notifications(self, resource_group): + response = self.client.redis.list_upgrade_notifications( + resource_group_name=resource_group.name, + name="str", + history=0.0, + api_version="2024-11-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create(self, resource_group): + response = await ( + await self.client.redis.begin_create( + resource_group_name=resource_group.name, + name="str", + parameters={ + "location": "str", + "sku": {"capacity": 0, "family": "str", "name": "str"}, + "disableAccessKeyAuthentication": False, + "enableNonSslPort": False, + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "minimumTlsVersion": "str", + "publicNetworkAccess": "Enabled", + "redisConfiguration": { + "aad-enabled": "str", + "aof-backup-enabled": "str", + "aof-storage-connection-string-0": "str", + "aof-storage-connection-string-1": "str", + "authnotrequired": "str", + "maxclients": "str", + "maxfragmentationmemory-reserved": "str", + "maxmemory-delta": "str", + "maxmemory-policy": "str", + "maxmemory-reserved": "str", + "notify-keyspace-events": "str", + "preferred-data-archive-auth-method": "str", + "preferred-data-persistence-auth-method": "str", + "rdb-backup-enabled": "str", + "rdb-backup-frequency": "str", + "rdb-backup-max-snapshot-count": "str", + "rdb-storage-connection-string": "str", + "storage-subscription-id": "str", + "zonal-configuration": "str", + }, + "redisVersion": "str", + "replicasPerMaster": 0, + "replicasPerPrimary": 0, + "shardCount": 0, + "staticIP": "str", + "subnetId": "str", + "tags": {"str": "str"}, + "tenantSettings": {"str": "str"}, + "updateChannel": "str", + "zonalAllocationPolicy": "str", + "zones": ["str"], + }, + api_version="2024-11-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
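The async long-running operations above use a nested await: the first await resolves the begin_create coroutine into an AsyncLROPoller, and the second awaits the poller's result() coroutine. Written out in two steps, an equivalent, illustrative form looks like this (the cache name and payload values are placeholders):

    poller = await self.client.redis.begin_create(
        resource_group_name=resource_group.name,
        name="my-cache",  # illustrative name
        parameters={
            "location": "eastus",
            "sku": {"name": "Basic", "family": "C", "capacity": 0},
        },
    )
    cache = await poller.result()  # poll until the operation reaches a terminal state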
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update(self, resource_group): + response = await ( + await self.client.redis.begin_update( + resource_group_name=resource_group.name, + name="str", + parameters={ + "disableAccessKeyAuthentication": False, + "enableNonSslPort": False, + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {"clientId": "str", "principalId": "str"}}, + }, + "minimumTlsVersion": "str", + "publicNetworkAccess": "Enabled", + "redisConfiguration": { + "aad-enabled": "str", + "aof-backup-enabled": "str", + "aof-storage-connection-string-0": "str", + "aof-storage-connection-string-1": "str", + "authnotrequired": "str", + "maxclients": "str", + "maxfragmentationmemory-reserved": "str", + "maxmemory-delta": "str", + "maxmemory-policy": "str", + "maxmemory-reserved": "str", + "notify-keyspace-events": "str", + "preferred-data-archive-auth-method": "str", + "preferred-data-persistence-auth-method": "str", + "rdb-backup-enabled": "str", + "rdb-backup-frequency": "str", + "rdb-backup-max-snapshot-count": "str", + "rdb-storage-connection-string": "str", + "storage-subscription-id": "str", + "zonal-configuration": "str", + }, + "redisVersion": "str", + "replicasPerMaster": 0, + "replicasPerPrimary": 0, + "shardCount": 0, + "sku": {"capacity": 0, "family": "str", "name": "str"}, + "tags": {"str": "str"}, + "tenantSettings": {"str": "str"}, + "updateChannel": "str", + "zonalAllocationPolicy": "str", + }, + api_version="2024-11-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.redis.begin_delete( + resource_group_name=resource_group.name, + name="str", + api_version="2024-11-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.redis.get( + resource_group_name=resource_group.name, + name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_resource_group(self, resource_group): + response = self.client.redis.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2024-11-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_subscription(self, resource_group): + response = self.client.redis.list_by_subscription( + api_version="2024-11-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_keys(self, resource_group): + response = await self.client.redis.list_keys( + resource_group_name=resource_group.name, + name="str", + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... 
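Unlike the single-resource calls, the async list operations are not awaited directly; they return async pagers, which is why the generated tests drain them with an async comprehension. An equivalent explicit loop (illustrative only):

    pager = self.client.redis.list_by_resource_group(
        resource_group_name=resource_group.name,
    )
    names = []
    async for cache in pager:  # pages are fetched lazily during iteration
        names.append(cache.name)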
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_regenerate_key(self, resource_group): + response = await self.client.redis.regenerate_key( + resource_group_name=resource_group.name, + name="str", + parameters={"keyType": "str"}, + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_force_reboot(self, resource_group): + response = await self.client.redis.force_reboot( + resource_group_name=resource_group.name, + name="str", + parameters={"ports": [0], "rebootType": "str", "shardId": 0}, + api_version="2024-11-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_import_data(self, resource_group): + response = await ( + await self.client.redis.begin_import_data( + resource_group_name=resource_group.name, + name="str", + parameters={ + "files": ["str"], + "format": "str", + "preferred-data-archive-auth-method": "str", + "storage-subscription-id": "str", + }, + api_version="2024-11-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_export_data(self, resource_group): + response = await ( + await self.client.redis.begin_export_data( + resource_group_name=resource_group.name, + name="str", + parameters={ + "container": "str", + "prefix": "str", + "format": "str", + "preferred-data-archive-auth-method": "str", + "storage-subscription-id": "str", + }, + api_version="2024-11-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_flush_cache(self, resource_group): + response = await ( + await self.client.redis.begin_flush_cache( + resource_group_name=resource_group.name, + cache_name="str", + api_version="2024-11-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/redis/azure-mgmt-redis/setup.py b/sdk/redis/azure-mgmt-redis/setup.py index d365e0297fbd..e673ef0b9339 100644 --- a/sdk/redis/azure-mgmt-redis/setup.py +++ b/sdk/redis/azure-mgmt-redis/setup.py @@ -75,6 +75,7 @@ }, install_requires=[ "isodate>=0.6.1", + "typing-extensions>=4.6.0", "azure-common>=1.1", "azure-mgmt-core>=1.3.2", ],
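Outside the recorded-test harness, the same operations can be exercised with a plain client. A minimal sketch, assuming azure-identity is installed and DefaultAzureCredential can authenticate against the target subscription (the subscription id below is a placeholder):

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.redis import RedisManagementClient

    with RedisManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
        for cache in client.redis.list_by_subscription():
            print(cache.name, cache.location)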