@@ -15,7 +15,7 @@
     overload, Tuple, TypeVar, Union, TYPE_CHECKING
 )

-from azure.core.exceptions import DecodeError, HttpResponseError, IncompleteReadError
+from azure.core.exceptions import DecodeError, HttpResponseError, IncompleteReadError, ServiceResponseError
 from azure.core.tracing.common import with_current_context

 from ._shared.request_handlers import validate_and_format_range_headers
@@ -235,7 +235,7 @@ def _download_chunk(self, chunk_start: int, chunk_end: int) -> Tuple[bytes, int]
             try:
                 chunk_data = process_content(response, offset[0], offset[1], self.encryption_options)
                 retry_active = False
-            except (IncompleteReadError, HttpResponseError, DecodeError) as error:
+            except (IncompleteReadError, HttpResponseError, DecodeError, ServiceResponseError) as error:
                 retry_total -= 1
                 if retry_total <= 0:
                     raise HttpResponseError(error, error=error) from error
@@ -522,7 +522,7 @@ def _initial_request(self):
                    self._encryption_options
                )
                retry_active = False
            except (IncompleteReadError, HttpResponseError, DecodeError) as error:
-            except (IncompleteReadError, HttpResponseError, DecodeError) as error:
+            except (IncompleteReadError, HttpResponseError, DecodeError, ServiceResponseError) as error:
                retry_total -= 1
                if retry_total <= 0:
                    raise HttpResponseError(error, error=error) from error
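For context, the hunks above touch the retry loops around process_content in the synchronous downloader (the test further down patches azure.storage.blob._download.process_content, which is where these loops live). The following is a condensed, hypothetical sketch of that pattern, not the SDK source: do_request and the flattened process_content signature are placeholders, and retry_total=3 is an assumed default.

from azure.core.exceptions import (
    DecodeError, HttpResponseError, IncompleteReadError, ServiceResponseError
)

def download_chunk_with_retries(do_request, process_content, retry_total=3):
    # Placeholder loop: ServiceResponseError is now retried alongside
    # IncompleteReadError, HttpResponseError and DecodeError.
    retry_active = True
    chunk_data = None
    while retry_active:
        response = do_request()  # stands in for the client range request
        try:
            chunk_data = process_content(response)
            retry_active = False
        except (IncompleteReadError, HttpResponseError, DecodeError, ServiceResponseError) as error:
            retry_total -= 1
            if retry_total <= 0:
                # Same shape as the SDK's exhausted-retry raise.
                raise HttpResponseError(error, error=error) from error
    return chunk_data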
@@ -19,7 +19,7 @@
     Tuple, TypeVar, Union, TYPE_CHECKING
 )

-from azure.core.exceptions import DecodeError, HttpResponseError, IncompleteReadError
+from azure.core.exceptions import DecodeError, HttpResponseError, IncompleteReadError, ServiceResponseError

 from .._shared.request_handlers import validate_and_format_range_headers
 from .._shared.response_handlers import parse_length_from_content_range, process_storage_error
@@ -144,7 +144,7 @@ async def _download_chunk(self, chunk_start: int, chunk_end: int) -> Tuple[bytes
             try:
                 chunk_data = await process_content(response, offset[0], offset[1], self.encryption_options)
                 retry_active = False
-            except (IncompleteReadError, HttpResponseError, DecodeError) as error:
+            except (IncompleteReadError, HttpResponseError, DecodeError, ServiceResponseError) as error:
                 retry_total -= 1
                 if retry_total <= 0:
                     raise HttpResponseError(error, error=error) from error
@@ -432,7 +432,7 @@ async def _initial_request(self):
                     self._encryption_options
                 )
                 retry_active = False
-            except (IncompleteReadError, HttpResponseError, DecodeError) as error:
+            except (IncompleteReadError, HttpResponseError, DecodeError, ServiceResponseError) as error:
                 retry_total -= 1
                 if retry_total <= 0:
                     raise HttpResponseError(error, error=error) from error
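The async hunks mirror the synchronous ones, and both give up the same way: raise HttpResponseError(error, error=error) from error. A small standalone illustration, outside the SDK, of what a caller can then inspect on the final exception, assuming azure-core's usual behavior where the error= keyword is surfaced as inner_exception:

from azure.core.exceptions import HttpResponseError, ServiceResponseError

original = ServiceResponseError("Connection reset by peer")
try:
    try:
        raise original
    except ServiceResponseError as error:
        # Same wrapping as the exhausted-retry path above.
        raise HttpResponseError(error, error=error) from error
except HttpResponseError as wrapped:
    assert wrapped.inner_exception is original  # carried by the error= kwarg
    assert wrapped.__cause__ is original        # carried by "from error"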
sdk/storage/azure-storage-blob/tests/test_retry.py (49 additions, 1 deletion)

@@ -13,7 +13,8 @@
     ClientAuthenticationError,
     HttpResponseError,
     ResourceExistsError,
-    ServiceResponseError
+    ServiceResponseError,
+    ServiceResponseTimeoutError
 )
 from azure.core.pipeline.transport import RequestsTransport
 from azure.storage.blob._shared.authentication import AzureSigningError
@@ -639,4 +640,51 @@ def assert_exception_retry_hook(**kwargs):

         assert retry_counter.count == 3

+    @BlobPreparer()
+    @recorded_by_proxy
+    def test_retry_on_service_response_error(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        container_name = self.get_resource_name('utcontainer')
+        blob_name = self.get_resource_name('blob')
+        service = self._create_storage_service(
+            BlobServiceClient, storage_account_name, storage_account_key, max_block_size=4)
+        container = service.create_container(container_name)
+        data = b'abcd' * 4
+        container.upload_blob(blob_name, data, overwrite=True)
+
+        retry = LinearRetry(backoff=1, random_jitter_range=1)
+        retry_counter = RetryCounter()
+        retry_service = self._create_storage_service(
+            BlobServiceClient,
+            storage_account_name,
+            storage_account_key,
+            retry_policy=retry,
+            max_block_size=4
+        )
+        blob = retry_service.get_blob_client(container_name, blob_name)
+
+        # Patch process_content to raise ServiceResponseError on the first call and ServiceResponseTimeoutError on the second
+        from azure.storage.blob._download import process_content as real_process_content
+
+        def mock_process_content_with_error(response, start_offset, end_offset, encryption):
+            retry_counter.simple_count(retry)
+            conn_error = AzureError("Connection reset by peer")
+            if retry_counter.count == 1:
+                raise ServiceResponseError(conn_error, error=conn_error)
+            elif retry_counter.count == 2:
+                raise ServiceResponseTimeoutError(conn_error, error=conn_error)
+            return real_process_content(response, start_offset, end_offset, encryption)
+
+        # Act
+        try:
+            with mock.patch('azure.storage.blob._download.process_content', side_effect=mock_process_content_with_error):
+                downloaded_data = blob.download_blob().readall()
+                assert downloaded_data == data
+                assert retry_counter.count >= 3
+        finally:
+            service.delete_container(container_name)
+
 # ------------------------------------------------------------------------------
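Both tests rely on a RetryCounter helper with a simple_count method that is defined elsewhere in the test suite and not shown in this diff. A rough sketch of the assumed behavior, only so this excerpt reads on its own (the real helper may differ):

class RetryCounter:
    # Hypothetical stand-in for the shared test helper: it just tallies how many
    # times the patched process_content (or a retry hook) gets invoked.
    def __init__(self):
        self.count = 0

    def simple_count(self, retry_context):
        # The retry policy is accepted but unused in this sketch.
        self.count += 1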
sdk/storage/azure-storage-blob/tests/test_retry_async.py (49 additions, 1 deletion)

@@ -17,7 +17,8 @@
     IncompleteReadError,
     HttpResponseError,
     ResourceExistsError,
-    ServiceResponseError
+    ServiceResponseError,
+    ServiceResponseTimeoutError
 )
 from azure.core.pipeline.transport import AioHttpTransport
 from azure.storage.blob import LocationMode
@@ -618,4 +619,51 @@ def assert_exception_retry_hook(**kwargs):

         assert retry_counter.count == 3

+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_retry_on_service_response_error(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        container_name = self.get_resource_name('utcontainer')
+        blob_name = self.get_resource_name('blob')
+        service = self._create_storage_service(
+            BlobServiceClient, storage_account_name, storage_account_key, max_block_size=4)
+        container = await service.create_container(container_name)
+        data = b'abcd' * 4
+        await container.upload_blob(blob_name, data, overwrite=True)
+
+        retry = LinearRetry(backoff=1, random_jitter_range=1)
+        retry_counter = RetryCounter()
+        retry_service = self._create_storage_service(
+            BlobServiceClient,
+            storage_account_name,
+            storage_account_key,
+            retry_policy=retry,
+            max_block_size=4
+        )
+        blob = retry_service.get_blob_client(container_name, blob_name)
+
+        # Patch process_content to raise ServiceResponseError on the first call and ServiceResponseTimeoutError on the second
+        from azure.storage.blob.aio._download_async import process_content as real_process_content
+
+        async def mock_process_content_with_error(response, start_offset, end_offset, encryption):
+            retry_counter.simple_count(retry)
+            conn_error = AzureError("Connection reset by peer")
+            if retry_counter.count == 1:
+                raise ServiceResponseError(conn_error, error=conn_error)
+            elif retry_counter.count == 2:
+                raise ServiceResponseTimeoutError(conn_error, error=conn_error)
+            return await real_process_content(response, start_offset, end_offset, encryption)
+
+        # Act
+        try:
+            with mock.patch('azure.storage.blob.aio._download_async.process_content', side_effect=mock_process_content_with_error):
+                downloaded_data = await (await blob.download_blob()).readall()
+                assert downloaded_data == data
+                assert retry_counter.count >= 3
+        finally:
+            await service.delete_container(container_name)
+
 # ------------------------------------------------------------------------------
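Note that both tests raise ServiceResponseTimeoutError on the second attempt even though the download code only adds ServiceResponseError to its except tuples. That works because, in azure.core.exceptions, the timeout error derives from ServiceResponseError, so catching the base class covers both:

from azure.core.exceptions import ServiceResponseError, ServiceResponseTimeoutError

# ServiceResponseTimeoutError subclasses ServiceResponseError, so the single new
# entry in the except tuples above also retries the timeout variant.
assert issubclass(ServiceResponseTimeoutError, ServiceResponseError)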