
Commit b369340

Address comments on PR #3585:

- Type fixes
- Better organize error handling validation for batch error response

1 parent e06237c · commit b369340

File tree

5 files changed: +56 additions, -43 deletions

tests/core/providers/test_async_http_provider.py

Lines changed: 1 addition & 1 deletion

@@ -128,5 +128,5 @@ async def test_async_http_empty_batch_response(mock_async_post):
     with pytest.raises(Web3RPCError, match="empty batch"):
         await batch.async_execute()
 
-    # assert that event though there was an error, we have reset the batching state
+    # assert that even though there was an error, we have reset the batching state
     assert not async_w3.provider._is_batching

tests/core/providers/test_async_ipc_provider.py

Lines changed: 1 addition & 1 deletion

@@ -386,5 +386,5 @@ async def test_persistent_connection_provider_empty_batch_response(
     with pytest.raises(Web3RPCError, match="empty batch"):
         await batch.async_execute()
 
-    # assert that event though there was an error, we have reset the batching state
+    # assert that even though there was an error, we have reset the batching state
     assert not async_w3.provider._is_batching

tests/core/providers/test_http_provider.py

Lines changed: 1 addition & 1 deletion

@@ -130,5 +130,5 @@ def test_http_empty_batch_response(mock_post):
     with pytest.raises(Web3RPCError, match="empty batch"):
         batch.execute()
 
-    # assert that event though there was an error, we have reset the batching state
+    # assert that even though there was an error, we have reset the batching state
    assert not w3.provider._is_batching

tests/core/providers/test_websocket_provider.py

Lines changed: 1 addition & 1 deletion

@@ -451,6 +451,6 @@ async def test_persistent_connection_provider_empty_batch_response():
     with pytest.raises(Web3RPCError, match="empty batch"):
         await batch.async_execute()
 
-    # assert that event though there was an error, we have reset the batching
+    # assert that even though there was an error, we have reset the batching
     # state
     assert not async_w3.provider._is_batching
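
All four comment fixes touch the same test pattern: a batch is executed against a provider mocked to return an empty batch response, the Web3RPCError is asserted, and the provider's batching flag is checked afterwards. A minimal sketch of that shape, hedged: `mocked_w3` is a hypothetical fixture standing in for the mock_post / mock_async_post fixtures used in the real tests.

import pytest
from web3.exceptions import Web3RPCError


def test_empty_batch_response_resets_batching_state(mocked_w3):
    # hypothetical fixture: the provider's POST call returns "[]" for the batch
    batch = mocked_w3.batch_requests()
    batch.add(mocked_w3.eth.get_block(1))

    with pytest.raises(Web3RPCError, match="empty batch"):
        batch.execute()

    # even though execute() raised, the provider must leave batching mode
    assert not mocked_w3.provider._is_batching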

web3/manager.py

Lines changed: 52 additions & 39 deletions

@@ -8,6 +8,7 @@
     Coroutine,
     Dict,
     List,
+    NoReturn,
     Optional,
     Sequence,
     Tuple,
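
The new `NoReturn` import supports the helper added below, which always raises. A small, generic illustration of the annotation (not from this diff); type checkers treat code after a call to such a function as unreachable:

from typing import NoReturn


def always_raises(message: str) -> NoReturn:
    # a NoReturn function never returns normally; it always raises (or never terminates)
    raise ValueError(message)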
@@ -266,6 +267,30 @@ def _validate_response(
         _raise_bad_response_format(response)
 
 
+def _raise_error_for_batch_response(
+    response: RPCResponse,
+    logger: Optional[logging.Logger] = None,
+) -> NoReturn:
+    error = response.get("error")
+    if error is None:
+        _raise_bad_response_format(
+            response,
+            "Batch response must be formatted as a list of responses or "
+            "as a single JSON-RPC error response.",
+        )
+    _validate_response(
+        response,
+        None,
+        is_subscription_response=False,
+        logger=logger,
+        params=[],
+    )
+    # This should not be reached, but if it is, raise a generic `BadResponseFormat`
+    raise BadResponseFormat(
+        "Batch response was in an unexpected format and unable to be parsed."
+    )
+
+
 class RequestManager:
     logger = logging.getLogger("web3.manager.RequestManager")
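
The new helper covers the case where a node answers a batch request with a single JSON-RPC error object rather than a list of responses. A hedged sketch of the kind of payload that takes this path; the error code and message are illustrative, not taken from the diff:

# Illustrative only: a non-list batch response carrying a JSON-RPC error.
single_error_response = {
    "jsonrpc": "2.0",
    "id": None,
    "error": {"code": -32600, "message": "empty batch"},
}

# Handing this to _raise_error_for_batch_response(single_error_response, logger)
# is expected to surface the error through _validate_response (e.g. as a
# Web3RPCError); a non-list response with no "error" key raises
# BadResponseFormat instead.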

@@ -443,22 +468,16 @@ def _make_batch_request(
             ]
         )
 
-        if not isinstance(response, list):
-            # Expect a JSON-RPC error response and call _validate_response to raise
-            # the appropriate exception
-            _validate_response(
-                cast(RPCResponse, response),
-                None,
-                is_subscription_response=False,
-                logger=self.logger,
-                params=[],
-            )
-
-        formatted_responses = [
-            self._format_batched_response(info, cast(RPCResponse, resp))
-            for info, resp in zip(requests_info, response)
-        ]
-        return list(formatted_responses)
+        if isinstance(response, list):
+            # expected format
+            formatted_responses = [
+                self._format_batched_response(info, cast(RPCResponse, resp))
+                for info, resp in zip(requests_info, response)
+            ]
+            return list(formatted_responses)
+        else:
+            # expect a single response with an error
+            _raise_error_for_batch_response(response, self.logger)
 
     async def _async_make_batch_request(
         self,
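
From the caller's side nothing changes: a well-formed batch still returns one result per request, while a non-list error payload now raises through `_raise_error_for_batch_response`. A hedged usage sketch against the public batching API; the endpoint URL is a placeholder:

from web3 import Web3, HTTPProvider

w3 = Web3(HTTPProvider("http://localhost:8545"))  # placeholder endpoint

batch = w3.batch_requests()
batch.add(w3.eth.get_block(1))
batch.add(w3.eth.get_block("latest"))

# On the expected list-shaped response this returns one result per request;
# if the node instead answers with a single JSON-RPC error object, execute()
# now raises (e.g. Web3RPCError) rather than mis-parsing the payload.
results = batch.execute()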
@@ -484,30 +503,24 @@ async def _async_make_batch_request(
             ]
         )
 
-        if not isinstance(response, list):
-            # Expect a JSON-RPC error response and call _validate_response to raise
-            # the appropriate exception
-            _validate_response(
-                cast(RPCResponse, response),
-                None,
-                is_subscription_response=False,
-                logger=self.logger,
-                params=[],
-            )
-
-        response = cast(List[RPCResponse], response)
-        if isinstance(self.provider, PersistentConnectionProvider):
-            # call _process_response for each response in the batch
-            return [
-                cast(RPCResponse, await self._process_response(resp))
-                for resp in response
+        if isinstance(response, list):
+            # expected format
+            response = cast(List[RPCResponse], response)
+            if isinstance(self.provider, PersistentConnectionProvider):
+                # call _process_response for each response in the batch
+                return [
+                    cast(RPCResponse, await self._process_response(resp))
+                    for resp in response
+                ]
+
+            formatted_responses = [
+                self._format_batched_response(info, resp)
+                for info, resp in zip(unpacked_requests_info, response)
             ]
-
-        formatted_responses = [
-            self._format_batched_response(info, resp)
-            for info, resp in zip(unpacked_requests_info, response)
-        ]
-        return list(formatted_responses)
+            return list(formatted_responses)
+        else:
+            # expect a single response with an error
+            _raise_error_for_batch_response(response, self.logger)
 
     def _format_batched_response(
         self,
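
The async path mirrors the sync one, with the extra per-response processing step for persistent connection providers. A hedged async usage sketch; the endpoint URL is a placeholder:

import asyncio

from web3 import AsyncWeb3, AsyncHTTPProvider


async def main() -> None:
    async_w3 = AsyncWeb3(AsyncHTTPProvider("http://localhost:8545"))  # placeholder

    batch = async_w3.batch_requests()
    batch.add(async_w3.eth.get_block(1))
    batch.add(async_w3.eth.get_block("latest"))

    # A list response is formatted per request (and, for persistent connection
    # providers, run through _process_response first); a single error object
    # raises via _raise_error_for_batch_response.
    results = await batch.async_execute()
    print(results)


asyncio.run(main())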
