Skip to content

Commit 1a5d18a

Browse files
committed
Apply logic to sync_substrate as well
1 parent 05ddb4e commit 1a5d18a

File tree

1 file changed

+12
-57
lines changed

1 file changed

+12
-57
lines changed

async_substrate_interface/sync_substrate.py

Lines changed: 12 additions & 57 deletions
Original file line number | Diff line number | Diff line change
@@ -34,6 +34,7 @@
3434
from async_substrate_interface.utils.decoding import (
3535
_determine_if_old_runtime_call,
3636
_bt_decode_to_dict_or_list,
37+
decode_query_map,
3738
)
3839
from async_substrate_interface.utils.storage import StorageKey
3940
from async_substrate_interface.type_registry import _TYPE_REGISTRY
@@ -2600,7 +2601,7 @@ def query_map(
26002601
block_hash = self._get_current_block_hash(block_hash, reuse_block_hash)
26012602
if block_hash:
26022603
self.last_block_hash = block_hash
2603-
self.init_runtime(block_hash=block_hash)
2604+
runtime = self.init_runtime(block_hash=block_hash)
26042605

26052606
metadata_pallet = self.runtime.metadata.get_metadata_pallet(module)
26062607
if not metadata_pallet:
@@ -2656,19 +2657,6 @@ def query_map(
26562657
result = []
26572658
last_key = None
26582659

2659-
def concat_hash_len(key_hasher: str) -> int:
2660-
"""
2661-
Helper function to avoid if statements
2662-
"""
2663-
if key_hasher == "Blake2_128Concat":
2664-
return 16
2665-
elif key_hasher == "Twox64Concat":
2666-
return 8
2667-
elif key_hasher == "Identity":
2668-
return 0
2669-
else:
2670-
raise ValueError("Unsupported hash type")
2671-
26722660
if len(result_keys) > 0:
26732661
last_key = result_keys[-1]
26742662

@@ -2681,49 +2669,16 @@ def concat_hash_len(key_hasher: str) -> int:
26812669
raise SubstrateRequestException(response["error"]["message"])
26822670

26832671
for result_group in response["result"]:
2684-
for item in result_group["changes"]:
2685-
try:
2686-
# Determine type string
2687-
key_type_string = []
2688-
for n in range(len(params), len(param_types)):
2689-
key_type_string.append(
2690-
f"[u8; {concat_hash_len(key_hashers[n])}]"
2691-
)
2692-
key_type_string.append(param_types[n])
2693-
2694-
item_key_obj = self.decode_scale(
2695-
type_string=f"({', '.join(key_type_string)})",
2696-
scale_bytes=bytes.fromhex(item[0][len(prefix) :]),
2697-
return_scale_obj=True,
2698-
)
2699-
2700-
# strip key_hashers to use as item key
2701-
if len(param_types) - len(params) == 1:
2702-
item_key = item_key_obj[1]
2703-
else:
2704-
item_key = tuple(
2705-
item_key_obj[key + 1]
2706-
for key in range(len(params), len(param_types) + 1, 2)
2707-
)
2708-
2709-
except Exception as _:
2710-
if not ignore_decoding_errors:
2711-
raise
2712-
item_key = None
2713-
2714-
try:
2715-
item_bytes = hex_to_bytes_(item[1])
2716-
2717-
item_value = self.decode_scale(
2718-
type_string=value_type,
2719-
scale_bytes=item_bytes,
2720-
return_scale_obj=True,
2721-
)
2722-
except Exception as _:
2723-
if not ignore_decoding_errors:
2724-
raise
2725-
item_value = None
2726-
result.append([item_key, item_value])
2672+
result = decode_query_map(
2673+
result_group["changes"],
2674+
prefix,
2675+
runtime,
2676+
param_types,
2677+
params,
2678+
value_type,
2679+
key_hashers,
2680+
ignore_decoding_errors,
2681+
)
27272682
return QueryMapResult(
27282683
records=result,
27292684
page_size=page_size,

0 commit comments

Comments (0)