Skip to content

Commit 8654fbc

Browse files
authored
Merge pull request #84 from opentensor/experimental/thewhaleking/cache-decoding
Improved Query Map Decodes
2 parents e4c55c9 + 5a6946a commit 8654fbc

File tree

5 files changed

+228
-125
lines changed

5 files changed

+228
-125
lines changed

async_substrate_interface/async_substrate.py

Lines changed: 18 additions & 62 deletions
Original file line numberDiff line numberDiff line change
@@ -56,6 +56,9 @@
5656
)
5757
from async_substrate_interface.utils.storage import StorageKey
5858
from async_substrate_interface.type_registry import _TYPE_REGISTRY
59+
from async_substrate_interface.utils.decoding import (
60+
decode_query_map,
61+
)
5962

6063
if TYPE_CHECKING:
6164
from websockets.asyncio.client import ClientConnection
@@ -898,7 +901,7 @@ async def decode_scale(
898901
else:
899902
return obj
900903

901-
async def load_runtime(self, runtime):
904+
def load_runtime(self, runtime):
902905
self.runtime = runtime
903906

904907
# Update type registry
@@ -954,7 +957,7 @@ async def init_runtime(
954957
)
955958

956959
if self.runtime and runtime_version == self.runtime.runtime_version:
957-
return
960+
return self.runtime
958961

959962
runtime = self.runtime_cache.retrieve(runtime_version=runtime_version)
960963
if not runtime:
@@ -990,7 +993,7 @@ async def init_runtime(
990993
runtime_version=runtime_version, runtime=runtime
991994
)
992995

993-
await self.load_runtime(runtime)
996+
self.load_runtime(runtime)
994997

995998
if self.ss58_format is None:
996999
# Check and apply runtime constants
@@ -1000,6 +1003,7 @@ async def init_runtime(
10001003

10011004
if ss58_prefix_constant:
10021005
self.ss58_format = ss58_prefix_constant
1006+
return runtime
10031007

10041008
async def create_storage_key(
10051009
self,
@@ -2892,12 +2896,11 @@ async def query_map(
28922896
Returns:
28932897
AsyncQueryMapResult object
28942898
"""
2895-
hex_to_bytes_ = hex_to_bytes
28962899
params = params or []
28972900
block_hash = await self._get_current_block_hash(block_hash, reuse_block_hash)
28982901
if block_hash:
28992902
self.last_block_hash = block_hash
2900-
await self.init_runtime(block_hash=block_hash)
2903+
runtime = await self.init_runtime(block_hash=block_hash)
29012904

29022905
metadata_pallet = self.runtime.metadata.get_metadata_pallet(module)
29032906
if not metadata_pallet:
@@ -2952,19 +2955,6 @@ async def query_map(
29522955
result = []
29532956
last_key = None
29542957

2955-
def concat_hash_len(key_hasher: str) -> int:
2956-
"""
2957-
Helper function to avoid if statements
2958-
"""
2959-
if key_hasher == "Blake2_128Concat":
2960-
return 16
2961-
elif key_hasher == "Twox64Concat":
2962-
return 8
2963-
elif key_hasher == "Identity":
2964-
return 0
2965-
else:
2966-
raise ValueError("Unsupported hash type")
2967-
29682958
if len(result_keys) > 0:
29692959
last_key = result_keys[-1]
29702960

@@ -2975,51 +2965,17 @@ def concat_hash_len(key_hasher: str) -> int:
29752965

29762966
if "error" in response:
29772967
raise SubstrateRequestException(response["error"]["message"])
2978-
29792968
for result_group in response["result"]:
2980-
for item in result_group["changes"]:
2981-
try:
2982-
# Determine type string
2983-
key_type_string = []
2984-
for n in range(len(params), len(param_types)):
2985-
key_type_string.append(
2986-
f"[u8; {concat_hash_len(key_hashers[n])}]"
2987-
)
2988-
key_type_string.append(param_types[n])
2989-
2990-
item_key_obj = await self.decode_scale(
2991-
type_string=f"({', '.join(key_type_string)})",
2992-
scale_bytes=bytes.fromhex(item[0][len(prefix) :]),
2993-
return_scale_obj=True,
2994-
)
2995-
2996-
# strip key_hashers to use as item key
2997-
if len(param_types) - len(params) == 1:
2998-
item_key = item_key_obj[1]
2999-
else:
3000-
item_key = tuple(
3001-
item_key_obj[key + 1]
3002-
for key in range(len(params), len(param_types) + 1, 2)
3003-
)
3004-
3005-
except Exception as _:
3006-
if not ignore_decoding_errors:
3007-
raise
3008-
item_key = None
3009-
3010-
try:
3011-
item_bytes = hex_to_bytes_(item[1])
3012-
3013-
item_value = await self.decode_scale(
3014-
type_string=value_type,
3015-
scale_bytes=item_bytes,
3016-
return_scale_obj=True,
3017-
)
3018-
except Exception as _:
3019-
if not ignore_decoding_errors:
3020-
raise
3021-
item_value = None
3022-
result.append([item_key, item_value])
2969+
result = decode_query_map(
2970+
result_group["changes"],
2971+
prefix,
2972+
runtime,
2973+
param_types,
2974+
params,
2975+
value_type,
2976+
key_hashers,
2977+
ignore_decoding_errors,
2978+
)
30232979
return AsyncQueryMapResult(
30242980
records=result,
30252981
page_size=page_size,

async_substrate_interface/sync_substrate.py

Lines changed: 17 additions & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,7 @@
3434
from async_substrate_interface.utils.decoding import (
3535
_determine_if_old_runtime_call,
3636
_bt_decode_to_dict_or_list,
37+
decode_query_map,
3738
)
3839
from async_substrate_interface.utils.storage import StorageKey
3940
from async_substrate_interface.type_registry import _TYPE_REGISTRY
@@ -525,7 +526,9 @@ def __enter__(self):
525526
return self
526527

527528
def __del__(self):
    """Best-effort cleanup of the websocket connection on garbage collection.

    ``__del__`` may run during interpreter shutdown, when ``self.ws`` (or the
    modules it relies on) can already be torn down — a finalizer must never
    raise, so any failure here is deliberately swallowed.
    """
    try:
        self.ws.close()
    except Exception:
        # Attribute may be missing/half-destroyed at teardown; ignore.
        pass
529532

530533
def initialize(self):
531534
"""
@@ -703,7 +706,7 @@ def init_runtime(
703706
)
704707

705708
if self.runtime and runtime_version == self.runtime.runtime_version:
706-
return
709+
return self.runtime
707710

708711
runtime = self.runtime_cache.retrieve(runtime_version=runtime_version)
709712
if not runtime:
@@ -757,6 +760,7 @@ def init_runtime(
757760

758761
if ss58_prefix_constant:
759762
self.ss58_format = ss58_prefix_constant
763+
return runtime
760764

761765
def create_storage_key(
762766
self,
@@ -2598,7 +2602,7 @@ def query_map(
25982602
block_hash = self._get_current_block_hash(block_hash, reuse_block_hash)
25992603
if block_hash:
26002604
self.last_block_hash = block_hash
2601-
self.init_runtime(block_hash=block_hash)
2605+
runtime = self.init_runtime(block_hash=block_hash)
26022606

26032607
metadata_pallet = self.runtime.metadata.get_metadata_pallet(module)
26042608
if not metadata_pallet:
@@ -2654,19 +2658,6 @@ def query_map(
26542658
result = []
26552659
last_key = None
26562660

2657-
def concat_hash_len(key_hasher: str) -> int:
2658-
"""
2659-
Helper function to avoid if statements
2660-
"""
2661-
if key_hasher == "Blake2_128Concat":
2662-
return 16
2663-
elif key_hasher == "Twox64Concat":
2664-
return 8
2665-
elif key_hasher == "Identity":
2666-
return 0
2667-
else:
2668-
raise ValueError("Unsupported hash type")
2669-
26702661
if len(result_keys) > 0:
26712662
last_key = result_keys[-1]
26722663

@@ -2679,49 +2670,16 @@ def concat_hash_len(key_hasher: str) -> int:
26792670
raise SubstrateRequestException(response["error"]["message"])
26802671

26812672
for result_group in response["result"]:
2682-
for item in result_group["changes"]:
2683-
try:
2684-
# Determine type string
2685-
key_type_string = []
2686-
for n in range(len(params), len(param_types)):
2687-
key_type_string.append(
2688-
f"[u8; {concat_hash_len(key_hashers[n])}]"
2689-
)
2690-
key_type_string.append(param_types[n])
2691-
2692-
item_key_obj = self.decode_scale(
2693-
type_string=f"({', '.join(key_type_string)})",
2694-
scale_bytes=bytes.fromhex(item[0][len(prefix) :]),
2695-
return_scale_obj=True,
2696-
)
2697-
2698-
# strip key_hashers to use as item key
2699-
if len(param_types) - len(params) == 1:
2700-
item_key = item_key_obj[1]
2701-
else:
2702-
item_key = tuple(
2703-
item_key_obj[key + 1]
2704-
for key in range(len(params), len(param_types) + 1, 2)
2705-
)
2706-
2707-
except Exception as _:
2708-
if not ignore_decoding_errors:
2709-
raise
2710-
item_key = None
2711-
2712-
try:
2713-
item_bytes = hex_to_bytes_(item[1])
2714-
2715-
item_value = self.decode_scale(
2716-
type_string=value_type,
2717-
scale_bytes=item_bytes,
2718-
return_scale_obj=True,
2719-
)
2720-
except Exception as _:
2721-
if not ignore_decoding_errors:
2722-
raise
2723-
item_value = None
2724-
result.append([item_key, item_value])
2673+
result = decode_query_map(
2674+
result_group["changes"],
2675+
prefix,
2676+
runtime,
2677+
param_types,
2678+
params,
2679+
value_type,
2680+
key_hashers,
2681+
ignore_decoding_errors,
2682+
)
27252683
return QueryMapResult(
27262684
records=result,
27272685
page_size=page_size,

async_substrate_interface/utils/decoding.py

Lines changed: 92 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,14 @@
1-
from typing import Union
1+
from typing import Union, TYPE_CHECKING
22

3-
from bt_decode import AxonInfo, PrometheusInfo
3+
from bt_decode import AxonInfo, PrometheusInfo, decode_list
4+
from scalecodec import ss58_encode
5+
from bittensor_wallet.utils import SS58_FORMAT
6+
7+
from async_substrate_interface.utils import hex_to_bytes
8+
from async_substrate_interface.types import ScaleObj
9+
10+
if TYPE_CHECKING:
11+
from async_substrate_interface.types import Runtime
412

513

614
def _determine_if_old_runtime_call(runtime_call_def, metadata_v15_value) -> bool:
@@ -44,3 +52,85 @@ def _bt_decode_to_dict_or_list(obj) -> Union[dict, list[dict]]:
4452
else:
4553
as_dict[key] = val
4654
return as_dict
55+
56+
57+
def _decode_scale_list_with_runtime(
    type_strings: list[str],
    scale_bytes_list: list[bytes],
    runtime_registry,
    return_scale_obj: bool = False,
):
    """Decode a batch of SCALE-encoded values in one call.

    Args:
        type_strings: SCALE type string for each entry of ``scale_bytes_list``.
        scale_bytes_list: raw SCALE bytes, positionally paired with
            ``type_strings``.
        runtime_registry: runtime type registry passed through to
            ``decode_list``.
        return_scale_obj: when True, wrap each decoded value in ``ScaleObj``.

    Returns:
        The list of decoded values, optionally ``ScaleObj``-wrapped.
    """
    decoded = decode_list(type_strings, runtime_registry, scale_bytes_list)
    return [ScaleObj(item) for item in decoded] if return_scale_obj else decoded
68+
69+
70+
def decode_query_map(
    result_group_changes,
    prefix,
    runtime: "Runtime",
    param_types,
    params,
    value_type,
    key_hashers,
    ignore_decoding_errors,
):
    """Decode the raw key/value changes of a storage query-map response.

    Keys and values are SCALE-decoded in a single batched call (via
    ``_decode_scale_list_with_runtime``), then the hasher-concat portions are
    stripped from each key so only the meaningful key components remain.

    Args:
        result_group_changes: list of ``[hex_key, hex_value]`` pairs as
            returned by the node for one result group.
        prefix: hex storage-key prefix to strip from each raw key.
        runtime: the Runtime whose ``registry`` is used for decoding.
        param_types: SCALE type strings of all map key parameters.
        params: the key parameters already supplied by the caller (these are
            part of ``prefix`` and are not decoded again).
        value_type: SCALE type string of the map's value.
        key_hashers: hasher name per key parameter, indexed like
            ``param_types``.
        ignore_decoding_errors: when True, a key that fails to unpack yields
            ``None`` instead of raising.

    Returns:
        A list of ``[item_key, item_value]`` pairs, values wrapped in
        ``ScaleObj``.
    """

    def concat_hash_len(key_hasher: str) -> int:
        """Byte length of the hash prepended before the raw key material."""
        if key_hasher == "Blake2_128Concat":
            return 16
        elif key_hasher == "Twox64Concat":
            return 8
        elif key_hasher == "Identity":
            return 0
        else:
            raise ValueError("Unsupported hash type")

    result = []
    # Build the composite key type string: each still-undetermined key part is
    # preceded by its hasher's concat bytes, modeled as a fixed-size u8 array.
    key_type_parts = []
    for n in range(len(params), len(param_types)):
        key_type_parts.append(f"[u8; {concat_hash_len(key_hashers[n])}]")
        key_type_parts.append(param_types[n])
    key_type_string = f"({', '.join(key_type_parts)})"

    pre_decoded_keys = []
    pre_decoded_key_types = [key_type_string] * len(result_group_changes)
    pre_decoded_values = []
    pre_decoded_value_types = [value_type] * len(result_group_changes)

    for item in result_group_changes:
        pre_decoded_keys.append(bytes.fromhex(item[0][len(prefix):]))
        pre_decoded_values.append(hex_to_bytes(item[1]))

    # One batched decode for all keys and values: keys occupy the first half
    # of the output, values the second half.
    all_decoded = _decode_scale_list_with_runtime(
        pre_decoded_key_types + pre_decoded_value_types,
        pre_decoded_keys + pre_decoded_values,
        runtime.registry,
    )
    middle_index = len(all_decoded) // 2
    decoded_keys = all_decoded[:middle_index]
    decoded_values = [ScaleObj(x) for x in all_decoded[middle_index:]]

    for dk, dv in zip(decoded_keys, decoded_values):
        try:
            # Strip the hasher-concat entries so only key values remain.
            if len(param_types) - len(params) == 1:
                item_key = dk[1]
            else:
                item_key = tuple(
                    dk[key + 1] for key in range(len(params), len(param_types) + 1, 2)
                )
        except Exception:
            if not ignore_decoding_errors:
                raise
            item_key = None

        result.append([item_key, dv])
    return result

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ dependencies = [
1010
"wheel",
1111
"asyncstdlib~=3.13.0",
1212
"bittensor-wallet>=2.1.3",
13-
"bt-decode==v0.5.0",
13+
"bt-decode==v0.6.0",
1414
"scalecodec~=1.2.11",
1515
"websockets>=14.1",
1616
"xxhash"

0 commit comments

Comments
 (0)