Skip to content

Commit

Permalink
Updated RPC methods to work with chopsticks (#331)
Browse files Browse the repository at this point in the history
* Updated RPC methods to work with chopsticks
Added backwards compatibility with older RPC

* Added RPC compatibility unit tests
  • Loading branch information
arjanz committed Mar 20, 2023
1 parent 6670599 commit 58f4bfe
Show file tree
Hide file tree
Showing 4 changed files with 280 additions and 65 deletions.
147 changes: 85 additions & 62 deletions substrateinterface/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,7 +226,10 @@ def supports_rpc_method(self, name: str) -> bool:
bool
"""
if self.config.get('rpc_methods') is None:
self.config['rpc_methods'] = self.rpc_request("rpc_methods", [])['result']['methods']
self.config['rpc_methods'] = []
result = self.rpc_request("rpc_methods", []).get('result')
if result:
self.config['rpc_methods'] = result.get('methods', [])

return name in self.config['rpc_methods']

Expand Down Expand Up @@ -428,7 +431,10 @@ def get_chain_head(self):
-------
"""
response = self.rpc_request("chain_getHead", [])
if self.supports_rpc_method("chain_getHead"):
response = self.rpc_request("chain_getHead", [])
else:
response = self.rpc_request("chain_getBlockHash", [])

if response is not None:
if 'error' in response:
Expand All @@ -438,21 +444,21 @@ def get_chain_head(self):

def get_chain_finalised_head(self):
"""
A pass-through to existing JSONRPC method `chain_getFinalisedHead`
A pass-through to existing JSONRPC method `chain_getFinalizedHead`
Returns
-------
"""
response = self.rpc_request("chain_getFinalisedHead", [])
response = self.rpc_request("chain_getFinalizedHead", [])

if response is not None:
if 'error' in response:
raise SubstrateRequestException(response['error']['message'])

return response.get('result')

def get_block_hash(self, block_id: int) -> str:
def get_block_hash(self, block_id: int = None) -> str:
"""
A pass-through to existing JSONRPC method `chain_getBlockHash`
Expand Down Expand Up @@ -526,7 +532,7 @@ def get_block_metadata(self, block_hash=None, decode=True):

def get_storage_by_key(self, block_hash: str, storage_key: str):
"""
A pass-through to existing JSONRPC method `state_getStorageAt`
A pass-through to existing JSONRPC method `state_getStorage`
Parameters
----------
Expand All @@ -538,7 +544,11 @@ def get_storage_by_key(self, block_hash: str, storage_key: str):
"""

response = self.rpc_request("state_getStorageAt", [storage_key, block_hash])
if self.supports_rpc_method('state_getStorageAt'):
response = self.rpc_request("state_getStorageAt", [storage_key, block_hash])
else:
response = self.rpc_request("state_getStorage", [storage_key, block_hash])

if 'result' in response:
return response.get('result')
elif 'error' in response:
Expand All @@ -557,7 +567,10 @@ def get_block_runtime_version(self, block_hash):
-------
"""
response = self.rpc_request("chain_getRuntimeVersion", [block_hash])
if self.supports_rpc_method("state_getRuntimeVersion"):
response = self.rpc_request("state_getRuntimeVersion", [block_hash])
else:
response = self.rpc_request("chain_getRuntimeVersion", [block_hash])

if 'error' in response:
raise SubstrateRequestException(response['error']['message'])
Expand Down Expand Up @@ -948,7 +961,10 @@ def result_handler(storage_key, updated_obj, update_nr, subscription_id):

else:

response = self.rpc_request("state_getStorageAt", [storage_key.to_hex(), block_hash])
if self.supports_rpc_method('state_getStorageAt'):
response = self.rpc_request("state_getStorageAt", [storage_key.to_hex(), block_hash])
else:
response = self.rpc_request("state_getStorage", [storage_key.to_hex(), block_hash])

if 'error' in response:
raise SubstrateRequestException(response['error']['message'])
Expand Down Expand Up @@ -1667,17 +1683,21 @@ def submit_extrinsic(self, extrinsic: GenericExtrinsic, wait_for_inclusion: bool
def result_handler(message, update_nr, subscription_id):
# Check if extrinsic is included and finalized
if 'params' in message and type(message['params']['result']) is dict:
if 'finalized' in message['params']['result'] and wait_for_finalization:

# Convert result enum to lower for backwards compatibility
message_result = {k.lower(): v for k, v in message['params']['result'].items()}

if 'finalized' in message_result and wait_for_finalization:
self.rpc_request('author_unwatchExtrinsic', [subscription_id])
return {
'block_hash': message['params']['result']['finalized'],
'block_hash': message_result['finalized'],
'extrinsic_hash': '0x{}'.format(extrinsic.extrinsic_hash.hex()),
'finalized': True
}
elif 'inBlock' in message['params']['result'] and wait_for_inclusion and not wait_for_finalization:
elif 'inblock' in message_result and wait_for_inclusion and not wait_for_finalization:
self.rpc_request('author_unwatchExtrinsic', [subscription_id])
return {
'block_hash': message['params']['result']['inBlock'],
'block_hash': message_result['inblock'],
'extrinsic_hash': '0x{}'.format(extrinsic.extrinsic_hash.hex()),
'finalized': False
}
Expand Down Expand Up @@ -2211,7 +2231,9 @@ def decode_block(block_data, block_data_hash=None):
if block_data_hash:
block_data['header']['hash'] = block_data_hash

block_data['header']['number'] = int(block_data['header']['number'], 16)
if type(block_data['header']['number']) is str:
# Convert block number from hex (backwards compatibility)
block_data['header']['number'] = int(block_data['header']['number'], 16)

extrinsic_cls = self.runtime_config.get_decoder_class('Extrinsic')

Expand All @@ -2232,72 +2254,73 @@ def decode_block(block_data, block_data_hash=None):
block_data['extrinsics'][idx] = None

for idx, log_data in enumerate(block_data['header']["digest"]["logs"]):
if type(log_data) is str:
# Convert digest log from hex (backwards compatibility)
try:
log_digest_cls = self.runtime_config.get_decoder_class('sp_runtime::generic::digest::DigestItem')

try:
log_digest_cls = self.runtime_config.get_decoder_class('sp_runtime::generic::digest::DigestItem')

if log_digest_cls is None:
raise NotImplementedError("No decoding class found for 'DigestItem'")
if log_digest_cls is None:
raise NotImplementedError("No decoding class found for 'DigestItem'")

log_digest = log_digest_cls(data=ScaleBytes(log_data))
log_digest.decode()
log_digest = log_digest_cls(data=ScaleBytes(log_data))
log_digest.decode()

block_data['header']["digest"]["logs"][idx] = log_digest
block_data['header']["digest"]["logs"][idx] = log_digest

if include_author and 'PreRuntime' in log_digest.value:
if include_author and 'PreRuntime' in log_digest.value:

if self.implements_scaleinfo():
if self.implements_scaleinfo():

engine = bytes(log_digest[1][0])
# Retrieve validator set
validator_set = self.query("Session", "Validators", block_hash=block_hash)
engine = bytes(log_digest[1][0])
# Retrieve validator set
validator_set = self.query("Session", "Validators", block_hash=block_hash)

if engine == b'BABE':
babe_predigest = self.runtime_config.create_scale_object(
type_string='RawBabePreDigest',
data=ScaleBytes(bytes(log_digest[1][1]))
)
if engine == b'BABE':
babe_predigest = self.runtime_config.create_scale_object(
type_string='RawBabePreDigest',
data=ScaleBytes(bytes(log_digest[1][1]))
)

babe_predigest.decode()
babe_predigest.decode()

rank_validator = babe_predigest[1].value['authority_index']
rank_validator = babe_predigest[1].value['authority_index']

block_author = validator_set[rank_validator]
block_data['author'] = block_author.value
block_author = validator_set[rank_validator]
block_data['author'] = block_author.value

elif engine == b'aura':
aura_predigest = self.runtime_config.create_scale_object(
type_string='RawAuraPreDigest',
data=ScaleBytes(bytes(log_digest[1][1]))
)
elif engine == b'aura':
aura_predigest = self.runtime_config.create_scale_object(
type_string='RawAuraPreDigest',
data=ScaleBytes(bytes(log_digest[1][1]))
)

aura_predigest.decode()
aura_predigest.decode()

rank_validator = aura_predigest.value['slot_number'] % len(validator_set)
rank_validator = aura_predigest.value['slot_number'] % len(validator_set)

block_author = validator_set[rank_validator]
block_data['author'] = block_author.value
block_author = validator_set[rank_validator]
block_data['author'] = block_author.value
else:
raise NotImplementedError(
f"Cannot extract author for engine {log_digest.value['PreRuntime'][0]}"
)
else:
raise NotImplementedError(
f"Cannot extract author for engine {log_digest.value['PreRuntime'][0]}"
)
else:

if log_digest.value['PreRuntime']['engine'] == 'BABE':
validator_set = self.query("Session", "Validators", block_hash=block_hash)
rank_validator = log_digest.value['PreRuntime']['data']['authority_index']
if log_digest.value['PreRuntime']['engine'] == 'BABE':
validator_set = self.query("Session", "Validators", block_hash=block_hash)
rank_validator = log_digest.value['PreRuntime']['data']['authority_index']

block_author = validator_set.elements[rank_validator]
block_data['author'] = block_author.value
else:
raise NotImplementedError(
f"Cannot extract author for engine {log_digest.value['PreRuntime']['engine']}"
)
block_author = validator_set.elements[rank_validator]
block_data['author'] = block_author.value
else:
raise NotImplementedError(
f"Cannot extract author for engine {log_digest.value['PreRuntime']['engine']}"
)

except Exception:
if not ignore_decoding_errors:
raise
block_data['header']["digest"]["logs"][idx] = None
except Exception:
if not ignore_decoding_errors:
raise
block_data['header']["digest"]["logs"][idx] = None

return block_data

Expand Down
10 changes: 8 additions & 2 deletions test/test_block.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,13 +53,13 @@ def mocked_query(module, storage_function, block_hash):

def mocked_request(method, params, result_handler=None):

if method in ['chain_getBlockHash', 'chain_getHead', 'chain_getFinalisedHead']:
if method in ['chain_getBlockHash', 'chain_getHead', 'chain_getFinalisedHead', 'chain_getFinalizedHead']:
return {
"jsonrpc": "2.0",
"result": "0xec828914eca09331dad704404479e2899a971a9b5948345dc40abca4ac818f93",
"id": 1
}
elif method == 'chain_getRuntimeVersion':
elif method in ['chain_getRuntimeVersion', 'state_getRuntimeVersion']:
return {
"jsonrpc": "2.0",
"result": {"specVersion": 100, "transactionVersion": 1},
Expand Down Expand Up @@ -156,6 +156,12 @@ def mocked_request(method, params, result_handler=None):
"jsonrpc": "2.0",
"result": True
}
elif method == 'rpc_methods':
return {
"jsonrpc": "2.0",
"result": {'methods': ['account_nextIndex', 'author_hasKey', 'author_hasSessionKeys', 'author_insertKey', 'author_pendingExtrinsics', 'author_removeExtrinsic', 'author_rotateKeys', 'author_submitAndWatchExtrinsic', 'author_submitExtrinsic', 'author_unwatchExtrinsic', 'babe_epochAuthorship', 'chainHead_unstable_body', 'chainHead_unstable_call', 'chainHead_unstable_follow', 'chainHead_unstable_genesisHash', 'chainHead_unstable_header', 'chainHead_unstable_stopBody', 'chainHead_unstable_stopCall', 'chainHead_unstable_stopStorage', 'chainHead_unstable_storage', 'chainHead_unstable_unfollow', 'chainHead_unstable_unpin', 'chainSpec_unstable_chainName', 'chainSpec_unstable_genesisHash', 'chainSpec_unstable_properties', 'chain_getBlock', 'chain_getBlockHash', 'chain_getFinalisedHead', 'chain_getFinalizedHead', 'chain_getHead', 'chain_getHeader', 'chain_getRuntimeVersion', 'chain_subscribeAllHeads', 'chain_subscribeFinalisedHeads', 'chain_subscribeFinalizedHeads', 'chain_subscribeNewHead', 'chain_subscribeNewHeads', 'chain_subscribeRuntimeVersion', 'chain_unsubscribeAllHeads', 'chain_unsubscribeFinalisedHeads', 'chain_unsubscribeFinalizedHeads', 'chain_unsubscribeNewHead', 'chain_unsubscribeNewHeads', 'chain_unsubscribeRuntimeVersion', 'childstate_getKeys', 'childstate_getKeysPaged', 'childstate_getKeysPagedAt', 'childstate_getStorage', 'childstate_getStorageEntries', 'childstate_getStorageHash', 'childstate_getStorageSize', 'dev_getBlockStats', 'grandpa_proveFinality', 'grandpa_roundState', 'grandpa_subscribeJustifications', 'grandpa_unsubscribeJustifications', 'mmr_generateProof', 'mmr_root', 'mmr_verifyProof', 'mmr_verifyProofStateless', 'offchain_localStorageGet', 'offchain_localStorageSet', 'payment_queryFeeDetails', 'payment_queryInfo', 'state_call', 'state_callAt', 'state_getChildReadProof', 'state_getKeys', 'state_getKeysPaged', 'state_getKeysPagedAt', 'state_getMetadata', 'state_getPairs', 'state_getReadProof', 'state_getRuntimeVersion', 'state_getStorage', 'state_getStorageAt', 'state_getStorageHash', 'state_getStorageHashAt', 'state_getStorageSize', 'state_getStorageSizeAt', 'state_queryStorage', 'state_queryStorageAt', 'state_subscribeRuntimeVersion', 'state_subscribeStorage', 'state_traceBlock', 'state_trieMigrationStatus', 'state_unsubscribeRuntimeVersion', 'state_unsubscribeStorage', 'subscribe_newHead', 'sync_state_genSyncSpec', 'system_accountNextIndex', 'system_addLogFilter', 'system_addReservedPeer', 'system_chain', 'system_chainType', 'system_dryRun', 'system_dryRunAt', 'system_health', 'system_localListenAddresses', 'system_localPeerId', 'system_name', 'system_nodeRoles', 'system_peers', 'system_properties', 'system_removeReservedPeer', 'system_reservedPeers', 'system_resetLogFilter', 'system_syncState', 'system_unstable_networkState', 'system_version', 'transaction_unstable_submitAndWatch', 'transaction_unstable_unwatch', 'unsubscribe_newHead']},
"id": 1
}

raise ValueError(f"Unsupported mocked method {method}")

Expand Down

0 comments on commit 58f4bfe

Please sign in to comment.