AuxPoW: Avoid some copy operations in blockchain.deserialize_header.
JeremyRand committed Jul 2, 2018
1 parent 751e1d7 commit 4038bc3
Showing 2 changed files with 13 additions and 12 deletions.
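In short: deserialize_header and deserialize_auxpow_header previously reported leftover input by returning a slice of the remaining bytes, which copies the whole tail on every call. They now return the integer start position of the trailing data instead. A minimal sketch of the two conventions (toy parser with hypothetical names, not the electrum-nmc code itself):

    # Old convention: return the unconsumed tail as a fresh bytes object.
    def parse_old(s, pos=0):
        header = s[pos:pos+80]
        return header, s[pos+80:]  # copies everything after the header

    # New convention: return the offset where the trailing data starts.
    def parse_new(s, pos=0):
        header = s[pos:pos+80]
        return header, pos + 80    # O(1); no copy of the tail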
lib/auxpow.py — 5 changes: 3 additions & 2 deletions
@@ -69,6 +69,7 @@ def auxpow_active(base_header):
 def get_chain_id(base_header):
     return base_header['version'] >> 16
 
+# If expect_trailing_data, returns start position of trailing data
 def deserialize_auxpow_header(base_header, s, expect_trailing_data=False, start_position=0):
     if len(s) - start_position == 0 and not expect_trailing_data:
         return None
@@ -96,15 +97,15 @@ def deserialize_auxpow_header(base_header, s, expect_trailing_data=False, start_position=0):
     # Finally there's the parent header. Deserialize it, along with any
     # trailing data if requested.
     if expect_trailing_data:
-        auxpow_header['parent_header'], trailing_data = electrum_nmc.blockchain.deserialize_header(s, 1, expect_trailing_data=expect_trailing_data, start_position=start_position)
+        auxpow_header['parent_header'], start_position = electrum_nmc.blockchain.deserialize_header(s, 1, expect_trailing_data=expect_trailing_data, start_position=start_position)
     else:
         auxpow_header['parent_header'] = electrum_nmc.blockchain.deserialize_header(s, 1, expect_trailing_data=expect_trailing_data, start_position=start_position)
     # The parent block header doesn't have any block height,
     # so delete that field. (We used 1 as a dummy value above.)
     del auxpow_header['parent_header']['block_height']
 
     if expect_trailing_data:
-        return auxpow_header, trailing_data
+        return auxpow_header, start_position
 
     return auxpow_header

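The auxpow change threads the caller's offset into the nested parent-header parse and returns whatever offset that parse reports. A minimal sketch of the pattern (hypothetical field sizes and names, not the real header layout):

    # Toy nested parsers that pass an offset down and back up.
    def parse_field(s, pos):
        return s[pos:pos + 4], pos + 4

    def parse_record(s, pos):
        record = {}
        record['a'], pos = parse_field(s, pos)
        record['b'], pos = parse_field(s, pos)  # nested parse advances pos
        return record, pos                      # caller learns the final offset

    blob = bytes(range(16))
    record, pos = parse_record(blob, 0)
    assert pos == 8  # trailing data begins at offset 8; nothing was copied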
lib/blockchain.py — 20 changes: 10 additions & 10 deletions
@@ -46,7 +46,7 @@ def serialize_header(res):
         + int_to_hex(int(res.get('nonce')), 4)
     return s
 
-# If expect_trailing_data, returns trailing data slice
+# If expect_trailing_data, returns start position of trailing data
 def deserialize_header(s, height, expect_trailing_data=False, start_position=0):
     if not s:
         raise Exception('Invalid header: {}'.format(s))
@@ -64,17 +64,17 @@ def deserialize_header(s, height, expect_trailing_data=False, start_position=0):

     if auxpow.auxpow_active(h):
         if expect_trailing_data:
-            h['auxpow'], trailing_data = auxpow.deserialize_auxpow_header(h, s, expect_trailing_data=True, start_position=start_position+80)
+            h['auxpow'], start_position = auxpow.deserialize_auxpow_header(h, s, expect_trailing_data=True, start_position=start_position+80)
         else:
             h['auxpow'] = auxpow.deserialize_auxpow_header(h, s, start_position=start_position+80)
     else:
         if expect_trailing_data:
-            trailing_data = s[start_position+80:]
+            start_position = start_position+80
         elif len(s) - start_position != 80:
             raise Exception('Invalid header length: {}'.format(len(s) - start_position))
 
     if expect_trailing_data:
-        return h, trailing_data
+        return h, start_position
 
     return h

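The non-auxpow branch shows the cost most plainly: s[start_position+80:] copies the whole remaining tail for every header, so parsing a full chunk was quadratic in its size, whereas bumping an integer offset is constant time. A toy comparison (hypothetical sizes, standard library only):

    import time

    data = bytes(2016 * 80)  # one chunk of bare 80-byte headers

    t0 = time.perf_counter()
    tail = data
    while len(tail) > 0:
        tail = tail[80:]     # old style: re-copies the tail each iteration
    t_slice = time.perf_counter() - t0

    t0 = time.perf_counter()
    pos = 0
    while pos < len(data):
        pos += 80            # new style: constant-time offset bump
    t_offset = time.perf_counter() - t0

    print(t_slice, t_offset)  # slicing is markedly slower on large chunks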
@@ -189,18 +189,18 @@ def verify_header(self, header, prev_hash, target):

     def verify_chunk(self, index, data):
         stripped = bytearray()
-        trailing_data = data
+        start_position = 0
         prev_hash = self.get_hash(index * 2016 - 1)
         target = self.get_target(index-1)
         i = 0
-        while len(trailing_data) > 0:
-            header, trailing_data = deserialize_header(trailing_data, index*2016 + i, expect_trailing_data=True)
+        while start_position < len(data):
+            # Strip auxpow header for disk
+            stripped.extend(data[start_position:start_position+80])
+
+            header, start_position = deserialize_header(data, index*2016 + i, expect_trailing_data=True, start_position=start_position)
             self.verify_header(header, prev_hash, target)
             prev_hash = hash_header(header)
 
-            # Strip auxpow header for disk
-            stripped.extend(bfh(serialize_header(header)))
-
             i = i + 1
 
         return bytes(stripped)
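verify_chunk now also sidesteps re-serialization: the 80-byte base header written to disk is copied straight out of the raw chunk at the current offset, rather than rebuilt via serialize_header. A toy model of the loop (hypothetical record format; the real code parses Namecoin headers and verifies proof-of-work):

    HEADER_LEN = 80

    def toy_deserialize(data, pos):
        # Pretend each record is an 80-byte base header whose last byte
        # encodes the length of a variable-size trailer (stand-in for auxpow).
        trailer_len = data[pos + HEADER_LEN - 1]
        end = pos + HEADER_LEN + trailer_len
        return data[pos:end], end

    def toy_verify_chunk(data):
        stripped = bytearray()
        pos = 0
        while pos < len(data):
            # Keep only the raw 80-byte base header for disk.
            stripped.extend(data[pos:pos + HEADER_LEN])
            record, pos = toy_deserialize(data, pos)
        return bytes(stripped)

    chunk = bytes(79) + bytes([3]) + bytes(3)  # one record with a 3-byte trailer
    assert toy_verify_chunk(chunk) == chunk[:80]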
