This repository has been archived by the owner on Jul 1, 2021. It is now read-only.

Commit

Fix crash when Trinity was launched with a checkpoint too close to the tip in Beam Sync mode

Elnaril committed Nov 30, 2020
1 parent 0deec1a commit d501485
Showing 3 changed files with 128 additions and 5 deletions.
1 change: 1 addition & 0 deletions newsfragments/2091.bugfix.rst
@@ -0,0 +1 @@
Fix crash when Trinity was launched with a checkpoint too close to the tip in Beam Sync mode
85 changes: 85 additions & 0 deletions tests/core/p2p-proto/test_sync.py
@@ -60,6 +60,7 @@
from trinity.sync.beam.chain import (
BeamSyncer,
BodyChainGapSyncer,
NoActionableGap,
)
from trinity.sync.beam.queen import QueeningQueue
from trinity.sync.header.chain import (
@@ -134,6 +135,27 @@ def chaindb_with_block_gaps(chaindb_fresh, chaindb_1000):
yield chaindb_fresh


@pytest.fixture
def chaindb_with_headers_from_checkpoint(chaindb_fresh, chaindb_1000):
    # Make a chain with a header gap from genesis to the checkpoint and a block gap from
    # genesis to the tip block.
for block_number in range(970, 1001):
header_at = chaindb_1000.get_canonical_block_header_by_number(block_number)
score_at = chaindb_1000.get_score(header_at.hash)
chaindb_fresh.persist_checkpoint_header(header_at, score_at)

fat_chain = LatestTestChain(chaindb_1000.db)
block_number = 1000
block = fat_chain.get_canonical_block_by_number(block_number)
receipts = block.get_receipts(chaindb_1000)
chaindb_fresh.persist_unexecuted_block(block, receipts)

assert chaindb_fresh.get_header_chain_gaps() == (((1, 969),), 1001)
assert chaindb_fresh.get_chain_gaps() == (((1, 999),), 1001)

yield chaindb_fresh


@pytest.mark.asyncio
async def test_fast_syncer(request, event_loop, event_bus, chaindb_fresh, chaindb_1000):

@@ -724,6 +746,69 @@ async def test_block_gapfill_syncer(request,
assert chain_with_gaps.chaindb.get_chain_gaps() == ((), 1001)


@pytest.mark.asyncio
async def test_block_gapfill_from_checkpoint_syncer(event_bus,
chaindb_with_headers_from_checkpoint,
chaindb_1000):
client_context = ChainContextFactory(headerdb__db=chaindb_with_headers_from_checkpoint.db)
server_context = ChainContextFactory(headerdb__db=chaindb_1000.db)
peer_pair = LatestETHPeerPairFactory(
alice_peer_context=client_context,
bob_peer_context=server_context,
event_bus=event_bus,
)
async with peer_pair as (client_peer, server_peer):
chain_with_gaps = LatestTestChain(chaindb_with_headers_from_checkpoint.db)

syncer = BodyChainGapSyncer(
chain_with_gaps,
chaindb_with_headers_from_checkpoint,
MockPeerPoolWithConnectedPeers([client_peer], event_bus=event_bus),
)

# In production, this would be the block time but we want our test to pause/resume swiftly
syncer._idle_time = 0.01
server_peer_pool = MockPeerPoolWithConnectedPeers([server_peer], event_bus=event_bus)
syncer._max_backfill_block_bodies_at_once = 100

        # The most recent actionable block gap is the most recent block gap, capped at
        # _max_backfill_block_bodies_at_once blocks, for which we already have the headers
gaps, _ = chain_with_gaps.chaindb.get_chain_gaps()
header_gaps, _ = chain_with_gaps.chaindb.get_header_chain_gaps()
with pytest.raises(NoActionableGap):
# no actionable block gap at the moment
syncer.get_topmost_actionable_gap(gaps, header_gaps)

# Add enough headers to have an actionable gap:
fat_chain = LatestTestChain(chaindb_1000.db)
for block_number in range(898, 970):
header = fat_chain.get_canonical_block_header_by_number(block_number)
score = fat_chain.get_score(header.hash)
chain_with_gaps.chaindb.persist_checkpoint_header(header, score)

header_gaps, _ = chain_with_gaps.chaindb.get_header_chain_gaps()
assert gaps == ((1, 999),)
assert header_gaps == ((1, 897),)
assert syncer.get_topmost_actionable_gap(gaps, header_gaps) == (899, 999)

async with run_peer_pool_event_server(
event_bus, server_peer_pool, handler_type=ETHPeerPoolEventServer
), background_asyncio_service(ETHRequestServer(
event_bus, TO_NETWORKING_BROADCAST_CONFIG, AsyncChainDB(chaindb_1000.db),
)):

server_peer.logger.info("%s is serving 1000 blocks", server_peer)
client_peer.logger.info("%s is syncing up 1000", client_peer)

async with background_asyncio_service(syncer):
                # Wait for the actionable block gap to be backfilled
await wait_for_block(
chain_with_gaps, fat_chain.get_canonical_block_by_number(999), sync_timeout=20)

await asyncio.sleep(0.25)
assert chain_with_gaps.chaindb.get_chain_gaps() == (((1, 898),), 1001)


@pytest.mark.asyncio
async def test_header_gapfill_syncer(request,
event_loop,
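To make the assertions in the chaindb_with_headers_from_checkpoint fixture and the new test above easier to follow: both get_header_chain_gaps() and get_chain_gaps() report missing ranges as a tuple of closed (start, end) pairs, together with the number of the first block past the highest entry already written. The sketch below is illustrative only; compute_gaps is a hypothetical helper, not Trinity code (Trinity tracks gaps incrementally in the database rather than by rescanning), and it merely reproduces the shape of the values asserted above.

def compute_gaps(present, highest):
    """Return (((start, end), ...), highest + 1) for the numbers in 1..highest
    that are missing from `present`."""
    gaps = []
    start = None
    for number in range(1, highest + 1):
        if number not in present:
            if start is None:
                start = number
        elif start is not None:
            gaps.append((start, number - 1))
            start = None
    if start is not None:
        gaps.append((start, highest))
    return tuple(gaps), highest + 1

# Headers: genesis plus the checkpointed headers 970-1000 are persisted.
assert compute_gaps({0, *range(970, 1001)}, 1000) == (((1, 969),), 1001)
# Blocks: only genesis and the unexecuted block 1000 are persisted.
assert compute_gaps({0, 1000}, 1000) == (((1, 999),), 1001)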
47 changes: 42 additions & 5 deletions trinity/sync/beam/chain.py
@@ -38,7 +38,10 @@
from trinity.constants import FIRE_AND_FORGET_BROADCASTING
from trinity.db.eth1.chain import BaseAsyncChainDB
from trinity.db.eth1.header import BaseAsyncHeaderDB
from trinity.exceptions import WitnessHashesUnavailable
from trinity.exceptions import (
WitnessHashesUnavailable,
BaseTrinityError,
)
from trinity.protocol.eth.peer import ETHPeerPool
from trinity.protocol.eth.sync import ETHHeaderChainSyncer
from trinity.protocol.wit.db import AsyncWitnessDB
@@ -452,12 +455,13 @@ def __init__(self,
self._peer_pool = peer_pool
self._pauser = Pauser()
self._body_syncer: FastChainBodySyncer = None
self._max_backfill_block_bodies_at_once = MAX_BACKFILL_BLOCK_BODIES_AT_ONCE
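        # (instance attribute rather than the module constant, which lets the backfill
        # batch size be overridden per instance, e.g. in the new test)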

async def _setup_for_next_gap(self) -> None:
gap_start, gap_end = self._get_next_gap()
fill_start = BlockNumber(max(
gap_start,
gap_end - MAX_BACKFILL_BLOCK_BODIES_AT_ONCE,
gap_end - self._max_backfill_block_bodies_at_once,
))
start_num = BlockNumber(fill_start - 1)
_starting_tip = await self._db.coro_get_canonical_block_header_by_number(start_num)
@@ -484,10 +488,13 @@ async def _get_launch_header() -> BlockHeaderAPI:

def _get_next_gap(self) -> BlockRange:
gaps, future_tip_block = self._db.get_chain_gaps()
if len(gaps) == 0:
header_gaps, future_tip_header = self._db.get_header_chain_gaps()
try:
actionable_gap = self.get_topmost_actionable_gap(gaps, header_gaps)

except NoActionableGap:
            # We do not have an actionable gap in the chain of blocks, but we may still have
            # a gap from the last block up to the highest consecutive written header.
header_gaps, future_tip_header = self._db.get_header_chain_gaps()
if len(header_gaps) > 0:
# The header chain has gaps, find out the lowest missing header
lowest_missing_header, _ = header_gaps[0]
@@ -504,7 +511,30 @@ def _get_next_gap(self) -> BlockRange:
else:
raise ValidationError("No gaps in the chain of blocks")
else:
return gaps[-1]
return actionable_gap

def get_topmost_actionable_gap(self,
gaps: Tuple[BlockRange, ...],
header_gaps: Tuple[BlockRange, ...]) -> BlockRange:
        """
        Return the most recent block gap, capped at _max_backfill_block_bodies_at_once
        blocks, for which the headers (including the header immediately preceding
        the gap) already exist in the DB.
        """
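        # Example (the scenario exercised in tests/core/p2p-proto/test_sync.py): with
        # _max_backfill_block_bodies_at_once == 100, gaps == ((1, 999),) and
        # header_gaps == ((1, 897),), the topmost actionable gap is (899, 999),
        # because headers 898-999 are already present.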
for gap in gaps[::-1]:
if gap[1] - gap[0] > self._max_backfill_block_bodies_at_once:
gap = (BlockNumber(gap[1] - self._max_backfill_block_bodies_at_once), gap[1])
# We want to be sure the header preceding the block gap is in DB
gap_with_prev_block = (BlockNumber(gap[0] - 1), gap[1])
for header_gap in header_gaps[::-1]:
if not self._have_empty_intersection(gap_with_prev_block, header_gap):
break
else:
return gap
else:
raise NoActionableGap

def _have_empty_intersection(self, block_gap: BlockRange, header_gap: BlockRange) -> bool:
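        # Closed ranges [a, b] and [c, d] are disjoint exactly when a > d or b < c.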
return block_gap[0] > header_gap[1] or block_gap[1] < header_gap[0]

@property
def is_paused(self) -> bool:
@@ -550,6 +580,13 @@ async def run(self) -> None:
await self.manager.run_service(self._body_syncer)


class NoActionableGap(BaseTrinityError):
"""
Raised when no actionable gap of blocks is found.
"""
pass


class HeaderLaunchpointSyncer(HeaderSyncerAPI):
"""
Wraps a "real" header syncer, and drops headers on the floor, until triggered
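For reference, here is a minimal, self-contained sketch of the gap-selection rule added above. It is not the Trinity implementation: the real code is a method of BodyChainGapSyncer that works on BlockNumber/BlockRange values and raises NoActionableGap, whereas this sketch uses plain int ranges, standalone functions, and a None return for illustration.

from typing import Optional, Sequence, Tuple

BlockRange = Tuple[int, int]

def have_empty_intersection(block_gap: BlockRange, header_gap: BlockRange) -> bool:
    # Closed ranges are disjoint exactly when one ends before the other starts.
    return block_gap[0] > header_gap[1] or block_gap[1] < header_gap[0]

def topmost_actionable_gap(gaps: Sequence[BlockRange],
                           header_gaps: Sequence[BlockRange],
                           max_bodies: int) -> Optional[BlockRange]:
    # Walk the block gaps from the most recent downwards, cap each candidate at
    # max_bodies blocks, and accept it only if neither the candidate nor the block
    # immediately below it overlaps any header gap (i.e. every header needed for
    # the backfill, including the launch header, is already in the database).
    for gap in reversed(gaps):
        if gap[1] - gap[0] > max_bodies:
            gap = (gap[1] - max_bodies, gap[1])
        candidate_with_prev = (gap[0] - 1, gap[1])
        if all(have_empty_intersection(candidate_with_prev, header_gap)
               for header_gap in header_gaps):
            return gap
    return None  # Trinity raises NoActionableGap at this point instead

# The scenario from the new test: blocks 1-999 missing, headers 1-897 missing,
# at most 100 block bodies backfilled at once.
assert topmost_actionable_gap([(1, 999)], [(1, 897)], 100) == (899, 999)
# With headers only from the checkpoint (970) upward, nothing is actionable yet.
assert topmost_actionable_gap([(1, 999)], [(1, 969)], 100) is None

Before this change, _get_next_gap returned the most recent block gap unconditionally (return gaps[-1]) without checking that the corresponding headers were available, which appears to be what crashed when the checkpoint sat too close to the tip.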
