[ETCM-247] Comment from review
mirkoAlic committed Oct 28, 2020
1 parent 925112f commit 2caf291
Showing 3 changed files with 16 additions and 15 deletions.
8 changes: 4 additions & 4 deletions src/it/scala/io/iohk/ethereum/sync/RegularSyncItSpec.scala
@@ -33,7 +33,7 @@ class RegularSyncItSpec extends FreeSpecBase with Matchers with BeforeAndAfterAll

   "given a previously mined blockchain" in customTestCaseResourceM(FakePeer.start2FakePeersRes()) {
     case (peer1, peer2) =>
-      val blockHeadersPerRequest = 200
+      val blockHeadersPerRequest = peer2.syncConfig.blockHeadersPerRequest
       for {
         _ <- peer1.startRegularSync()
         _ <- peer1.mineNewBlocks(100.milliseconds, blockHeadersPerRequest + 1)(IdentityUpdate)
@@ -52,9 +52,9 @@ class RegularSyncItSpec extends FreeSpecBase with Matchers with BeforeAndAfterAll
   ) { case (peer1, peer2) =>
     val blockNumer: Int = 2000
     for {
-      _ <- peer1.importBlocksUntil(blockNumer)(IdentityUpdate)
       _ <- peer1.startRegularSync()
       _ <- peer2.startRegularSync()
+      _ <- peer1.importBlocksUntil(blockNumer)(IdentityUpdate)
       _ <- peer2.connectToPeers(Set(peer1.node))
       _ <- peer2.waitForRegularSyncLoadLastBlock(blockNumer)
       _ <- peer2.mineNewBlocks(50.milliseconds, 2)(IdentityUpdate)
@@ -71,10 +71,10 @@ class RegularSyncItSpec extends FreeSpecBase with Matchers with BeforeAndAfterAll
   ) { case (peer1, peer2) =>
     val blockNumer: Int = 2000
     for {
-      _ <- peer1.startRegularSync()
-      _ <- peer2.startRegularSync()
       _ <- peer1.importBlocksUntil(blockNumer)(IdentityUpdate)
       _ <- peer2.importBlocksUntil(blockNumer)(IdentityUpdate)
+      _ <- peer1.startRegularSync()
+      _ <- peer2.startRegularSync()
       _ <- peer1.mineNewBlock()(IdentityUpdate)
       _ <- peer2.mineNewBlocks(100.milliseconds, 3)(IdentityUpdate)
       _ <- peer2.waitForRegularSyncLoadLastBlock(blockNumer + 3)
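The spec changes above drop the hard-coded 200 in favour of the value already carried by peer2's sync configuration, and adjust the relative order of the importBlocksUntil and startRegularSync steps in the two peer-sync tests. A minimal sketch of the config-driven constant, using a hypothetical SyncDemoConfig stand-in rather than the real SyncConfig:

// SyncDemoConfig is a stand-in for illustration only; the spec reads
// peer2.syncConfig.blockHeadersPerRequest from the real SyncConfig.
final case class SyncDemoConfig(blockHeadersPerRequest: Int)

val demoConfig = SyncDemoConfig(blockHeadersPerRequest = 200)

// Deriving the test constant from configuration keeps the spec correct
// even if the configured request size changes later.
val blockHeadersPerRequest = demoConfig.blockHeadersPerRequest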
@@ -196,9 +196,10 @@ class BlockFetcher(
       fetchBlocks(newState)
     }
     //keep fetcher state updated in case new checkpoint block or mined block was imported
-    case LastBlockChanged(blockNr) => {
+    case InternalLastBlockImport(blockNr) => {
       log.debug(s"New last block $blockNr imported from the inside")
-      val newState = state.withLastBlock(blockNr).withPossibleNewTopAt(blockNr)
+      val newLastBlock = blockNr.max(state.lastBlock)
+      val newState = state.withLastBlock(newLastBlock).withPossibleNewTopAt(blockNr)
       fetchBlocks(newState)
     }
   }
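Besides renaming LastBlockChanged to InternalLastBlockImport, the handler above now takes the maximum of the imported block number and the fetcher's current last block, so an internally imported (mined or checkpoint) block that is behind the fetcher's head no longer rewinds its state. A minimal sketch of that guard, using a simplified FetcherDemoState instead of the real fetcher state:

// FetcherDemoState is illustrative only; the real fetcher state also carries
// header/body queues and peer bookkeeping.
final case class FetcherDemoState(lastBlock: BigInt, knownTop: BigInt) {
  def withLastBlock(nr: BigInt): FetcherDemoState = copy(lastBlock = nr)
  def withPossibleNewTopAt(nr: BigInt): FetcherDemoState =
    if (nr > knownTop) copy(knownTop = nr) else this
}

def onInternalLastBlockImport(state: FetcherDemoState, blockNr: BigInt): FetcherDemoState = {
  // Never move the last block backwards: the internally imported block may be
  // older than blocks the fetcher has already requested or stored.
  val newLastBlock = blockNr.max(state.lastBlock)
  state.withLastBlock(newLastBlock).withPossibleNewTopAt(blockNr)
}

// Example: with lastBlock = 120, an internal import of block 100 keeps lastBlock at 120,
// while an import of block 130 advances both lastBlock and the known top.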
@@ -326,7 +327,7 @@ object BlockFetcher {
       new InvalidateBlocksFrom(from, reason, toBlacklist)
   }
   case class BlockImportFailed(blockNr: BigInt, reason: String) extends FetchMsg
-  case class LastBlockChanged(blockNr: BigInt) extends FetchMsg
+  case class InternalLastBlockImport(blockNr: BigInt) extends FetchMsg
   case object RetryBodiesRequest extends FetchMsg
   case object RetryHeadersRequest extends FetchMsg

@@ -205,30 +205,30 @@ class BlockImporter(
       block,
       new MinedBlockImportMessages(block),
       informFetcherOnFail = false,
-      informFetcherOnLastBlockChanged = true
+      informFetcherOnInternalLastBlockImport = true
     )(state)

   private def importCheckpointBlock(block: Block, state: ImporterState): Unit =
     importBlock(
       block,
       new CheckpointBlockImportMessages(block),
       informFetcherOnFail = false,
-      informFetcherOnLastBlockChanged = true
+      informFetcherOnInternalLastBlockImport = true
     )(state)

   private def importNewBlock(block: Block, peerId: PeerId, state: ImporterState): Unit =
     importBlock(
       block,
       new NewBlockImportMessages(block, peerId),
       informFetcherOnFail = true,
-      informFetcherOnLastBlockChanged = false
+      informFetcherOnInternalLastBlockImport = false
     )(state)

   private def importBlock(
       block: Block,
       importMessages: ImportMessages,
       informFetcherOnFail: Boolean,
-      informFetcherOnLastBlockChanged: Boolean
+      informFetcherOnInternalLastBlockImport: Boolean
   ): ImportFn = {
     def doLog(entry: ImportMessages.LogEntry): Unit = log.log(entry._1, entry._2)
@@ -242,8 +242,8 @@ class BlockImporter(
         val (blocks, tds) = importedBlocksData.map(data => (data.block, data.td)).unzip
         broadcastBlocks(blocks, tds)
         updateTxPool(importedBlocksData.map(_.block), Seq.empty)
-        if (informFetcherOnLastBlockChanged) {
-          fetcher ! BlockFetcher.LastBlockChanged(blocks.last.number)
+        if (informFetcherOnInternalLastBlockImport) {
+          fetcher ! BlockFetcher.InternalLastBlockImport(blocks.last.number)
         }

       case BlockEnqueued => ()
@@ -255,8 +255,8 @@ class BlockImporter(
       case ChainReorganised(oldBranch, newBranch, totalDifficulties) =>
         updateTxPool(newBranch, oldBranch)
         broadcastBlocks(newBranch, totalDifficulties)
-        if (informFetcherOnLastBlockChanged) {
-          fetcher ! BlockFetcher.LastBlockChanged(newBranch.last.number)
+        if (informFetcherOnInternalLastBlockImport) {
+          fetcher ! BlockFetcher.InternalLastBlockImport(newBranch.last.number)
         }

       case BlockImportFailed(error) =>
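On the BlockImporter side the flag becomes informFetcherOnInternalLastBlockImport and the message becomes BlockFetcher.InternalLastBlockImport; as the surrounding methods show, the flag is true for mined and checkpoint imports and false for blocks received from peers. A minimal sketch of that dispatch, with placeholder types instead of the real actor wiring:

// DemoFetchMsg and the `send` function are placeholders for illustration;
// the real code sends BlockFetcher.InternalLastBlockImport via `fetcher ! msg`.
sealed trait DemoFetchMsg
final case class DemoInternalLastBlockImport(blockNr: BigInt) extends DemoFetchMsg

def notifyFetcher(
    send: DemoFetchMsg => Unit,
    importedBlockNumbers: Seq[BigInt],
    informFetcherOnInternalLastBlockImport: Boolean
): Unit =
  // Only internally produced blocks (mined or checkpoint) set the flag,
  // so imports of blocks received from peers never emit this message.
  if (informFetcherOnInternalLastBlockImport && importedBlockNumbers.nonEmpty)
    send(DemoInternalLastBlockImport(importedBlockNumbers.last))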