Skip to content

Commit

Permalink
etcm-73 made part of the blockchain transactional
Browse files Browse the repository at this point in the history
  • Loading branch information
pslaski committed Sep 21, 2020
1 parent 4684082 commit 01484e1
Show file tree
Hide file tree
Showing 66 changed files with 873 additions and 705 deletions.
@@ -1,12 +1,12 @@
package io.iohk.ethereum.mpt

import io.iohk.ethereum.{ObjectGenerators, crypto}
import io.iohk.ethereum.db.dataSource.EphemDataSource
import io.iohk.ethereum.db.storage.{ArchiveNodeStorage, MptStorage, NodeStorage, SerializingMptStorage}
import io.iohk.ethereum.mpt.MerklePatriciaTrie.defaultByteArraySerializable
import io.iohk.ethereum.utils.Logger
import org.scalatest.FunSuite
import io.iohk.ethereum.{ObjectGenerators, crypto}
import org.bouncycastle.util.encoders.Hex
import org.scalatest.FunSuite
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks

class MerklePatriciaTreeSpeedSpec
Expand Down
Expand Up @@ -6,7 +6,7 @@ import io.iohk.ethereum.consensus.ethash.EthashConsensus
import io.iohk.ethereum.consensus.ethash.validators.ValidatorsExecutor
import io.iohk.ethereum.consensus.{ConsensusConfig, FullConsensusConfig, TestConsensus, ethash}
import io.iohk.ethereum.db.components.Storages.PruningModeComponent
import io.iohk.ethereum.db.components.{SharedEphemDataSources, Storages}
import io.iohk.ethereum.db.components.{EphemDataSourceComponent, Storages}
import io.iohk.ethereum.db.storage.pruning.{ArchivePruning, PruningMode}
import io.iohk.ethereum.domain.Block.BlockDec
import io.iohk.ethereum.domain._
Expand Down Expand Up @@ -36,7 +36,7 @@ object ScenarioSetup {


def getBlockchain: BlockchainImpl = {
  // Builds an in-memory blockchain backed by an ephemeral (non-persistent)
  // data source, with the pruning mode supplied by the mixed-in Pruning trait.
  // NOTE(review): the scraped diff kept both the pre- and post-rename line here;
  // only the post-commit EphemDataSourceComponent version is retained.
  val storagesInstance = new EphemDataSourceComponent with Pruning with Storages.DefaultStorages
  BlockchainImpl(storagesInstance.storages)
}
}
Expand Down Expand Up @@ -75,9 +75,11 @@ abstract class ScenarioSetup(_vm: VMImpl, scenario: BlockchainScenario) {
Block(scenario.genesisBlockHeader.toBlockHeader, BlockBody(Nil, Nil))
}

blockchain.save(genesisBlock)
blockchain.save(genesisBlock.header.hash, Nil)
blockchain.save(genesisBlock.header.hash, genesisBlock.header.difficulty)
blockchain.storeBlock(genesisBlock)
.and(blockchain.storeReceipts(genesisBlock.header.hash, Nil))
.and(blockchain.storeTotalDifficulty(genesisBlock.header.hash, genesisBlock.header.difficulty))
.commit()

genesisBlock
}

Expand Down
Expand Up @@ -4,7 +4,8 @@ import java.io.File
import java.nio.file.Files
import akka.util.ByteString
import io.iohk.ethereum.ObjectGenerators
import io.iohk.ethereum.db.dataSource.DataSource
import io.iohk.ethereum.db.dataSource.{DataSource, DataSourceUpdate}
import io.iohk.ethereum.db.dataSource.DataSource.{Key, Namespace, Value}
import org.scalatest.FlatSpec
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks

Expand All @@ -30,12 +31,19 @@ trait DataSourceIntegrationTestBehavior extends ScalaCheckPropertyChecks with Ob
}
}

/** Packs one set of removals and upserts for a namespace into the
  * single-element batch shape that `DataSource.update` consumes.
  * Defaults produce an empty update against `OtherNamespace`.
  */
def prepareUpdate(
    namespace: Namespace = OtherNamespace,
    toRemove: Seq[Key] = Nil,
    toUpsert: Seq[(Key, Value)] = Nil
): Seq[DataSourceUpdate] = {
  val singleUpdate = DataSourceUpdate(namespace, toRemove, toUpsert)
  Seq(singleUpdate)
}

def updateInSeparateCalls(
    dataSource: DataSource,
    toUpsert: Seq[(ByteString, ByteString)]
): Unit = {
  // Issues one DataSource.update call per key/value pair (instead of one
  // batched call) so tests exercise many small sequential updates.
  // NOTE(review): the scraped diff interleaved the old DataSource-returning
  // signature with the new Unit-returning one; only the post-commit
  // version (mutating `dataSource` in place via prepareUpdate) is retained.
  toUpsert.foreach { keyValuePair =>
    dataSource.update(prepareUpdate(toUpsert = Seq(keyValuePair)))
  }
}

Expand All @@ -45,8 +53,9 @@ trait DataSourceIntegrationTestBehavior extends ScalaCheckPropertyChecks with Ob
forAll(seqByteStringOfNItemsGen(KeySizeWithoutPrefix)) { unFilteredKeyList: Seq[ByteString] =>
withDir { path =>
val keyList = unFilteredKeyList.take(KeyNumberLimit)
val db = updateInSeparateCalls(
dataSource = createDataSource(path),
val db = createDataSource(path)
updateInSeparateCalls(
dataSource = db,
toUpsert = keyList.zip(keyList)
)
keyList.foreach { key =>
Expand All @@ -62,11 +71,8 @@ trait DataSourceIntegrationTestBehavior extends ScalaCheckPropertyChecks with Ob
forAll(seqByteStringOfNItemsGen(KeySizeWithoutPrefix)) { unFilteredKeyList: Seq[ByteString] =>
withDir { path =>
val keyList = unFilteredKeyList.take(KeyNumberLimit)
val db = createDataSource(path).update(
OtherNamespace,
Seq(),
keyList.zip(keyList)
)
val db = createDataSource(path)
db.update(prepareUpdate(toUpsert = keyList.zip(keyList)))

keyList.foreach { key =>
assert(db.get(OtherNamespace, key).contains(key))
Expand All @@ -81,21 +87,18 @@ trait DataSourceIntegrationTestBehavior extends ScalaCheckPropertyChecks with Ob
forAll(seqByteStringOfNItemsGen(KeySizeWithoutPrefix)) { unFilteredKeyList: Seq[ByteString] =>
withDir { path =>
val keyList = unFilteredKeyList.take(KeyNumberLimit)
val db = createDataSource(path).update(
OtherNamespace,
Seq(),
keyList.zip(keyList)
)
val db = createDataSource(path)
db.update(prepareUpdate(toUpsert = keyList.zip(keyList)))

val keyListWithExtraByte = keyList.map(1.toByte +: _)
val dbAfterUpdate =
updateInSeparateCalls(db, keyList.zip(keyListWithExtraByte))
updateInSeparateCalls(db, keyList.zip(keyListWithExtraByte))

keyList.zip(keyListWithExtraByte).foreach { case (key, value) =>
assert(dbAfterUpdate.get(OtherNamespace, key).contains(value))
keyList.zip(keyListWithExtraByte).foreach {
case (key, value) =>
assert(db.get(OtherNamespace, key).contains(value))
}

dbAfterUpdate.destroy()
db.destroy()
}
}
}
Expand All @@ -104,24 +107,18 @@ trait DataSourceIntegrationTestBehavior extends ScalaCheckPropertyChecks with Ob
forAll(seqByteStringOfNItemsGen(KeySizeWithoutPrefix)) { unFilteredKeyList: Seq[ByteString] =>
withDir { path =>
val keyList = unFilteredKeyList.take(KeyNumberLimit)
val db = createDataSource(path).update(
OtherNamespace,
Seq(),
keyList.zip(keyList)
)
val db = createDataSource(path)
db.update(prepareUpdate(toUpsert = keyList.zip(keyList)))

val keyListWithExtraByte = keyList.map(1.toByte +: _)
val dbAfterUpdate = db.update(
OtherNamespace,
Seq(),
keyList.zip(keyListWithExtraByte)
)
db.update(prepareUpdate(toUpsert = keyList.zip(keyListWithExtraByte)))

keyList.zip(keyListWithExtraByte).foreach { case (key, value) =>
assert(dbAfterUpdate.get(OtherNamespace, key).contains(value))
keyList.zip(keyListWithExtraByte).foreach {
case (key, value) =>
assert(db.get(OtherNamespace, key).contains(value))
}

dbAfterUpdate.destroy()
db.destroy()
}
}
}
Expand All @@ -131,12 +128,8 @@ trait DataSourceIntegrationTestBehavior extends ScalaCheckPropertyChecks with Ob
withDir { path =>
val keyList = unFilteredKeyList.take(KeyNumberLimit)
val db = createDataSource(path)
.update(
namespace = OtherNamespace,
toRemove = Seq(),
toUpsert = keyList.zip(keyList)
)
.clear
db.update(prepareUpdate(toUpsert = keyList.zip(keyList)))
db.clear()

keyList.foreach { key =>
assert(db.get(OtherNamespace, key).isEmpty)
Expand All @@ -151,11 +144,8 @@ trait DataSourceIntegrationTestBehavior extends ScalaCheckPropertyChecks with Ob
forAll(seqByteStringOfNItemsGen(KeySizeWithoutPrefix)) { unFilteredKeyList: Seq[ByteString] =>
withDir { path =>
val keyList = unFilteredKeyList.take(KeyNumberLimit)
val db = createDataSource(path).update(
namespace = OtherNamespace,
toRemove = Seq(),
toUpsert = keyList.zip(keyList)
)
val db = createDataSource(path)
db.update(prepareUpdate(toUpsert = keyList.zip(keyList)))
db.close()

val dbAfterClose = createDataSource(path)
Expand All @@ -172,14 +162,11 @@ trait DataSourceIntegrationTestBehavior extends ScalaCheckPropertyChecks with Ob
withDir { path =>
forAll(seqByteStringOfNItemsGen(KeySizeWithoutPrefix)) { unFilteredKeyList: Seq[ByteString] =>
val keyList = unFilteredKeyList.take(KeyNumberLimit)
val db = createDataSource(path).update(
namespace = OtherNamespace,
toRemove = Seq(),
toUpsert = keyList.zip(keyList)
)
val db = createDataSource(path)
db.update(prepareUpdate(toUpsert = keyList.zip(keyList)))
db.destroy()

assert(!new File(path).exists())
assert(!new File("/tmp/iodbDestroy").exists())

val dbAfterDestroy = createDataSource(path)
keyList.foreach { key =>
Expand All @@ -199,15 +186,15 @@ trait DataSourceIntegrationTestBehavior extends ScalaCheckPropertyChecks with Ob
val db = createDataSource(path)

val valList1 = keyList.map(1.toByte +: _)
db.update(OtherNamespace, Seq(), keyList.zip(valList1))
db.update(prepareUpdate(namespace = OtherNamespace, toUpsert = keyList.zip(valList1)))

val valList2 = keyList.map(2.toByte +: _)
db.update(OtherNamespace2, Seq(), keyList.zip(valList2))
db.update(prepareUpdate(namespace = OtherNamespace2, toUpsert = keyList.zip(valList2)))

keyList.zip(valList1).foreach { case (key, value) =>
assert(db.get(OtherNamespace, key).contains(value))
keyList.zip(valList1).foreach {
case (key, value) =>
assert(db.get(OtherNamespace, key).contains(value))
}

keyList.zip(valList2).foreach { case (key, value) =>
assert(db.get(OtherNamespace2, key).contains(value))
}
Expand All @@ -225,31 +212,31 @@ trait DataSourceIntegrationTestBehavior extends ScalaCheckPropertyChecks with Ob
val db = createDataSource(path)

val valList1 = keyList.map(1.toByte +: _)
db.update(OtherNamespace, Seq(), keyList.zip(valList1))
db.update(prepareUpdate(namespace = OtherNamespace, toUpsert = keyList.zip(valList1)))

val valList2 = keyList.map(2.toByte +: _)
db.update(OtherNamespace2, Seq(), keyList.zip(valList2))
db.update(prepareUpdate(namespace = OtherNamespace2, toUpsert = keyList.zip(valList2)))

//Removal of keys from the OtherNamespace namespace
db.update(OtherNamespace, keyList, Nil)
db.update(prepareUpdate(namespace = OtherNamespace, toRemove = keyList))

keyList.foreach { key =>
assert(db.get(OtherNamespace, key).isEmpty)
}
keyList.zip(valList2).foreach { case (key, value) =>
assert(db.get(OtherNamespace2, key).contains(value))
keyList.zip(valList2).foreach {
case (key, value) =>
assert(db.get(OtherNamespace2, key).contains(value))
}

//Removal of keys from the OtherNamespace2 namespace
db.update(OtherNamespace2, keyList, Nil)
db.update(prepareUpdate(namespace = OtherNamespace2, toRemove = keyList))

keyList.foreach { key =>
assert(db.get(OtherNamespace, key).isEmpty)
}
keyList.foreach { key =>
assert(db.get(OtherNamespace2, key).isEmpty)
}

db.destroy()
}
}
Expand Down
15 changes: 8 additions & 7 deletions src/it/scala/io/iohk/ethereum/txExecTest/util/DumpChainApp.scala
Expand Up @@ -4,7 +4,7 @@ import akka.actor.ActorSystem
import akka.util.ByteString
import com.typesafe.config.ConfigFactory
import io.iohk.ethereum.db.components.Storages.PruningModeComponent
import io.iohk.ethereum.db.components.{SharedRocksDbDataSources, Storages}
import io.iohk.ethereum.db.components.{RocksDbDataSourceComponent, Storages}
import io.iohk.ethereum.db.storage.{AppStateStorage, StateStorage}
import io.iohk.ethereum.db.storage.NodeStorage.{NodeEncoded, NodeHash}
import io.iohk.ethereum.db.storage.TransactionMappingStorage.TransactionLocation
Expand All @@ -22,6 +22,7 @@ import io.iohk.ethereum.nodebuilder.{AuthHandshakerBuilder, NodeKeyBuilder, Secu
import io.iohk.ethereum.utils.{Config, NodeStatus, ServerStatus}
import java.util.concurrent.atomic.AtomicReference

import io.iohk.ethereum.db.dataSource.DataSourceBatchUpdate
import org.bouncycastle.util.encoders.Hex

import scala.concurrent.duration._
Expand Down Expand Up @@ -59,7 +60,7 @@ object DumpChainApp extends App with NodeKeyBuilder with SecureRandomBuilder wit
// Fixes the pruning mode to ArchivePruning (keep all historical state) for this
// dump tool, overriding whatever the node configuration would otherwise select.
trait PruningConfig extends PruningModeComponent {
override val pruningMode: PruningMode = ArchivePruning
}
val storagesInstance = new SharedRocksDbDataSources with PruningConfig with Storages.DefaultStorages
val storagesInstance = new RocksDbDataSourceComponent with PruningConfig with Storages.DefaultStorages

val blockchain: Blockchain = new BlockchainMock(genesisHash)

Expand Down Expand Up @@ -114,15 +115,15 @@ object DumpChainApp extends App with NodeKeyBuilder with SecureRandomBuilder wit

override def getMptNodeByHash(hash: ByteString): Option[MptNode] = ???

override def save(blockHeader: BlockHeader): Unit = ???
override def storeBlockHeader(blockHeader: BlockHeader): DataSourceBatchUpdate = ???

override def save(blockHash: ByteString, blockBody: BlockBody): Unit = ???
override def storeBlockBody(blockHash: ByteString, blockBody: BlockBody): DataSourceBatchUpdate = ???

override def save(blockHash: ByteString, receipts: Seq[Receipt]): Unit = ???
override def storeReceipts(blockHash: ByteString, receipts: Seq[Receipt]): DataSourceBatchUpdate = ???

override def save(hash: ByteString, evmCode: ByteString): Unit = ???
override def storeEvmCode(hash: ByteString, evmCode: ByteString): DataSourceBatchUpdate = ???

override def save(blockhash: ByteString, totalDifficulty: BigInt): Unit = ???
override def storeTotalDifficulty(blockhash: ByteString, totalDifficulty: BigInt): DataSourceBatchUpdate = ???

override def saveNode(nodeHash: NodeHash, nodeEncoded: NodeEncoded, blockNumber: BigInt): Unit = ???

Expand Down
34 changes: 19 additions & 15 deletions src/it/scala/io/iohk/ethereum/txExecTest/util/FixtureProvider.scala
Expand Up @@ -3,7 +3,6 @@ package io.iohk.ethereum.txExecTest.util
import java.io.Closeable

import akka.util.ByteString
import io.iohk.ethereum.db.dataSource.EphemDataSource
import io.iohk.ethereum.db.storage._
import io.iohk.ethereum.domain._
import io.iohk.ethereum.domain.BlockHeader._
Expand All @@ -12,6 +11,7 @@ import io.iohk.ethereum.network.p2p.messages.PV63._
import MptNodeEncoders._
import ReceiptImplicits._
import io.iohk.ethereum.db.cache.{AppCaches, LruCache}
import io.iohk.ethereum.db.components.EphemDataSourceComponent
import io.iohk.ethereum.db.storage.NodeStorage.NodeHash
import io.iohk.ethereum.db.storage.pruning.{ArchivePruning, PruningMode}
import io.iohk.ethereum.mpt.{BranchNode, ExtensionNode, HashNode, LeafNode, MptNode}
Expand All @@ -37,19 +37,19 @@ object FixtureProvider {
// scalastyle:off
def prepareStorages(blockNumber: BigInt, fixtures: Fixture): BlockchainStorages = {

val storages: BlockchainStorages = new BlockchainStorages with AppCaches {
val storages: BlockchainStorages = new BlockchainStorages with AppCaches with EphemDataSourceComponent {

override val receiptStorage: ReceiptStorage = new ReceiptStorage(EphemDataSource())
override val evmCodeStorage: EvmCodeStorage = new EvmCodeStorage(EphemDataSource())
override val blockHeadersStorage: BlockHeadersStorage = new BlockHeadersStorage(EphemDataSource())
override val blockNumberMappingStorage: BlockNumberMappingStorage = new BlockNumberMappingStorage(EphemDataSource())
override val blockBodiesStorage: BlockBodiesStorage = new BlockBodiesStorage(EphemDataSource())
override val totalDifficultyStorage: TotalDifficultyStorage = new TotalDifficultyStorage(EphemDataSource())
override val transactionMappingStorage: TransactionMappingStorage = new TransactionMappingStorage(EphemDataSource())
override val nodeStorage: NodeStorage = new NodeStorage(EphemDataSource())
override val receiptStorage: ReceiptStorage = new ReceiptStorage(dataSource)
override val evmCodeStorage: EvmCodeStorage = new EvmCodeStorage(dataSource)
override val blockHeadersStorage: BlockHeadersStorage = new BlockHeadersStorage(dataSource)
override val blockNumberMappingStorage: BlockNumberMappingStorage = new BlockNumberMappingStorage(dataSource)
override val blockBodiesStorage: BlockBodiesStorage = new BlockBodiesStorage(dataSource)
override val totalDifficultyStorage: TotalDifficultyStorage = new TotalDifficultyStorage(dataSource)
override val transactionMappingStorage: TransactionMappingStorage = new TransactionMappingStorage(dataSource)
override val nodeStorage: NodeStorage = new NodeStorage(dataSource)
override val cachedNodeStorage: CachedNodeStorage = new CachedNodeStorage(nodeStorage, caches.nodeCache)
override val pruningMode: PruningMode = ArchivePruning
override val appStateStorage: AppStateStorage = new AppStateStorage(EphemDataSource())
override val appStateStorage: AppStateStorage = new AppStateStorage(dataSource)
override val stateStorage: StateStorage =
StateStorage(
pruningMode,
Expand All @@ -63,10 +63,14 @@ object FixtureProvider {
val blockchain = BlockchainImpl(storages)

blocksToInclude.foreach { case (_, block) =>
val receiptsUpdates = fixtures.receipts.get(block.header.hash)
.map(r => storages.receiptStorage.put(block.header.hash, r))
.getOrElse(storages.receiptStorage.emptyBatchUpdate)
storages.blockBodiesStorage.put(block.header.hash, fixtures.blockBodies(block.header.hash))
storages.blockHeadersStorage.put(block.header.hash, fixtures.blockHeaders(block.header.hash))
storages.blockNumberMappingStorage.put(block.header.number, block.header.hash)
fixtures.receipts.get(block.header.hash).foreach(r => storages.receiptStorage.put(block.header.hash, r))
.and(storages.blockHeadersStorage.put(block.header.hash, fixtures.blockHeaders(block.header.hash)))
.and(storages.blockNumberMappingStorage.put(block.header.number, block.header.hash))
.and(receiptsUpdates)
.commit()

def traverse(nodeHash: ByteString): Unit = fixtures.stateMpt.get(nodeHash).orElse(fixtures.contractMpts.get(nodeHash)) match {
case Some(m: BranchNode) =>
Expand All @@ -85,7 +89,7 @@ object FixtureProvider {
storages.stateStorage.saveNode(ByteString(m.hash), m.toBytes, block.header.number)
Try(m.value.toArray[Byte].toAccount).toOption.foreach { account =>
if (account.codeHash != DumpChainActor.emptyEvm) {
storages.evmCodeStorage.put(account.codeHash, fixtures.evmCode(account.codeHash))
storages.evmCodeStorage.put(account.codeHash, fixtures.evmCode(account.codeHash)).commit()
}
if (account.storageRoot != DumpChainActor.emptyStorage) {
traverse(account.storageRoot)
Expand Down

0 comments on commit 01484e1

Please sign in to comment.