Skip to content

Commit

Permalink
Add temporal profiling; too dirty to keep it in the codebase
Browse files Browse the repository at this point in the history
  • Loading branch information
mkalinin committed Dec 28, 2017
1 parent 1a47888 commit d6d9dac
Show file tree
Hide file tree
Showing 4 changed files with 79 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import org.ethereum.config.CommonConfig;
import org.ethereum.config.SystemProperties;
import org.ethereum.crypto.HashUtil;
import org.ethereum.datasource.JournalSource;
import org.ethereum.datasource.inmem.HashMapDB;
import org.ethereum.db.*;
import org.ethereum.trie.Trie;
Expand Down Expand Up @@ -403,9 +404,14 @@ private synchronized BlockSummary tryConnectAndFork(final Block block) {
return summary;
}


// Dedicated logger for pruning-performance metrics (separate "prune" category).
private static final Logger loggerPrune = LoggerFactory.getLogger("prune");
// Milliseconds spent inside tryToConnect since the last 1000-block report (reset each report).
long processing = 0;
// Running totals across the whole run; never reset, used for the cumulative ratio in the report.
long pruningTotal = 0;
long processingTotal = 0;
public synchronized ImportResult tryToConnect(final Block block) {

long s = System.currentTimeMillis();

if (logger.isDebugEnabled())
logger.debug("Try connect block hash: {}, number: {}",
Hex.toHexString(block.getHash()).substring(0, 6),
Expand Down Expand Up @@ -460,6 +466,19 @@ public synchronized ImportResult tryToConnect(final Block block) {
}
}

processing += System.currentTimeMillis() - s;

if (block.getNumber() > 0 && block.getNumber() % 1000 == 0) {
processingTotal += processing;
pruningTotal += JournalSource.pruneTime;

loggerPrune.info("Prune time: instant {}/{} : {}, total {}/{} : {}",
JournalSource.pruneTime, processing, String.format("%.4f", (double) JournalSource.pruneTime / processing),
pruningTotal, processingTotal, String.format("%.4f", (double) pruningTotal / processingTotal));

processing = JournalSource.pruneTime = 0;
}

return ret;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,8 @@
public class JournalSource<V> extends AbstractChainedSource<byte[], V, byte[], V>
implements HashedKeySource<byte[], V> {

public static long pruneTime = 0;

private static class Update {
byte[] updateHash;
List<byte[]> insertedKeys = new ArrayList<>();
Expand Down Expand Up @@ -128,8 +130,10 @@ public synchronized void put(byte[] key, V val) {
return;
}

long s = System.currentTimeMillis();
currentUpdate.insertedKeys.add(key);
pruning.inserted(key);
pruneTime += System.currentTimeMillis() - s;
getSource().put(key, val);
}

Expand All @@ -140,8 +144,10 @@ public synchronized void put(byte[] key, V val) {
*/
@Override
public synchronized void delete(byte[] key) {
    // Record the key as deleted within the current update and notify the
    // pruning tracker; elapsed wall-clock time is charged to the global
    // pruning-profiling counter. The delete is journaled only — the backing
    // source is not touched here.
    final long begin = System.currentTimeMillis();
    currentUpdate.deletedKeys.add(key);
    pruning.deleted(key);
    pruneTime += System.currentTimeMillis() - begin;
}

@Override
Expand All @@ -158,9 +164,11 @@ public synchronized V get(byte[] key) {
* via revertUpdate call
*/
public synchronized void commitUpdates(byte[] updateHash) {
    // Seal the in-progress update under the given hash, hand it to the
    // journal, and start a fresh update for subsequent writes. Elapsed
    // time feeds the global pruning-profiling counter.
    final long begin = System.currentTimeMillis();
    final Update sealed = currentUpdate;
    sealed.updateHash = updateHash;
    currentUpdate = new Update();
    journal.put(updateHash, sealed);
    pruneTime += System.currentTimeMillis() - begin;
}

/**
Expand All @@ -174,19 +182,22 @@ public synchronized boolean hasUpdate(byte[] updateHash) {
* Given update hash prunes touched nodes
*/
public synchronized void persistUpdate(byte[] updateHash) {
    // Prune every node touched by the update identified by updateHash, then
    // drop the update from the journal. Throws if no such update exists.
    // Elapsed time feeds the global pruning-profiling counter.
    final long begin = System.currentTimeMillis();
    final Update update = journal.get(updateHash);
    if (update == null) {
        throw new RuntimeException("No update found: " + Hex.toHexString(updateHash));
    }

    for (byte[] inserted : update.insertedKeys) {
        firePruning(inserted);
    }
    for (byte[] deleted : update.deletedKeys) {
        firePruning(deleted);
    }

    journal.delete(updateHash);
    pruneTime += System.currentTimeMillis() - begin;
}

/**
* Reverts all changes made under this update hash and prunes touched nodes
*/
public synchronized void revertUpdate(byte[] updateHash) {
long s = System.currentTimeMillis();
Update update = journal.get(updateHash);
if (update == null) throw new RuntimeException("No update found: " + Hex.toHexString(updateHash));

Expand All @@ -197,6 +208,7 @@ public synchronized void revertUpdate(byte[] updateHash) {
update.deletedKeys.forEach(this::firePruning);

journal.delete(updateHash);
pruneTime += System.currentTimeMillis() - s;
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ public class ReadCache<Key, Value> extends AbstractCachedSource<Key, Value> {

private final Value NULL = (Value) new Object();

private Map<Key, Value> cache;
protected Map<Key, Value> cache;
private boolean byteKeyMap;

public ReadCache(Source<Key, Value> src) {
Expand Down Expand Up @@ -110,10 +110,18 @@ public Value get(Key key) {
ret = getSource().get(key);
cache.put(key, ret == null ? NULL : ret);
cacheAdded(key, ret);
onSourceHit(ret);
}
onGet(ret);
return ret;
}

/**
 * Extension hook invoked on a cache miss, right after the backing source
 * was consulted. {@code val} is whatever the source returned and may be
 * {@code null} when the source has no entry for the key either.
 * Default implementation is a no-op; subclasses override it to gather statistics.
 */
protected void onSourceHit(Value val) {
}

/**
 * Extension hook invoked at the end of every {@code get} with the value
 * being returned to the caller (may be {@code null}).
 * Default implementation is a no-op; subclasses override it to gather statistics.
 */
protected void onGet(Value val) {
}

@Override
public void delete(Key key) {
checkByteArrKey(key);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@

import org.ethereum.crypto.HashUtil;
import org.ethereum.datasource.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Persistent source of {@link PruneEntry} data. <br/>
Expand All @@ -29,6 +31,8 @@
*/
public class PruneEntrySource implements Source<byte[], PruneEntry> {

private static final Logger logger = LoggerFactory.getLogger("prune");

private byte[] filterKey = HashUtil.sha3("filterKey".getBytes());

Source<byte[], byte[]> store;
Expand All @@ -53,7 +57,40 @@ protected WriteCache<byte[], PruneEntry> createCache(Source<byte[], PruneEntry>
}.withName("pruneSource");

// 64 bytes - rounded up size of the entry
readCache = new ReadCache.BytesKey<>(writeCache).withMaxCapacity(cacheSize * 1024 * 1024 / 64);
// Instrumented read cache: counts source hits, "false" source hits (source
// also had nothing), and overall gets, logging a rate summary every 100k reads.
readCache = new ReadCache.BytesKey<PruneEntry>(writeCache) {

// Cache misses served by the source since the last report / overall.
int srcHits = 0;
int srcHitsTotal = 0;
// Misses where the source returned null too, since the last report / overall.
int falseHits = 0;
int falseHitsTotal = 0;
// Total number of get() calls; also paces the periodic report.
int hits = 0;
// High-water mark of cache entry count; never reset.
int maxEntries = 0;

@Override
protected void onSourceHit(PruneEntry val) {
++srcHits;
++srcHitsTotal;
// null from the source means the lookup missed both cache and store.
if (val == null) {
++falseHits;
++falseHitsTotal;
}
}

@Override
protected void onGet(PruneEntry val) {

maxEntries = Math.max(maxEntries, cache.size());

// Every 100k gets: log instant rates (per 100k window) and cumulative
// rates (per all gets so far), then reset the window counters.
if (++hits % 100_000 == 0) {
logger.info("ReadCache: srcHits {}/{} : {}/{} falseHits {}/{} : {}/{} entries {}/{}",
srcHits, srcHitsTotal, String.format("%.4f", (double) srcHits / 100_000), String.format("%.4f", (double) srcHitsTotal / hits),
falseHits, falseHitsTotal, String.format("%.4f", (double) falseHits / 100_000), String.format("%.4f", (double) falseHitsTotal / hits),
cache.size(), maxEntries);
falseHits = srcHits = 0;
}
}

}.withMaxCapacity(cacheSize * 1024 * 1024 / 64);

byte[] filterBytes = store.get(filterKey);
if (filterBytes != null) {
Expand Down

0 comments on commit d6d9dac

Please sign in to comment.