Skip to content

Commit

Permalink
Update benchmarks
Browse files Browse the repository at this point in the history
  • Loading branch information
benaadams committed Feb 8, 2023
1 parent 57bee33 commit a7ae5f1
Show file tree
Hide file tree
Showing 4 changed files with 256 additions and 12 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -11,23 +11,48 @@ public class LruCacheAddAtCapacityBenchmarks
const int Capacity = 16;
private object _object = new object();
private LruCache<int, object> shared;
private ICache<int, object> previous;


[GlobalSetup]
public void Setup()
{
shared = new LruCache<int, object>(Capacity, Capacity, string.Empty);
previous = new PreviousLruCache<int, object>(Capacity, Capacity, string.Empty);
}

[Benchmark]
public ICache<int, object> WithRecreation()
{
LruCache<int, object> cache = new LruCache<int, object>(Capacity, Capacity, string.Empty);
for (int j = 0; j < 1024 * 64; j++)
Fill(cache);

return cache;

void Fill(LruCache<int, object> cache)
{
cache.Set(j, _object);
for (int j = 0; j < 1024 * 64; j++)
{
cache.Set(j, _object);
}
}
}

[Benchmark(Baseline = true)]
public ICache<int, object> WithRecreation_Previous()
{
ICache<int, object> cache = new PreviousLruCache<int, object>(Capacity, Capacity, string.Empty);
Fill(cache);

return cache;

void Fill(ICache<int, object> cache)
{
for (int j = 0; j < 1024 * 64; j++)
{
cache.Set(j, _object);
}
}
}

[Benchmark]
Expand All @@ -40,5 +65,16 @@ public void WithClear()

shared.Clear();
}

[Benchmark(Baseline = true)]
public void WithClear_Previous()
{
for (int j = 0; j < 1024 * 64; j++)
{
previous.Set(j, _object);
}

previous.Clear();
}
}
}
26 changes: 24 additions & 2 deletions src/Nethermind/Nethermind.Benchmark/Core/LruCacheBenchmarks.cs
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,34 @@ public class LruCacheBenchmarks
public LruCache<int, object> WithItems()
{
LruCache<int, object> cache = new LruCache<int, object>(16, StartCapacity, string.Empty);
for (int j = 0; j < ItemsCount; j++)
Fill(cache);

return cache;

void Fill(LruCache<int, object> cache)
{
cache.Set(j, new object());
for (int j = 0; j < ItemsCount; j++)
{
cache.Set(j, new object());
}
}
}

[Benchmark(Baseline = true)]
public ICache<int, object> WithItems_Previous()
{
ICache<int, object> cache = new PreviousLruCache<int, object>(16, StartCapacity, string.Empty);
Fill(cache);

return cache;

void Fill(ICache<int, object> cache)
{
for (int j = 0; j < ItemsCount; j++)
{
cache.Set(j, new object());
}
}
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -20,26 +20,48 @@ public void InitKeccaks()
}
}

[Params(0, 2, 4, 8, 16, 32)]
public int StartCapacity { get; set; }
[Params(16, 32, 128)]
public int MaxCapacity { get; set; }

[Params(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28)]
[Params(1, 2, 8, 32, 64)]
public int ItemsCount { get; set; }

public Keccak[] Keys { get; set; } = new Keccak[28];
public Keccak[] Keys { get; set; } = new Keccak[64];

public byte[] Value { get; set; } = new byte[0];

[Benchmark]
public LruCache<Keccak, byte[]> WithItems()
public LruCache<KeccakKey, byte[]> WithItems()
{
LruCache<Keccak, byte[]> cache = new LruCache<Keccak, byte[]>(128, StartCapacity, String.Empty);
for (int j = 0; j < ItemsCount; j++)
LruCache<KeccakKey, byte[]> cache = new LruCache<KeccakKey, byte[]>(MaxCapacity, MaxCapacity, String.Empty);
Fill(cache);

return cache;

void Fill(LruCache<KeccakKey, byte[]> cache)
{
cache.Set(Keys[j], Value);
for (int j = 0; j < ItemsCount; j++)
{
cache.Set(Keys[j], Value);
}
}
}

[Benchmark(Baseline = true)]
public ICache<Keccak, byte[]> WithItems_Previous()
{
ICache<Keccak, byte[]> cache = new PreviousLruCache<Keccak, byte[]>(MaxCapacity, MaxCapacity, String.Empty);
Fill(cache);

return cache;

void Fill(ICache<Keccak, byte[]> cache)
{
for (int j = 0; j < ItemsCount; j++)
{
cache.Set(Keys[j], Value);
}
}
}
}
}
164 changes: 164 additions & 0 deletions src/Nethermind/Nethermind.Benchmark/Core/PreviousLruCache.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,164 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;

using Nethermind.Core.Caching;
using Nethermind.Core;

namespace Nethermind.Benchmarks.Core;

/// <summary>
/// Verbatim snapshot of the pre-optimization LRU cache (Dictionary + LinkedList with
/// [MethodImpl(MethodImplOptions.Synchronized)] locking, i.e. lock-on-this per call).
/// Kept unchanged so the benchmarks in this project can use it as the baseline to
/// compare the current <c>LruCache</c> against — do not "improve" or restyle it,
/// or the benchmark comparison becomes meaningless.
/// </summary>
public class PreviousLruCache<TKey, TValue> : ICache<TKey, TValue> where TKey : notnull
{
// Maximum number of entries retained; inserting beyond this evicts the least recently used entry.
private readonly int _maxCapacity;
// Key -> linked-list node lookup; the node holds both key and value (see LruCacheItem).
private readonly Dictionary<TKey, LinkedListNode<LruCacheItem>> _cacheMap;
// Recency order: least recently used at First, most recently used at Last.
private readonly LinkedList<LruCacheItem> _lruList;

/// <summary>
/// Creates the cache.
/// </summary>
/// <param name="maxCapacity">Eviction threshold; must be at least 1.</param>
/// <param name="startCapacity">Initial dictionary capacity (ignored for byte[] keys).</param>
/// <param name="name">Unused in this snapshot; kept for signature parity with LruCache.</param>
/// <exception cref="ArgumentOutOfRangeException">When <paramref name="maxCapacity"/> is less than 1.</exception>
public PreviousLruCache(int maxCapacity, int startCapacity, string name)
{
if (maxCapacity < 1)
{
throw new ArgumentOutOfRangeException();
}

_maxCapacity = maxCapacity;
// byte[] keys need structural (content) equality, not reference equality,
// so a dedicated comparer is substituted in that case.
_cacheMap = typeof(TKey) == typeof(byte[])
? new Dictionary<TKey, LinkedListNode<LruCacheItem>>((IEqualityComparer<TKey>)Nethermind.Core.Extensions.Bytes.EqualityComparer)
: new Dictionary<TKey, LinkedListNode<LruCacheItem>>(startCapacity); // do not initialize it at the full capacity
_lruList = new LinkedList<LruCacheItem>();
}

// Convenience overload: start with an empty (lazily grown) dictionary.
public PreviousLruCache(int maxCapacity, string name)
: this(maxCapacity, 0, name)
{
}

/// <summary>Removes all entries. Synchronized (locks on the instance).</summary>
[MethodImpl(MethodImplOptions.Synchronized)]
public void Clear()
{
_cacheMap.Clear();
_lruList.Clear();
}

/// <summary>
/// Returns the value for <paramref name="key"/>, marking it most recently used,
/// or default when absent. Synchronized (locks on the instance).
/// </summary>
[MethodImpl(MethodImplOptions.Synchronized)]
public TValue Get(TKey key)
{
if (_cacheMap.TryGetValue(key, out LinkedListNode<LruCacheItem>? node))
{
TValue value = node.Value.Value;
// Move the node to the tail: tail = most recently used.
_lruList.Remove(node);
_lruList.AddLast(node);
return value;
}

#pragma warning disable 8603
// fixed C# 9
return default;
#pragma warning restore 8603
}

/// <summary>
/// Try-pattern variant of <see cref="Get"/>; also refreshes recency on a hit.
/// Synchronized (locks on the instance).
/// </summary>
[MethodImpl(MethodImplOptions.Synchronized)]
public bool TryGet(TKey key, out TValue value)
{
if (_cacheMap.TryGetValue(key, out LinkedListNode<LruCacheItem>? node))
{
value = node.Value.Value;
// Move the node to the tail: tail = most recently used.
_lruList.Remove(node);
_lruList.AddLast(node);
return true;
}

#pragma warning disable 8601
// fixed C# 9
value = default;
#pragma warning restore 8601
return false;
}

/// <summary>
/// Inserts or updates <paramref name="key"/>. A null value deletes the entry.
/// Returns true when a new entry was added, false when an existing one was
/// updated (or when the null-value delete found nothing). Evicts the least
/// recently used entry when at capacity. Synchronized (locks on the instance).
/// </summary>
[MethodImpl(MethodImplOptions.Synchronized)]
public bool Set(TKey key, TValue val)
{
if (val is null)
{
// Null acts as a tombstone: setting null removes the key.
return Delete(key);
}

if (_cacheMap.TryGetValue(key, out LinkedListNode<LruCacheItem>? node))
{
// Update in place and refresh recency; no new entry was added.
node.Value.Value = val;
_lruList.Remove(node);
_lruList.AddLast(node);
return false;
}
else
{
if (_cacheMap.Count >= _maxCapacity)
{
// At capacity: recycle the LRU node for the new entry instead of allocating.
Replace(key, val);
}
else
{
LruCacheItem cacheItem = new LruCacheItem(key, val);
LinkedListNode<LruCacheItem> newNode = new LinkedListNode<LruCacheItem>(cacheItem);
_lruList.AddLast(newNode);
_cacheMap.Add(key, newNode);
}

return true;
}
}

/// <summary>
/// Removes <paramref name="key"/>; returns true when an entry was removed.
/// Synchronized (locks on the instance).
/// </summary>
[MethodImpl(MethodImplOptions.Synchronized)]
public bool Delete(TKey key)
{
if (_cacheMap.TryGetValue(key, out LinkedListNode<LruCacheItem>? node))
{
_lruList.Remove(node);
_cacheMap.Remove(key);
return true;
}

return false;
}

/// <summary>Membership check; does NOT refresh recency. Synchronized.</summary>
[MethodImpl(MethodImplOptions.Synchronized)]
public bool Contains(TKey key) => _cacheMap.ContainsKey(key);

/// <summary>Copies the current contents into a new dictionary. Synchronized.</summary>
[MethodImpl(MethodImplOptions.Synchronized)]
public IDictionary<TKey, TValue> Clone() => _lruList.ToDictionary(i => i.Key, i => i.Value);

// Evicts the LRU entry (list head) and reuses its node for the new key/value,
// avoiding a LinkedListNode + LruCacheItem allocation per eviction.
// Called only from Set, which holds the instance lock.
private void Replace(TKey key, TValue value)
{
LinkedListNode<LruCacheItem>? node = _lruList.First;
_lruList.RemoveFirst();
// node is non-null here: Replace is only reached when Count >= _maxCapacity >= 1.
_cacheMap.Remove(node!.Value.Key);

node.Value.Value = value;
node.Value.Key = key;
_lruList.AddLast(node);
_cacheMap.Add(key, node);
}

// Mutable key/value holder stored in the linked list; Key is rewritten when
// a node is recycled by Replace.
private class LruCacheItem
{
public LruCacheItem(TKey k, TValue v)
{
Key = k;
Value = v;
}

public TKey Key;
public TValue Value;
}

// Rough managed-memory footprint estimate for the current item count.
public long MemorySize => CalculateMemorySize(0, _cacheMap.Count);

// Estimates total bytes for the dictionary + linked list + per-item overhead;
// approximate by design (see comment below about dictionary growth).
public static long CalculateMemorySize(int keyPlusValueSize, int currentItemsCount)
{
// it may actually be different if the initial capacity not equal to max (depending on the dictionary growth path)

const int preInit = 48 /* LinkedList */ + 80 /* Dictionary */ + 24;
int postInit = 52 /* lazy init of two internal dictionary arrays + dictionary size times (entry size + int) */ + MemorySizes.FindNextPrime(currentItemsCount) * 28 + currentItemsCount * 80 /* LinkedListNode and CacheItem times items count */;
return MemorySizes.Align(preInit + postInit + keyPlusValueSize * currentItemsCount);
}
}

0 comments on commit a7ae5f1

Please sign in to comment.