diff --git a/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuSoakTests.cs b/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuSoakTests.cs index d988f3b7..86e0e075 100644 --- a/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuSoakTests.cs +++ b/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuSoakTests.cs @@ -1,287 +1,417 @@ -using System; -using System.Collections.Generic; +using System; +using System.Collections.Generic; +using System.Diagnostics; using System.Reflection; -using System.Threading.Tasks; +using System.Threading.Tasks; using BitFaster.Caching.Buffers; -using BitFaster.Caching.Lfu; -using BitFaster.Caching.Scheduler; -using FluentAssertions; -using Xunit; -using Xunit.Abstractions; - -namespace BitFaster.Caching.UnitTests.Lfu -{ - [Collection("Soak")] - public class ConcurrentLfuSoakTests - { - private const int soakIterations = 10; - private const int threads = 4; - private const int loopIterations = 100_000; - - private readonly ITestOutputHelper output; - public ConcurrentLfuSoakTests(ITestOutputHelper testOutputHelper) - { - this.output = testOutputHelper; +using BitFaster.Caching.Lfu; +using BitFaster.Caching.Scheduler; +using FluentAssertions; +using Xunit; +using Xunit.Abstractions; + +namespace BitFaster.Caching.UnitTests.Lfu +{ + [Collection("Soak")] + public class ConcurrentLfuSoakTests + { + private const int soakIterations = 10; + private const int threads = 4; + private const int loopIterations = 100_000; + + private readonly ITestOutputHelper output; + public ConcurrentLfuSoakTests(ITestOutputHelper testOutputHelper) + { + this.output = testOutputHelper; + } + + //Elapsed 411.6918ms - 0.0004116918ns/op + //Cache hits 1689839 (sampled 16.89839%) + //Maintenance ops 31 + [Fact] + public void VerifyHitsWithBackgroundScheduler() + { + var cache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default); + // when running all tests in parallel, sample count drops significantly: set low bar for stability. + VerifyHits(cache, iterations: 10_000_000, minSamples: 250_000); + } + + //Elapsed 590.8154ms - 0.0005908154ns/op + //Cache hits 3441470 (sampled 34.414699999999996%) + //Maintenance ops 20 + [Fact] + public void VerifyHitsWithThreadPoolScheduler() + { + // when running all tests in parallel, sample count drops significantly: set low bar for stability. + var cache = new ConcurrentLfu(1, 20, new ThreadPoolScheduler(), EqualityComparer.Default); + VerifyHits(cache, iterations: 10_000_000, minSamples: 500_000); + } + + //Elapsed 273.0148ms - 0.0002730148ns/op + //Cache hits 0 (sampled 0%) + //Maintenance ops 1 + [Fact] + public void VerifyHitsWithNullScheduler() + { + var cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default); + VerifyHits(cache, iterations: 10_000_000, minSamples: -1); + } + + //Will drop 78125 reads. + //Elapsed 847.5331ms - 0.0008475331ns/op + //Cache hits 10000000 (sampled 99.2248062015504%) + //Maintenance ops 78126 + [Fact] + public void VerifyHitsWithForegroundScheduler() + { + var cache = new ConcurrentLfu(1, 20, new ForegroundScheduler(), EqualityComparer.Default); + + // Note: TryAdd will drop 1 read per full read buffer, since TryAdd will return false + // before TryScheduleDrain is called. This serves as sanity check. 
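+ // Worked example of the drop arithmetic (assuming ConcurrentLfu.DefaultBufferSize is 128, which is
+ // consistent with the 78125 figure in the header comment above): dropped = 10_000_000 / 128 = 78_125
+ // reads, and one maintenance run per full read buffer plus the initial run lines up with the
+ // 78126 maintenance ops recorded above.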
+ int iterations = 10_000_000; + int dropped = iterations / ConcurrentLfu.DefaultBufferSize; + + this.output.WriteLine($"Will drop {dropped} reads."); + + VerifyHits(cache, iterations: iterations + dropped, minSamples: iterations); + } + + [Fact] + public void VerifyMisses() + { + var cache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default); + + int iterations = 100_000; + Func func = x => x; + + var start = Stopwatch.GetTimestamp(); + + for (int i = 0; i < iterations; i++) + { + cache.GetOrAdd(i, func); + } + + var end = Stopwatch.GetTimestamp(); + + cache.DoMaintenance(); + + var totalTicks = end - start; + var timeMs = ((double)totalTicks / Stopwatch.Frequency) * 1000.0; + var timeNs = timeMs / 1_000_000; + + var timePerOp = timeMs / (double)iterations; + var samplePercent = cache.Metrics.Value.Misses / (double)iterations * 100; + + this.output.WriteLine($"Elapsed {timeMs}ms - {timeNs}ns/op"); + this.output.WriteLine($"Cache misses {cache.Metrics.Value.Misses} (sampled {samplePercent}%)"); + this.output.WriteLine($"Maintenance ops {cache.Scheduler.RunCount}"); + + cache.Metrics.Value.Misses.Should().Be(iterations); + } + + private void VerifyHits(ConcurrentLfu cache, int iterations, int minSamples) + { + Func func = x => x; + cache.GetOrAdd(1, func); + + var start = Stopwatch.GetTimestamp(); + + for (int i = 0; i < iterations; i++) + { + cache.GetOrAdd(1, func); + } + + var end = Stopwatch.GetTimestamp(); + + var totalTicks = end - start; + var timeMs = ((double)totalTicks / Stopwatch.Frequency) * 1000.0; + var timeNs = timeMs / 1_000_000; + + var timePerOp = timeMs / (double)iterations; + var samplePercent = cache.Metrics.Value.Hits / (double)iterations * 100; + + this.output.WriteLine($"Elapsed {timeMs}ms - {timeNs}ns/op"); + this.output.WriteLine($"Cache hits {cache.Metrics.Value.Hits} (sampled {samplePercent}%)"); + this.output.WriteLine($"Maintenance ops {cache.Scheduler.RunCount}"); + + if (cache.Scheduler.LastException.HasValue) + { + this.output.WriteLine($"Error: {cache.Scheduler.LastException.Value}"); + } + + cache.Metrics.Value.Hits.Should().BeGreaterThanOrEqualTo(minSamples); + + // verify this doesn't block or throw + var b = cache.Scheduler as BackgroundThreadScheduler; + b?.Dispose(); + } + + [Theory] + [Repeat(soakIterations)] + public async Task WhenConcurrentGetCacheEndsInConsistentState(int iteration) + { + var lfu = CreateWithBackgroundScheduler(); + + await Threaded.Run(threads, () => { + for (int i = 0; i < loopIterations; i++) + { + lfu.GetOrAdd(i + 1, i => i.ToString()); + } + }); + + await RunIntegrityCheckAsync(lfu, iteration); } - [Theory] - [Repeat(soakIterations)] - public async Task WhenConcurrentGetCacheEndsInConsistentState(int iteration) - { - var lfu = CreateWithBackgroundScheduler(); - - await Threaded.Run(threads, () => { - for (int i = 0; i < loopIterations; i++) - { - lfu.GetOrAdd(i + 1, i => i.ToString()); - } - }); - - await RunIntegrityCheckAsync(lfu, iteration); + [Theory] + [Repeat(soakIterations)] + public async Task WhenConcurrentGetAsyncCacheEndsInConsistentState(int iteration) + { + var lfu = CreateWithBackgroundScheduler(); + + await Threaded.RunAsync(threads, async () => { + for (int i = 0; i < loopIterations; i++) + { + await lfu.GetOrAddAsync(i + 1, i => Task.FromResult(i.ToString())); + } + }); + + await RunIntegrityCheckAsync(lfu, iteration); + } + + [Theory] + [Repeat(soakIterations)] + public async Task WhenConcurrentGetWithArgCacheEndsInConsistentState(int iteration) + { + var lfu = 
CreateWithBackgroundScheduler(); + + await Threaded.Run(threads, () => { + for (int i = 0; i < loopIterations; i++) + { + // use the arg overload + lfu.GetOrAdd(i + 1, (i, s) => i.ToString(), "Foo"); + } + }); + + await RunIntegrityCheckAsync(lfu, iteration); } - [Theory] + [Theory] [Repeat(soakIterations)] - public async Task WhenConcurrentGetAsyncCacheEndsInConsistentState(int iteration) - { - var lfu = CreateWithBackgroundScheduler(); - - await Threaded.RunAsync(threads, async () => { - for (int i = 0; i < loopIterations; i++) - { - await lfu.GetOrAddAsync(i + 1, i => Task.FromResult(i.ToString())); - } + public async Task WhenConcurrentGetAsyncWithArgCacheEndsInConsistentState(int iteration) + { + var lfu = CreateWithBackgroundScheduler(); + + await Threaded.RunAsync(threads, async () => { + for (int i = 0; i < loopIterations; i++) + { + // use the arg overload + await lfu.GetOrAddAsync(i + 1, (i, s) => Task.FromResult(i.ToString()), "Foo"); + } }); - await RunIntegrityCheckAsync(lfu, iteration); + await RunIntegrityCheckAsync(lfu, iteration); } - [Theory] - [Repeat(soakIterations)] - public async Task WhenConcurrentGetWithArgCacheEndsInConsistentState(int iteration) - { - var lfu = CreateWithBackgroundScheduler(); - - await Threaded.Run(threads, () => { - for (int i = 0; i < loopIterations; i++) - { - // use the arg overload - lfu.GetOrAdd(i + 1, (i, s) => i.ToString(), "Foo"); - } - }); - - await RunIntegrityCheckAsync(lfu, iteration); - } - - [Theory] - [Repeat(soakIterations)] - public async Task WhenConcurrentGetAsyncWithArgCacheEndsInConsistentState(int iteration) - { - var lfu = CreateWithBackgroundScheduler(); - - await Threaded.RunAsync(threads, async () => { - for (int i = 0; i < loopIterations; i++) - { - // use the arg overload - await lfu.GetOrAddAsync(i + 1, (i, s) => Task.FromResult(i.ToString()), "Foo"); - } - }); - - await RunIntegrityCheckAsync(lfu, iteration); + [Theory] + [Repeat(soakIterations)] + public async Task WhenConcurrentGetAndUpdateCacheEndsInConsistentState(int iteration) + { + var lfu = CreateWithBackgroundScheduler(); + + await Threaded.Run(threads, () => { + for (int i = 0; i < loopIterations; i++) + { + lfu.TryUpdate(i + 1, i.ToString()); + lfu.GetOrAdd(i + 1, i => i.ToString()); + } + }); + + await RunIntegrityCheckAsync(lfu, iteration); } - [Theory] - [Repeat(soakIterations)] - public async Task WhenConcurrentGetAndUpdateCacheEndsInConsistentState(int iteration) - { - var lfu = CreateWithBackgroundScheduler(); - - await Threaded.Run(threads, () => { - for (int i = 0; i < loopIterations; i++) - { - lfu.TryUpdate(i + 1, i.ToString()); - lfu.GetOrAdd(i + 1, i => i.ToString()); - } + [Theory] + [Repeat(soakIterations)] + public async Task WhenSoakConcurrentGetAndRemoveCacheEndsInConsistentState(int iteration) + { + var lfu = CreateWithBackgroundScheduler(); + + await Threaded.Run(threads, () => { + for (int i = 0; i < loopIterations; i++) + { + lfu.TryRemove(i + 1); + lfu.GetOrAdd(i + 1, i => i.ToString()); + } }); - await RunIntegrityCheckAsync(lfu, iteration); + await RunIntegrityCheckAsync(lfu, iteration); } - [Theory] - [Repeat(soakIterations)] - public async Task WhenSoakConcurrentGetAndRemoveCacheEndsInConsistentState(int iteration) + [Theory] + [Repeat(soakIterations)] + public async Task WhenConcurrentGetAndRemoveKvpCacheEndsInConsistentState(int iteration) { - var lfu = CreateWithBackgroundScheduler(); - - await Threaded.Run(threads, () => { - for (int i = 0; i < loopIterations; i++) - { - lfu.TryRemove(i + 1); - lfu.GetOrAdd(i + 1, i => 
i.ToString()); - } - }); - - await RunIntegrityCheckAsync(lfu, iteration); + var lfu = CreateWithBackgroundScheduler(); + + await Threaded.Run(threads, () => { + for (int i = 0; i < loopIterations; i++) + { + lfu.TryRemove(new KeyValuePair(i + 1, (i + 1).ToString())); + lfu.GetOrAdd(i + 1, i => i.ToString()); + } + }); + + await RunIntegrityCheckAsync(lfu, iteration); } - [Theory] - [Repeat(soakIterations)] - public async Task WhenConcurrentGetAndRemoveKvpCacheEndsInConsistentState(int iteration) - { - var lfu = CreateWithBackgroundScheduler(); - - await Threaded.Run(threads, () => { - for (int i = 0; i < loopIterations; i++) - { - lfu.TryRemove(new KeyValuePair(i + 1, (i + 1).ToString())); - lfu.GetOrAdd(i + 1, i => i.ToString()); - } + [Fact] + public async Task ThreadedVerifyMisses() + { + // buffer size is 1, this will cause dropped writes on some threads where the buffer is full + var cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default); + + await Threaded.Run(threads, i => + { + Func func = x => x.ToString(); + + int start = i * loopIterations; + + for (int j = start; j < start + loopIterations; j++) + { + cache.GetOrAdd(j, func); + } }); - await RunIntegrityCheckAsync(lfu, iteration); - } - - [Fact] - public async Task ThreadedVerifyMisses() - { - // buffer size is 1, this will cause dropped writes on some threads where the buffer is full - var cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default); - - await Threaded.Run(threads, i => - { - Func func = x => x.ToString(); - - int start = i * loopIterations; - - for (int j = start; j < start + loopIterations; j++) - { - cache.GetOrAdd(j, func); - } - }); - - var samplePercent = cache.Metrics.Value.Misses / (double)loopIterations / threads * 100; - - this.output.WriteLine($"Cache misses {cache.Metrics.Value.Misses} (sampled {samplePercent}%)"); - this.output.WriteLine($"Maintenance ops {cache.Scheduler.RunCount}"); - - cache.Metrics.Value.Misses.Should().Be(loopIterations * threads); - RunIntegrityCheck(cache); + var samplePercent = cache.Metrics.Value.Misses / (double)loopIterations / threads * 100; + + this.output.WriteLine($"Cache misses {cache.Metrics.Value.Misses} (sampled {samplePercent}%)"); + this.output.WriteLine($"Maintenance ops {cache.Scheduler.RunCount}"); + + cache.Metrics.Value.Misses.Should().Be(loopIterations * threads); + RunIntegrityCheck(cache, this.output); } private ConcurrentLfu CreateWithBackgroundScheduler() { - var scheduler = new BackgroundThreadScheduler(); + var scheduler = new BackgroundThreadScheduler(); return new ConcurrentLfuBuilder().WithCapacity(9).WithScheduler(scheduler).Build() as ConcurrentLfu; } private async Task RunIntegrityCheckAsync(ConcurrentLfu lfu, int iteration) { - this.output.WriteLine($"iteration {iteration} keys={string.Join(" ", lfu.Keys)}"); - - var scheduler = lfu.Scheduler as BackgroundThreadScheduler; + this.output.WriteLine($"iteration {iteration} keys={string.Join(" ", lfu.Keys)}"); + + var scheduler = lfu.Scheduler as BackgroundThreadScheduler; scheduler.Dispose(); - await scheduler.Completion; - - RunIntegrityCheck(lfu); + await scheduler.Completion; + + RunIntegrityCheck(lfu, this.output); } - - - private static void RunIntegrityCheck(ConcurrentLfu cache) + + + private static void RunIntegrityCheck(ConcurrentLfu cache, ITestOutputHelper output) { - new ConcurrentLfuIntegrityChecker(cache).Validate(); - } + new ConcurrentLfuIntegrityChecker(cache).Validate(output); + } } - public class ConcurrentLfuIntegrityChecker - { + 
public class ConcurrentLfuIntegrityChecker + { private readonly ConcurrentLfu cache; - private readonly LfuNodeList windowLru; - private readonly LfuNodeList probationLru; - private readonly LfuNodeList protectedLru; - - private readonly StripedMpscBuffer> readBuffer; - private readonly MpscBoundedBuffer> writeBuffer; - - private static FieldInfo windowLruField = typeof(ConcurrentLfu).GetField("windowLru", BindingFlags.NonPublic | BindingFlags.Instance); - private static FieldInfo probationLruField = typeof(ConcurrentLfu).GetField("probationLru", BindingFlags.NonPublic | BindingFlags.Instance); - private static FieldInfo protectedLruField = typeof(ConcurrentLfu).GetField("protectedLru", BindingFlags.NonPublic | BindingFlags.Instance); - - private static FieldInfo readBufferField = typeof(ConcurrentLfu).GetField("readBuffer", BindingFlags.NonPublic | BindingFlags.Instance); - private static FieldInfo writeBufferField = typeof(ConcurrentLfu).GetField("writeBuffer", BindingFlags.NonPublic | BindingFlags.Instance); - - public ConcurrentLfuIntegrityChecker(ConcurrentLfu cache) - { - this.cache = cache; - - // get lrus via reflection - this.windowLru = (LfuNodeList)windowLruField.GetValue(cache); - this.probationLru = (LfuNodeList)probationLruField.GetValue(cache); - this.protectedLru = (LfuNodeList)protectedLruField.GetValue(cache); - - this.readBuffer = (StripedMpscBuffer>)readBufferField.GetValue(cache); - this.writeBuffer = (MpscBoundedBuffer>)writeBufferField.GetValue(cache); - } - - public void Validate() - { - cache.DoMaintenance(); - - // buffers should be empty after maintenance - this.readBuffer.Count.Should().Be(0); - this.writeBuffer.Count.Should().Be(0); - - // all the items in the LRUs must exist in the dictionary. - // no items should be marked as removed after maintenance has run - VerifyLruInDictionary(this.windowLru); - VerifyLruInDictionary(this.probationLru); - VerifyLruInDictionary(this.protectedLru); - - // all the items in the dictionary must exist in the node list - VerifyDictionaryInLrus(); - - // cache must be within capacity - cache.Count.Should().BeLessThanOrEqualTo(cache.Capacity, "capacity out of valid range"); - } - - private void VerifyLruInDictionary(LfuNodeList lfuNodes) + private readonly LfuNodeList windowLru; + private readonly LfuNodeList probationLru; + private readonly LfuNodeList protectedLru; + + private readonly StripedMpscBuffer> readBuffer; + private readonly MpscBoundedBuffer> writeBuffer; + + private static FieldInfo windowLruField = typeof(ConcurrentLfu).GetField("windowLru", BindingFlags.NonPublic | BindingFlags.Instance); + private static FieldInfo probationLruField = typeof(ConcurrentLfu).GetField("probationLru", BindingFlags.NonPublic | BindingFlags.Instance); + private static FieldInfo protectedLruField = typeof(ConcurrentLfu).GetField("protectedLru", BindingFlags.NonPublic | BindingFlags.Instance); + + private static FieldInfo readBufferField = typeof(ConcurrentLfu).GetField("readBuffer", BindingFlags.NonPublic | BindingFlags.Instance); + private static FieldInfo writeBufferField = typeof(ConcurrentLfu).GetField("writeBuffer", BindingFlags.NonPublic | BindingFlags.Instance); + + public ConcurrentLfuIntegrityChecker(ConcurrentLfu cache) + { + this.cache = cache; + + // get lrus via reflection + this.windowLru = (LfuNodeList)windowLruField.GetValue(cache); + this.probationLru = (LfuNodeList)probationLruField.GetValue(cache); + this.protectedLru = (LfuNodeList)protectedLruField.GetValue(cache); + + this.readBuffer = 
(StripedMpscBuffer>)readBufferField.GetValue(cache); + this.writeBuffer = (MpscBoundedBuffer>)writeBufferField.GetValue(cache); + } + + public void Validate(ITestOutputHelper output) + { + cache.DoMaintenance(); + + // buffers should be empty after maintenance + this.readBuffer.Count.Should().Be(0); + this.writeBuffer.Count.Should().Be(0); + + // all the items in the LRUs must exist in the dictionary. + // no items should be marked as removed after maintenance has run + VerifyLruInDictionary(this.windowLru, output); + VerifyLruInDictionary(this.probationLru, output); + VerifyLruInDictionary(this.protectedLru, output); + + // all the items in the dictionary must exist in the node list + VerifyDictionaryInLrus(); + + // cache must be within capacity + cache.Count.Should().BeLessThanOrEqualTo(cache.Capacity, "capacity out of valid range"); + } + + private void VerifyLruInDictionary(LfuNodeList lfuNodes, ITestOutputHelper output) { - var node = lfuNodes.First; - - while (node != null) + var node = lfuNodes.First; + + while (node != null) { node.WasRemoved.Should().BeFalse(); node.WasDeleted.Should().BeFalse(); - cache.TryGet(node.Key, out _).Should().BeTrue(); - - node = node.Next; - } - } - + + // This can occur if there is a race between: + // Thread 1: TryRemove, delete node from dictionary, set WasRemoved flag + // Thread 2: Check WasRemoved flag, if not add to lru + // It's not clear how WasRemoved can be false in this situation. + if (!cache.TryGet(node.Key, out _)) + { + output.WriteLine($"Orphaned node with key {node.Key} detected."); + } + + node = node.Next; + } + } + private void VerifyDictionaryInLrus() { foreach (var kvp in this.cache) { - var exists = Exists(kvp, this.windowLru) || Exists(kvp, this.probationLru) || Exists(kvp, this.protectedLru); - exists.Should().BeTrue($"key {kvp.Key} should exist in LRU lists"); - } - } - + var exists = Exists(kvp, this.windowLru) || Exists(kvp, this.probationLru) || Exists(kvp, this.protectedLru); + exists.Should().BeTrue($"key {kvp.Key} must exist in LRU lists"); + } + } + private static bool Exists(KeyValuePair kvp, LfuNodeList lfuNodes) { - var node = lfuNodes.First; - + var node = lfuNodes.First; + while (node != null) { if (EqualityComparer.Default.Equals(node.Key, kvp.Key)) { return true; } - - node = node.Next; - } - - return false; - } - } -} + + node = node.Next; + } + + return false; + } + } +} diff --git a/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs b/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs index e2fae3ff..791c534a 100644 --- a/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs +++ b/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs @@ -1,702 +1,702 @@ -using System; -using System.Collections; -using System.Collections.Generic; -using System.Diagnostics; -using System.Linq; -using System.Threading.Tasks; -using BitFaster.Caching.Buffers; -using BitFaster.Caching.Lfu; -using BitFaster.Caching.Scheduler; -using BitFaster.Caching.UnitTests.Lru; -using FluentAssertions; -using Xunit; -using Xunit.Abstractions; - -namespace BitFaster.Caching.UnitTests.Lfu -{ - public class ConcurrentLfuTests - { - private readonly ITestOutputHelper output; - - private ConcurrentLfu cache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default); - private ValueFactory valueFactory = new ValueFactory(); - - public ConcurrentLfuTests(ITestOutputHelper output) - { - this.output = output; - } - - [Fact] - public void DefaultSchedulerIsThreadPool() - { - var cache = new ConcurrentLfu(20); - 
cache.Scheduler.Should().BeOfType(); - } - - [Fact] - public void WhenKeyIsRequestedItIsCreatedAndCached() - { - var result1 = cache.GetOrAdd(1, valueFactory.Create); - var result2 = cache.GetOrAdd(1, valueFactory.Create); - - valueFactory.timesCalled.Should().Be(1); - result1.Should().Be(result2); - } - - [Fact] - public void WhenKeyIsRequestedWithArgItIsCreatedAndCached() - { - var result1 = cache.GetOrAdd(1, valueFactory.Create, 9); - var result2 = cache.GetOrAdd(1, valueFactory.Create, 17); - - valueFactory.timesCalled.Should().Be(1); - result1.Should().Be(result2); - } - - [Fact] - public async Task WhenKeyIsRequesteItIsCreatedAndCachedAsync() - { - var result1 = await cache.GetOrAddAsync(1, valueFactory.CreateAsync); - var result2 = await cache.GetOrAddAsync(1, valueFactory.CreateAsync); - - valueFactory.timesCalled.Should().Be(1); - result1.Should().Be(result2); - } - - [Fact] - public async Task WhenKeyIsRequestedWithArgItIsCreatedAndCachedAsync() - { - var result1 = await cache.GetOrAddAsync(1, valueFactory.CreateAsync, 9); - var result2 = await cache.GetOrAddAsync(1, valueFactory.CreateAsync, 17); - - valueFactory.timesCalled.Should().Be(1); - result1.Should().Be(result2); - } - - [Fact] - public void WhenItemsAddedExceedsCapacityItemsAreDiscarded() - { - cache.GetOrAdd(1, k => k); - cache.GetOrAdd(1, k => k); - cache.GetOrAdd(2, k => k); - cache.GetOrAdd(2, k => k); - - for (int i = 0; i < 25; i++) - { - cache.GetOrAdd(i, k => k); - } - - cache.DoMaintenance(); - LogLru(); - - cache.Count.Should().Be(20); - } - - [Fact] - public void WhenItemIsEvictedItIsDisposed() - { - var dcache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default); - var disposables = new DisposableItem[25]; - - for (int i = 0; i < 25; i++) - { - disposables[i] = new DisposableItem(); - dcache.GetOrAdd(i, k => disposables[i]); - } - - dcache.DoMaintenance(); - LogLru(); - - dcache.Count.Should().Be(20); - disposables.Count(d => d.IsDisposed).Should().Be(5); - } - - // protected 15 - // probation 4 - // window 1 - [Fact] - public void WhenNewItemsAreAddedTheyArePromotedBasedOnFrequency() - { - for (int i = 0; i < 20; i++) - { - cache.GetOrAdd(i, k => k); - } - - // W [19] Protected [] Probation [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18] - cache.DoMaintenance(); - LogLru(); - - for (int i = 0; i < 15; i++) - { - cache.GetOrAdd(i, k => k); - } - - // W [19] Protected [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14] Probation [15,16,17,18] - cache.DoMaintenance(); - LogLru(); - - for (int k = 0; k < 2; k++) - { - for (int j = 0; j < 6; j++) - { - for (int i = 0; i < 15; i++) - { - cache.GetOrAdd(j + 20, k => k); - } - cache.DoMaintenance(); - LogLru(); - } - } - - // Values promoted to probation then protected: - // W[21] Protected[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] Probation[16, 17, 18, 20] - // W[22] Protected[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] Probation[17, 18, 20, 21] - // W[23] Protected[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] Probation[18, 20, 21, 22] - // W[24] Protected[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] Probation[20, 21, 22, 23] - // W[25] Protected[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] Probation[20, 21, 22, 23] - // W[25] Protected[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 20] Probation[21, 22, 23, 0] - // W[25] Protected[2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 20, 21] Probation[22, 23, 0, 1] - // W[25] Protected[3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 20, 21, 22] Probation[23, 0, 1, 2] - // W[25] 
Protected[4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 20, 21, 22, 23] Probation[0, 1, 2, 3] - // W[24] Protected[4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 20, 21, 22, 23] Probation[1, 2, 3, 25] - // W[24] Protected[5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 20, 21, 22, 23, 25] Probation[1, 2, 3, 4] - - cache.Count.Should().Be(20); - - // W [24] Protected [5,6,7,8,9,10,11,12,13,14,20,21,22,23,25] Probation [] - cache.Trim(4); - cache.DoMaintenance(); - LogLru(); - - cache.TryGet(1, out var value1).Should().BeFalse(); - cache.TryGet(2, out var value2).Should().BeFalse(); - cache.Count.Should().Be(16); - } - - [Fact] - public void ReadPromotesProbation() - { - for (int i = 0; i < 20; i++) - { - cache.GetOrAdd(i, k => k); - } - - // W [19] Protected [] Probation [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18] - cache.DoMaintenance(); - LogLru(); - - // W [19] Protected [16] Probation [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,17,18] - cache.GetOrAdd(16, k => k); - cache.DoMaintenance(); - LogLru(); - - for (int i = 25; i < 50; i++) - { - cache.GetOrAdd(i, k => k); - cache.GetOrAdd(i, k => k); - } - - // W [49] Protected [16] Probation [25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42] - cache.DoMaintenance(); - LogLru(); - - cache.Trim(18); - - // W [49] Protected [16] Probation [] - cache.DoMaintenance(); - LogLru(); - - cache.TryGet(16, out var value1).Should().BeTrue(); - } - - // when probation item is written it is moved to protected - [Fact] - public void WritePromotesProbation() - { - for (int i = 0; i < 20; i++) - { - cache.GetOrAdd(i, k => k); - } - - // W [19] Protected [] Probation [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18] - cache.DoMaintenance(); - LogLru(); - - // W [24] Protected [16] Probation [2,6,7,8,9,10,11,12,13,14,15,17,18,19,20,21,22,23] - cache.TryUpdate(16, -16).Should().BeTrue(); - cache.DoMaintenance(); - LogLru(); - - for (int i = 25; i < 50; i++) - { - cache.GetOrAdd(i, k => k); - cache.GetOrAdd(i, k => k); - } - - // W [49] Protected [16] Probation [2,6,7,8,9,10,11,12,13,14,15,17,18,19,20,21,22,23] - cache.DoMaintenance(); - LogLru(); - - cache.Trim(18); - - // W [49] Protected [16] Probation [] - cache.DoMaintenance(); - LogLru(); - - cache.TryGet(16, out var value1).Should().BeTrue(); - } - - [Fact] - public void ReadUpdatesProtectedLruOrder() - { - // W [19] Protected [] Probation [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18] - for (int i = 0; i < 20; i++) - { - cache.GetOrAdd(i, k => k); - } - - cache.DoMaintenance(); - LogLru(); - - cache.GetOrAdd(7, k => k); - cache.GetOrAdd(8, k => k); - cache.GetOrAdd(9, k => k); - - // W [19] Protected [7,8,9] Probation [0,1,2,3,4,5,6,10,11,12,13,14,15,16,17,18] - cache.DoMaintenance(); - LogLru(); - - // W [19] Protected [8,9,7] Probation [0,1,2,3,4,5,6,10,11,12,13,14,15,16,17,18] - // element 7 now moved to back of LRU - cache.GetOrAdd(7, k => k); - cache.DoMaintenance(); - LogLru(); - - // Trim is LRU order - //W [19] Protected [7] Probation [] - cache.Trim(18); - cache.DoMaintenance(); - LogLru(); - - cache.TryGet(7, out var _).Should().BeTrue(); - } - - [Fact] - public void WriteUpdatesProtectedLruOrder() - { - // W [19] Protected [] Probation [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18] - for (int i = 0; i < 20; i++) - { - cache.GetOrAdd(i, k => k); - } - - cache.DoMaintenance(); - LogLru(); - - cache.GetOrAdd(7, k => k); - cache.GetOrAdd(8, k => k); - cache.GetOrAdd(9, k => k); - - // W [19] Protected [7,8,9] Probation [0,1,2,3,4,5,6,10,11,12,13,14,15,16,17,18] - cache.DoMaintenance(); - LogLru(); - - // W [19] 
Protected [8,9,7] Probation [0,1,2,3,4,5,6,10,11,12,13,14,15,16,17,18] - // element 7 now moved to back of LRU - cache.TryUpdate(7, -7).Should().BeTrue(); - cache.DoMaintenance(); - LogLru(); - - // Trim is LRU order - //W [19] Protected [7] Probation [] - cache.Trim(18); - cache.DoMaintenance(); - LogLru(); - - cache.TryGet(7, out var _).Should().BeTrue(); - } - - [Fact] - public void WhenHitRateChangesWindowSizeIsAdapted() - { - cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default); - - // First completely fill the cache, push entries into protected - for (int i = 0; i < 20; i++) - { - cache.GetOrAdd(i, k => k); - } - - // W [19] Protected [] Probation [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18] - cache.DoMaintenance(); - LogLru(); - - for (int i = 0; i < 15; i++) - { - cache.GetOrAdd(i, k => k); - } - - // W [19] Protected [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14] Probation [15,16,17,18] - cache.DoMaintenance(); - LogLru(); - - // The reset sample size is 200, so do 200 cache hits - // W [19] Protected [12,13,14,15,16,17,18,0,1,2,3,4,5,6,7] Probation [8,9,10,11] - for (int j = 0; j < 10; j++) - for (int i = 0; i < 20; i++) - { - cache.GetOrAdd(i, k => k); - } - - cache.DoMaintenance(); - LogLru(); - - // then miss 200 times - // W [300] Protected [12,13,14,15,16,17,18,0,1,2,3,4,5,6,7] Probation [9,10,11,227] - for (int i = 0; i < 201; i++) - { - cache.GetOrAdd(i + 100, k => k); - } - - cache.DoMaintenance(); - LogLru(); - - // then miss 200 more times (window adaptation +1 window slots) - // W [399,400] Protected [14,15,16,17,18,0,1,2,3,4,5,6,7,227] Probation [9,10,11,12] - for (int i = 0; i < 201; i++) - { - cache.GetOrAdd(i + 200, k => k); - } - - cache.DoMaintenance(); - LogLru(); - - // make 2 requests to new keys, if window is size is now 2 both will exist: - cache.GetOrAdd(666, k => k); - cache.GetOrAdd(667, k => k); - - cache.DoMaintenance(); - LogLru(); - - cache.TryGet(666, out var _).Should().BeTrue(); - cache.TryGet(667, out var _).Should().BeTrue(); - - this.output.WriteLine($"Scheduler ran {cache.Scheduler.RunCount} times."); - } - - [Fact] - public void ReadSchedulesMaintenanceWhenBufferIsFull() - { - var scheduler = new TestScheduler(); - cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default); - - cache.GetOrAdd(1, k => k); - scheduler.RunCount.Should().Be(1); - cache.DoMaintenance(); - - for (int i = 0; i < ConcurrentLfu.DefaultBufferSize; i++) - { - scheduler.RunCount.Should().Be(1); - cache.GetOrAdd(1, k => k); - } - - // read buffer is now full, next read triggers maintenance - cache.GetOrAdd(1, k => k); - scheduler.RunCount.Should().Be(2); - } - - [Fact] - public void WhenReadBufferIsFullReadsAreDropped() - { - var scheduler = new TestScheduler(); - cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default); - - cache.GetOrAdd(1, k => k); - scheduler.RunCount.Should().Be(1); - cache.DoMaintenance(); - - for (int i = 0; i < ConcurrentLfu.DefaultBufferSize * 2; i++) - { - cache.GetOrAdd(1, k => k); - } - - cache.DoMaintenance(); - - cache.Metrics.Value.Hits.Should().Be(ConcurrentLfu.DefaultBufferSize); - } - - [Fact] - public void WhenWriteBufferIsFullAddDoesMaintenance() - { - var bufferSize = ConcurrentLfu.DefaultBufferSize; - var scheduler = new TestScheduler(); - - cache = new ConcurrentLfu(1, bufferSize * 2, scheduler, EqualityComparer.Default); - - // add an item, flush write buffer - cache.GetOrAdd(-1, k => k); - cache.DoMaintenance(); - - // remove the item but don't flush, it is now in the write buffer and 
maintenance is scheduled - cache.TryRemove(-1).Should().BeTrue(); - - // add buffer size items, last iteration will invoke maintenance on the foreground since write - // buffer is full and test scheduler did not do any work - for (int i = 0; i < bufferSize; i++) - { - cache.GetOrAdd(i, k => k); - } - - // pending write (to remove -1) should be flushed by the 128th write calling maintenance - // directly within AfterWrite - cache.TryGet(-1, out var _).Should().BeFalse(); - } - -// backcompat: remove conditional compile -#if NETCOREAPP3_0_OR_GREATER - [Fact] - public void WhenWriteBufferIsFullUpdatesAreDropped() - { - int capacity = 20; - var bufferSize = Math.Min(BitOps.CeilingPowerOfTwo(capacity), 128); - var scheduler = new TestScheduler(); - cache = new ConcurrentLfu(1, capacity, scheduler, EqualityComparer.Default); - - cache.GetOrAdd(1, k => k); - scheduler.RunCount.Should().Be(1); - cache.DoMaintenance(); - - for (int i = 0; i < bufferSize * 2; i++) - { - cache.TryUpdate(1, i); - } - - cache.DoMaintenance(); - - cache.Metrics.Value.Updated.Should().Be(bufferSize); - } -#endif - - [Fact] - public void EvictionPolicyReturnsCapacity() - { - cache.Policy.Eviction.Value.Capacity.Should().Be(20); - } - - [Fact] - public void ExpireAfterWriteIsDisabled() - { - cache.Policy.ExpireAfterWrite.HasValue.Should().BeFalse(); - } - - [Fact] - public void EventsAreDisabled() - { - cache.Events.HasValue.Should().BeFalse(); - } - - [Fact] - public void MetricsAreEnabled() - { - cache.Metrics.HasValue.Should().BeTrue(); - } - - [Fact] - public void WhenItemIsAddedThenRetrievedMetricHitRatioIsHalf() - { - cache.GetOrAdd(1, k => k); - bool result = cache.TryGet(1, out var value); - - cache.DoMaintenance(); - - cache.Metrics.Value.HitRatio.Should().Be(0.5); - cache.Metrics.Value.Hits.Should().Be(1); - cache.Metrics.Value.Misses.Should().Be(1); - } - - [Fact] - public void WhenItemIsEvictedMetricRecordsCount() - { - cache.GetOrAdd(1, k => k); - cache.GetOrAdd(1, k => k); - cache.GetOrAdd(2, k => k); - cache.GetOrAdd(2, k => k); - - for (int i = 0; i < 25; i++) - { - cache.GetOrAdd(i, k => k); - } - - cache.DoMaintenance(); - - cache.Metrics.Value.Evicted.Should().Be(5); - } - - [Fact] - public void WhenItemsAddedKeysContainsTheKeys() - { - cache.Count.Should().Be(0); - cache.GetOrAdd(1, k => k); - cache.GetOrAdd(2, k => k); - cache.Keys.Should().BeEquivalentTo(new[] { 1, 2 }); - } - - [Fact] - public void WhenItemsAddedGenericEnumerateContainsKvps() - { - cache.Count.Should().Be(0); - cache.GetOrAdd(1, k => k + 1); - cache.GetOrAdd(2, k => k + 1); - - cache.Should().BeEquivalentTo(new[] { new KeyValuePair(1, 2), new KeyValuePair(2, 3) }); - } - - [Fact] - public void WhenItemsAddedEnumerateContainsKvps() - { - cache.Count.Should().Be(0); - cache.GetOrAdd(1, k => k + 1); - cache.GetOrAdd(2, k => k + 1); - - var enumerable = (IEnumerable)cache; - enumerable.Should().BeEquivalentTo(new[] { new KeyValuePair(1, 2), new KeyValuePair(2, 3) }); - } - - [Fact] - public void WhenItemIsUpdatedItIsUpdated() - { - cache.GetOrAdd(1, k => k); - cache.AddOrUpdate(1, 2); - - cache.TryGet(1, out var value).Should().BeTrue(); - value.Should().Be(2); - } - - [Fact] - public void WhenItemDoesNotExistUpdatedAddsItem() - { - cache.AddOrUpdate(1, 2); - - cache.TryGet(1, out var value).Should().BeTrue(); - value.Should().Be(2); - } - - [Fact] - public void WhenKeyExistsTryRemoveRemovesItem() - { - cache.GetOrAdd(1, k => k); - - cache.TryRemove(1).Should().BeTrue(); - cache.TryGet(1, out _).Should().BeFalse(); - } - - [Fact] - 
public void WhenKeyExistsTryRemoveReturnsValue() - { - cache.GetOrAdd(1, valueFactory.Create); - - cache.TryRemove(1, out var value).Should().BeTrue(); - value.Should().Be(1); - } - - [Fact] - public void WhenItemExistsTryRemoveRemovesItem() - { - cache.GetOrAdd(1, k => k); - - cache.TryRemove(new KeyValuePair(1, 1)).Should().BeTrue(); - cache.TryGet(1, out _).Should().BeFalse(); - } - - [Fact] - public void WhenItemDoesntMatchTryRemoveDoesNotRemove() - { - cache.GetOrAdd(1, k => k); - - cache.TryRemove(new KeyValuePair(1, 2)).Should().BeFalse(); - cache.TryGet(1, out var value).Should().BeTrue(); - } - - [Fact] - public void WhenItemIsRemovedItIsDisposed() - { - var dcache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default); - var disposable = new DisposableItem(); - - dcache.GetOrAdd(1, k => disposable); - - dcache.TryRemove(1).Should().BeTrue(); - dcache.DoMaintenance(); - - disposable.IsDisposed.Should().BeTrue(); - } - - [Fact] - public void WhenItemIsRemovedEvictionCountIsIncremented() - { - cache.GetOrAdd(1, k => k); - - cache.TryRemove(1).Should().BeTrue(); - cache.DoMaintenance(); - - cache.Metrics.Value.Evicted.Should().Be(1); - } - - [Fact] - public void WhenItemDoesNotExistTryRemoveIsFalse() - { - cache.TryRemove(1).Should().BeFalse(); - } - - // OnWrite handles the case where a node is removed while the write buffer contains the node - [Fact] - public void WhenRemovedInWriteBuffer() - { - cache.GetOrAdd(1, k => k); - - // wait for the maintenance thread to run, this will attach the new node to the LRU list - cache.DoMaintenance(); - - // pending write in the buffer - cache.TryUpdate(1, 2); - - // immediately remove - cache.TryRemove(1).Should().BeTrue(); - - cache.DoMaintenance(); - - cache.TryGet(1, out var _).Should().BeFalse(); - } - - [Fact] - public void WhenItemDoesNotExistTryUpdateIsFalse() - { - cache.TryUpdate(1, 2).Should().BeFalse(); - } - - [Fact] +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Threading.Tasks; +using BitFaster.Caching.Buffers; +using BitFaster.Caching.Lfu; +using BitFaster.Caching.Scheduler; +using BitFaster.Caching.UnitTests.Lru; +using FluentAssertions; +using Xunit; +using Xunit.Abstractions; + +namespace BitFaster.Caching.UnitTests.Lfu +{ + public class ConcurrentLfuTests + { + private readonly ITestOutputHelper output; + + private ConcurrentLfu cache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default); + private ValueFactory valueFactory = new ValueFactory(); + + public ConcurrentLfuTests(ITestOutputHelper output) + { + this.output = output; + } + + [Fact] + public void DefaultSchedulerIsThreadPool() + { + var cache = new ConcurrentLfu(20); + cache.Scheduler.Should().BeOfType(); + } + + [Fact] + public void WhenKeyIsRequestedItIsCreatedAndCached() + { + var result1 = cache.GetOrAdd(1, valueFactory.Create); + var result2 = cache.GetOrAdd(1, valueFactory.Create); + + valueFactory.timesCalled.Should().Be(1); + result1.Should().Be(result2); + } + + [Fact] + public void WhenKeyIsRequestedWithArgItIsCreatedAndCached() + { + var result1 = cache.GetOrAdd(1, valueFactory.Create, 9); + var result2 = cache.GetOrAdd(1, valueFactory.Create, 17); + + valueFactory.timesCalled.Should().Be(1); + result1.Should().Be(result2); + } + + [Fact] + public async Task WhenKeyIsRequesteItIsCreatedAndCachedAsync() + { + var result1 = await cache.GetOrAddAsync(1, valueFactory.CreateAsync); + var result2 = await 
cache.GetOrAddAsync(1, valueFactory.CreateAsync); + + valueFactory.timesCalled.Should().Be(1); + result1.Should().Be(result2); + } + + [Fact] + public async Task WhenKeyIsRequestedWithArgItIsCreatedAndCachedAsync() + { + var result1 = await cache.GetOrAddAsync(1, valueFactory.CreateAsync, 9); + var result2 = await cache.GetOrAddAsync(1, valueFactory.CreateAsync, 17); + + valueFactory.timesCalled.Should().Be(1); + result1.Should().Be(result2); + } + + [Fact] + public void WhenItemsAddedExceedsCapacityItemsAreDiscarded() + { + cache.GetOrAdd(1, k => k); + cache.GetOrAdd(1, k => k); + cache.GetOrAdd(2, k => k); + cache.GetOrAdd(2, k => k); + + for (int i = 0; i < 25; i++) + { + cache.GetOrAdd(i, k => k); + } + + cache.DoMaintenance(); + LogLru(); + + cache.Count.Should().Be(20); + } + + [Fact] + public void WhenItemIsEvictedItIsDisposed() + { + var dcache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default); + var disposables = new DisposableItem[25]; + + for (int i = 0; i < 25; i++) + { + disposables[i] = new DisposableItem(); + dcache.GetOrAdd(i, k => disposables[i]); + } + + dcache.DoMaintenance(); + LogLru(); + + dcache.Count.Should().Be(20); + disposables.Count(d => d.IsDisposed).Should().Be(5); + } + + // protected 15 + // probation 4 + // window 1 + [Fact] + public void WhenNewItemsAreAddedTheyArePromotedBasedOnFrequency() + { + for (int i = 0; i < 20; i++) + { + cache.GetOrAdd(i, k => k); + } + + // W [19] Protected [] Probation [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18] + cache.DoMaintenance(); + LogLru(); + + for (int i = 0; i < 15; i++) + { + cache.GetOrAdd(i, k => k); + } + + // W [19] Protected [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14] Probation [15,16,17,18] + cache.DoMaintenance(); + LogLru(); + + for (int k = 0; k < 2; k++) + { + for (int j = 0; j < 6; j++) + { + for (int i = 0; i < 15; i++) + { + cache.GetOrAdd(j + 20, k => k); + } + cache.DoMaintenance(); + LogLru(); + } + } + + // Values promoted to probation then protected: + // W[21] Protected[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] Probation[16, 17, 18, 20] + // W[22] Protected[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] Probation[17, 18, 20, 21] + // W[23] Protected[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] Probation[18, 20, 21, 22] + // W[24] Protected[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] Probation[20, 21, 22, 23] + // W[25] Protected[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] Probation[20, 21, 22, 23] + // W[25] Protected[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 20] Probation[21, 22, 23, 0] + // W[25] Protected[2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 20, 21] Probation[22, 23, 0, 1] + // W[25] Protected[3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 20, 21, 22] Probation[23, 0, 1, 2] + // W[25] Protected[4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 20, 21, 22, 23] Probation[0, 1, 2, 3] + // W[24] Protected[4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 20, 21, 22, 23] Probation[1, 2, 3, 25] + // W[24] Protected[5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 20, 21, 22, 23, 25] Probation[1, 2, 3, 4] + + cache.Count.Should().Be(20); + + // W [24] Protected [5,6,7,8,9,10,11,12,13,14,20,21,22,23,25] Probation [] + cache.Trim(4); + cache.DoMaintenance(); + LogLru(); + + cache.TryGet(1, out var value1).Should().BeFalse(); + cache.TryGet(2, out var value2).Should().BeFalse(); + cache.Count.Should().Be(16); + } + + [Fact] + public void ReadPromotesProbation() + { + for (int i = 0; i < 20; i++) + { + cache.GetOrAdd(i, k => k); + } + + // W [19] Protected [] Probation 
[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18] + cache.DoMaintenance(); + LogLru(); + + // W [19] Protected [16] Probation [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,17,18] + cache.GetOrAdd(16, k => k); + cache.DoMaintenance(); + LogLru(); + + for (int i = 25; i < 50; i++) + { + cache.GetOrAdd(i, k => k); + cache.GetOrAdd(i, k => k); + } + + // W [49] Protected [16] Probation [25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42] + cache.DoMaintenance(); + LogLru(); + + cache.Trim(18); + + // W [49] Protected [16] Probation [] + cache.DoMaintenance(); + LogLru(); + + cache.TryGet(16, out var value1).Should().BeTrue(); + } + + // when probation item is written it is moved to protected + [Fact] + public void WritePromotesProbation() + { + for (int i = 0; i < 20; i++) + { + cache.GetOrAdd(i, k => k); + } + + // W [19] Protected [] Probation [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18] + cache.DoMaintenance(); + LogLru(); + + // W [24] Protected [16] Probation [2,6,7,8,9,10,11,12,13,14,15,17,18,19,20,21,22,23] + cache.TryUpdate(16, -16).Should().BeTrue(); + cache.DoMaintenance(); + LogLru(); + + for (int i = 25; i < 50; i++) + { + cache.GetOrAdd(i, k => k); + cache.GetOrAdd(i, k => k); + } + + // W [49] Protected [16] Probation [2,6,7,8,9,10,11,12,13,14,15,17,18,19,20,21,22,23] + cache.DoMaintenance(); + LogLru(); + + cache.Trim(18); + + // W [49] Protected [16] Probation [] + cache.DoMaintenance(); + LogLru(); + + cache.TryGet(16, out var value1).Should().BeTrue(); + } + + [Fact] + public void ReadUpdatesProtectedLruOrder() + { + // W [19] Protected [] Probation [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18] + for (int i = 0; i < 20; i++) + { + cache.GetOrAdd(i, k => k); + } + + cache.DoMaintenance(); + LogLru(); + + cache.GetOrAdd(7, k => k); + cache.GetOrAdd(8, k => k); + cache.GetOrAdd(9, k => k); + + // W [19] Protected [7,8,9] Probation [0,1,2,3,4,5,6,10,11,12,13,14,15,16,17,18] + cache.DoMaintenance(); + LogLru(); + + // W [19] Protected [8,9,7] Probation [0,1,2,3,4,5,6,10,11,12,13,14,15,16,17,18] + // element 7 now moved to back of LRU + cache.GetOrAdd(7, k => k); + cache.DoMaintenance(); + LogLru(); + + // Trim is LRU order + //W [19] Protected [7] Probation [] + cache.Trim(18); + cache.DoMaintenance(); + LogLru(); + + cache.TryGet(7, out var _).Should().BeTrue(); + } + + [Fact] + public void WriteUpdatesProtectedLruOrder() + { + // W [19] Protected [] Probation [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18] + for (int i = 0; i < 20; i++) + { + cache.GetOrAdd(i, k => k); + } + + cache.DoMaintenance(); + LogLru(); + + cache.GetOrAdd(7, k => k); + cache.GetOrAdd(8, k => k); + cache.GetOrAdd(9, k => k); + + // W [19] Protected [7,8,9] Probation [0,1,2,3,4,5,6,10,11,12,13,14,15,16,17,18] + cache.DoMaintenance(); + LogLru(); + + // W [19] Protected [8,9,7] Probation [0,1,2,3,4,5,6,10,11,12,13,14,15,16,17,18] + // element 7 now moved to back of LRU + cache.TryUpdate(7, -7).Should().BeTrue(); + cache.DoMaintenance(); + LogLru(); + + // Trim is LRU order + //W [19] Protected [7] Probation [] + cache.Trim(18); + cache.DoMaintenance(); + LogLru(); + + cache.TryGet(7, out var _).Should().BeTrue(); + } + + [Fact] + public void WhenHitRateChangesWindowSizeIsAdapted() + { + cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default); + + // First completely fill the cache, push entries into protected + for (int i = 0; i < 20; i++) + { + cache.GetOrAdd(i, k => k); + } + + // W [19] Protected [] Probation [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18] + 
cache.DoMaintenance(); + LogLru(); + + for (int i = 0; i < 15; i++) + { + cache.GetOrAdd(i, k => k); + } + + // W [19] Protected [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14] Probation [15,16,17,18] + cache.DoMaintenance(); + LogLru(); + + // The reset sample size is 200, so do 200 cache hits + // W [19] Protected [12,13,14,15,16,17,18,0,1,2,3,4,5,6,7] Probation [8,9,10,11] + for (int j = 0; j < 10; j++) + for (int i = 0; i < 20; i++) + { + cache.GetOrAdd(i, k => k); + } + + cache.DoMaintenance(); + LogLru(); + + // then miss 200 times + // W [300] Protected [12,13,14,15,16,17,18,0,1,2,3,4,5,6,7] Probation [9,10,11,227] + for (int i = 0; i < 201; i++) + { + cache.GetOrAdd(i + 100, k => k); + } + + cache.DoMaintenance(); + LogLru(); + + // then miss 200 more times (window adaptation +1 window slots) + // W [399,400] Protected [14,15,16,17,18,0,1,2,3,4,5,6,7,227] Probation [9,10,11,12] + for (int i = 0; i < 201; i++) + { + cache.GetOrAdd(i + 200, k => k); + } + + cache.DoMaintenance(); + LogLru(); + + // make 2 requests to new keys, if window is size is now 2 both will exist: + cache.GetOrAdd(666, k => k); + cache.GetOrAdd(667, k => k); + + cache.DoMaintenance(); + LogLru(); + + cache.TryGet(666, out var _).Should().BeTrue(); + cache.TryGet(667, out var _).Should().BeTrue(); + + this.output.WriteLine($"Scheduler ran {cache.Scheduler.RunCount} times."); + } + + [Fact] + public void ReadSchedulesMaintenanceWhenBufferIsFull() + { + var scheduler = new TestScheduler(); + cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default); + + cache.GetOrAdd(1, k => k); + scheduler.RunCount.Should().Be(1); + cache.DoMaintenance(); + + for (int i = 0; i < ConcurrentLfu.DefaultBufferSize; i++) + { + scheduler.RunCount.Should().Be(1); + cache.GetOrAdd(1, k => k); + } + + // read buffer is now full, next read triggers maintenance + cache.GetOrAdd(1, k => k); + scheduler.RunCount.Should().Be(2); + } + + [Fact] + public void WhenReadBufferIsFullReadsAreDropped() + { + var scheduler = new TestScheduler(); + cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default); + + cache.GetOrAdd(1, k => k); + scheduler.RunCount.Should().Be(1); + cache.DoMaintenance(); + + for (int i = 0; i < ConcurrentLfu.DefaultBufferSize * 2; i++) + { + cache.GetOrAdd(1, k => k); + } + + cache.DoMaintenance(); + + cache.Metrics.Value.Hits.Should().Be(ConcurrentLfu.DefaultBufferSize); + } + + [Fact] + public void WhenWriteBufferIsFullAddDoesMaintenance() + { + var bufferSize = ConcurrentLfu.DefaultBufferSize; + var scheduler = new TestScheduler(); + + cache = new ConcurrentLfu(1, bufferSize * 2, scheduler, EqualityComparer.Default); + + // add an item, flush write buffer + cache.GetOrAdd(-1, k => k); + cache.DoMaintenance(); + + // remove the item but don't flush, it is now in the write buffer and maintenance is scheduled + cache.TryRemove(-1).Should().BeTrue(); + + // add buffer size items, last iteration will invoke maintenance on the foreground since write + // buffer is full and test scheduler did not do any work + for (int i = 0; i < bufferSize; i++) + { + cache.GetOrAdd(i, k => k); + } + + // pending write (to remove -1) should be flushed by the 128th write calling maintenance + // directly within AfterWrite + cache.TryGet(-1, out var _).Should().BeFalse(); + } + +// backcompat: remove conditional compile +#if NETCOREAPP3_0_OR_GREATER + [Fact] + public void WhenWriteBufferIsFullUpdatesAreDropped() + { + int capacity = 20; + var bufferSize = Math.Min(BitOps.CeilingPowerOfTwo(capacity), 128); + var scheduler = new 
TestScheduler(); + cache = new ConcurrentLfu(1, capacity, scheduler, EqualityComparer.Default); + + cache.GetOrAdd(1, k => k); + scheduler.RunCount.Should().Be(1); + cache.DoMaintenance(); + + for (int i = 0; i < bufferSize * 2; i++) + { + cache.TryUpdate(1, i); + } + + cache.DoMaintenance(); + + cache.Metrics.Value.Updated.Should().Be(bufferSize); + } +#endif + + [Fact] + public void EvictionPolicyReturnsCapacity() + { + cache.Policy.Eviction.Value.Capacity.Should().Be(20); + } + + [Fact] + public void ExpireAfterWriteIsDisabled() + { + cache.Policy.ExpireAfterWrite.HasValue.Should().BeFalse(); + } + + [Fact] + public void EventsAreDisabled() + { + cache.Events.HasValue.Should().BeFalse(); + } + + [Fact] + public void MetricsAreEnabled() + { + cache.Metrics.HasValue.Should().BeTrue(); + } + + [Fact] + public void WhenItemIsAddedThenRetrievedMetricHitRatioIsHalf() + { + cache.GetOrAdd(1, k => k); + bool result = cache.TryGet(1, out var value); + + cache.DoMaintenance(); + + cache.Metrics.Value.HitRatio.Should().Be(0.5); + cache.Metrics.Value.Hits.Should().Be(1); + cache.Metrics.Value.Misses.Should().Be(1); + } + + [Fact] + public void WhenItemIsEvictedMetricRecordsCount() + { + cache.GetOrAdd(1, k => k); + cache.GetOrAdd(1, k => k); + cache.GetOrAdd(2, k => k); + cache.GetOrAdd(2, k => k); + + for (int i = 0; i < 25; i++) + { + cache.GetOrAdd(i, k => k); + } + + cache.DoMaintenance(); + + cache.Metrics.Value.Evicted.Should().Be(5); + } + + [Fact] + public void WhenItemsAddedKeysContainsTheKeys() + { + cache.Count.Should().Be(0); + cache.GetOrAdd(1, k => k); + cache.GetOrAdd(2, k => k); + cache.Keys.Should().BeEquivalentTo(new[] { 1, 2 }); + } + + [Fact] + public void WhenItemsAddedGenericEnumerateContainsKvps() + { + cache.Count.Should().Be(0); + cache.GetOrAdd(1, k => k + 1); + cache.GetOrAdd(2, k => k + 1); + + cache.Should().BeEquivalentTo(new[] { new KeyValuePair(1, 2), new KeyValuePair(2, 3) }); + } + + [Fact] + public void WhenItemsAddedEnumerateContainsKvps() + { + cache.Count.Should().Be(0); + cache.GetOrAdd(1, k => k + 1); + cache.GetOrAdd(2, k => k + 1); + + var enumerable = (IEnumerable)cache; + enumerable.Should().BeEquivalentTo(new[] { new KeyValuePair(1, 2), new KeyValuePair(2, 3) }); + } + + [Fact] + public void WhenItemIsUpdatedItIsUpdated() + { + cache.GetOrAdd(1, k => k); + cache.AddOrUpdate(1, 2); + + cache.TryGet(1, out var value).Should().BeTrue(); + value.Should().Be(2); + } + + [Fact] + public void WhenItemDoesNotExistUpdatedAddsItem() + { + cache.AddOrUpdate(1, 2); + + cache.TryGet(1, out var value).Should().BeTrue(); + value.Should().Be(2); + } + + [Fact] + public void WhenKeyExistsTryRemoveRemovesItem() + { + cache.GetOrAdd(1, k => k); + + cache.TryRemove(1).Should().BeTrue(); + cache.TryGet(1, out _).Should().BeFalse(); + } + + [Fact] + public void WhenKeyExistsTryRemoveReturnsValue() + { + cache.GetOrAdd(1, valueFactory.Create); + + cache.TryRemove(1, out var value).Should().BeTrue(); + value.Should().Be(1); + } + + [Fact] + public void WhenItemExistsTryRemoveRemovesItem() + { + cache.GetOrAdd(1, k => k); + + cache.TryRemove(new KeyValuePair(1, 1)).Should().BeTrue(); + cache.TryGet(1, out _).Should().BeFalse(); + } + + [Fact] + public void WhenItemDoesntMatchTryRemoveDoesNotRemove() + { + cache.GetOrAdd(1, k => k); + + cache.TryRemove(new KeyValuePair(1, 2)).Should().BeFalse(); + cache.TryGet(1, out var value).Should().BeTrue(); + } + + [Fact] + public void WhenItemIsRemovedItIsDisposed() + { + var dcache = new ConcurrentLfu(1, 20, new 
BackgroundThreadScheduler(), EqualityComparer.Default); + var disposable = new DisposableItem(); + + dcache.GetOrAdd(1, k => disposable); + + dcache.TryRemove(1).Should().BeTrue(); + dcache.DoMaintenance(); + + disposable.IsDisposed.Should().BeTrue(); + } + + [Fact] + public void WhenItemIsRemovedEvictionCountIsIncremented() + { + cache.GetOrAdd(1, k => k); + + cache.TryRemove(1).Should().BeTrue(); + cache.DoMaintenance(); + + cache.Metrics.Value.Evicted.Should().Be(1); + } + + [Fact] + public void WhenItemDoesNotExistTryRemoveIsFalse() + { + cache.TryRemove(1).Should().BeFalse(); + } + + // OnWrite handles the case where a node is removed while the write buffer contains the node + [Fact] + public void WhenRemovedInWriteBuffer() + { + cache.GetOrAdd(1, k => k); + + // wait for the maintenance thread to run, this will attach the new node to the LRU list + cache.DoMaintenance(); + + // pending write in the buffer + cache.TryUpdate(1, 2); + + // immediately remove + cache.TryRemove(1).Should().BeTrue(); + + cache.DoMaintenance(); + + cache.TryGet(1, out var _).Should().BeFalse(); + } + + [Fact] + public void WhenItemDoesNotExistTryUpdateIsFalse() + { + cache.TryUpdate(1, 2).Should().BeFalse(); + } + + [Fact] public void WhenAddingNullValueCanBeAddedAndRemoved() { // use foreground so that any null ref exceptions will surface var lfu = new ConcurrentLfu(1, 20, new ForegroundScheduler(), EqualityComparer.Default); lfu.GetOrAdd(1, _ => null).Should().BeNull(); lfu.AddOrUpdate(1, null); - lfu.TryRemove(1).Should().BeTrue(); - } - - [Fact] - public void WhenClearedCacheIsEmpty() + lfu.TryRemove(1).Should().BeTrue(); + } + + [Fact] + public void WhenClearedCacheIsEmpty() { - cache.GetOrAdd(1, k => k); + cache.GetOrAdd(1, k => k); cache.GetOrAdd(2, k => k); - cache.Clear(); - - cache.Count.Should().Be(0); - cache.TryGet(1, out var _).Should().BeFalse(); - } - - [Fact] + cache.Clear(); + + cache.Count.Should().Be(0); + cache.TryGet(1, out var _).Should().BeFalse(); + } + + [Fact] public void WhenBackgroundMaintenanceRepeatedReadThenClearResultsInEmpty() { cache = new ConcurrentLfu(1, 40, new BackgroundThreadScheduler(), EqualityComparer.Default); @@ -715,204 +715,84 @@ public void WhenBackgroundMaintenanceRepeatedReadThenClearResultsInEmpty() // there should be no iteration of the loop where count != 0 overflow.Should().Be(0); - } - - [Fact] - public void TrimRemovesNItems() - { - for (int i = 0; i < 25; i++) - { - cache.GetOrAdd(i, k => k); - } - cache.DoMaintenance(); - - cache.Count.Should().Be(20); - - cache.Trim(5); - cache.DoMaintenance(); - - cache.Count.Should().Be(15); - } - - [Fact] - public void TrimWhileItemsInWriteBufferRemovesNItems() - { - // null scheduler == no maintenance, all writes fit in buffer - cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default); - - for (int i = 0; i < 25; i++) - { - cache.GetOrAdd(i, k => k); - } - - // Trim implicitly performs maintenance - cache.Trim(5); - - cache.DoMaintenance(); - - // The trim takes effect before all the writes are replayed by the maintenance thread. 
- cache.Metrics.Value.Evicted.Should().Be(10); - cache.Count.Should().Be(15); - - this.output.WriteLine($"Count {cache.Count}"); - this.output.WriteLine($"Keys {string.Join(",", cache.Keys.Select(k => k.ToString()))}"); - - } - - //Elapsed 411.6918ms - 0.0004116918ns/op - //Cache hits 1689839 (sampled 16.89839%) - //Maintenance ops 31 - [Fact] - public void VerifyHitsWithBackgroundScheduler() - { - // when running all tests in parallel, sample count drops significantly: set low bar for stability. - VerifyHits(iterations: 10_000_000, minSamples: 250_000); - } - - //Elapsed 590.8154ms - 0.0005908154ns/op - //Cache hits 3441470 (sampled 34.414699999999996%) - //Maintenance ops 20 - [Fact] - public void VerifyHitsWithThreadPoolScheduler() - { - // when running all tests in parallel, sample count drops significantly: set low bar for stability. - cache = new ConcurrentLfu(1, 20, new ThreadPoolScheduler(), EqualityComparer.Default); - VerifyHits(iterations: 10_000_000, minSamples: 500_000); - } - - //Elapsed 273.0148ms - 0.0002730148ns/op - //Cache hits 0 (sampled 0%) - //Maintenance ops 1 - [Fact] - public void VerifyHitsWithNullScheduler() - { - cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default); - VerifyHits(iterations: 10_000_000, minSamples: -1); - } - - //Will drop 78125 reads. - //Elapsed 847.5331ms - 0.0008475331ns/op - //Cache hits 10000000 (sampled 99.2248062015504%) - //Maintenance ops 78126 - [Fact] - public void VerifyHitsWithForegroundScheduler() - { - cache = new ConcurrentLfu(1, 20, new ForegroundScheduler(), EqualityComparer.Default); - - // Note: TryAdd will drop 1 read per full read buffer, since TryAdd will return false - // before TryScheduleDrain is called. This serves as sanity check. - int iterations = 10_000_000; - int dropped = iterations / ConcurrentLfu.DefaultBufferSize; - - this.output.WriteLine($"Will drop {dropped} reads."); - - VerifyHits(iterations: iterations + dropped, minSamples: iterations); - } - - [Fact] - public void VerifyMisses() - { - cache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default); - - int iterations = 100_000; - Func func = x => x; - - var start = Stopwatch.GetTimestamp(); - - for (int i = 0; i < iterations; i++) - { - cache.GetOrAdd(i, func); - } - - var end = Stopwatch.GetTimestamp(); - - cache.DoMaintenance(); - - var totalTicks = end - start; - var timeMs = ((double)totalTicks / Stopwatch.Frequency) * 1000.0; - var timeNs = timeMs / 1_000_000; - - var timePerOp = timeMs / (double)iterations; - var samplePercent = this.cache.Metrics.Value.Misses / (double)iterations * 100; - - this.output.WriteLine($"Elapsed {timeMs}ms - {timeNs}ns/op"); - this.output.WriteLine($"Cache misses {this.cache.Metrics.Value.Misses} (sampled {samplePercent}%)"); - this.output.WriteLine($"Maintenance ops {this.cache.Scheduler.RunCount}"); - - cache.Metrics.Value.Misses.Should().Be(iterations); - } - - private void VerifyHits(int iterations, int minSamples) - { - Func func = x => x; - cache.GetOrAdd(1, func); - - var start = Stopwatch.GetTimestamp(); - - for (int i = 0; i < iterations; i++) - { - cache.GetOrAdd(1, func); - } - - var end = Stopwatch.GetTimestamp(); - - var totalTicks = end - start; - var timeMs = ((double)totalTicks / Stopwatch.Frequency) * 1000.0; - var timeNs = timeMs / 1_000_000; - - var timePerOp = timeMs / (double)iterations; - var samplePercent = this.cache.Metrics.Value.Hits / (double)iterations * 100; - - this.output.WriteLine($"Elapsed {timeMs}ms - {timeNs}ns/op"); - 
this.output.WriteLine($"Cache hits {this.cache.Metrics.Value.Hits} (sampled {samplePercent}%)"); - this.output.WriteLine($"Maintenance ops {this.cache.Scheduler.RunCount}"); - - if (this.cache.Scheduler.LastException.HasValue) - { - this.output.WriteLine($"Error: {this.cache.Scheduler.LastException.Value}"); - } - - cache.Metrics.Value.Hits.Should().BeGreaterThanOrEqualTo(minSamples); - - // verify this doesn't block or throw - var b = cache.Scheduler as BackgroundThreadScheduler; - b?.Dispose(); - } - - private void LogLru() - { -#if DEBUG - this.output.WriteLine(cache.FormatLfuString()); -#endif - } - - public class ValueFactory - { - public int timesCalled; - - public int Create(int key) - { - timesCalled++; - return key; - } - - public int Create(int key, int arg) - { - timesCalled++; - return key + arg; - } - - public Task CreateAsync(int key) - { - timesCalled++; - return Task.FromResult(key); - } - - public Task CreateAsync(int key, int arg) - { - timesCalled++; - return Task.FromResult(key + arg); - } - } - } -} + } + + [Fact] + public void TrimRemovesNItems() + { + for (int i = 0; i < 25; i++) + { + cache.GetOrAdd(i, k => k); + } + cache.DoMaintenance(); + + cache.Count.Should().Be(20); + + cache.Trim(5); + cache.DoMaintenance(); + + cache.Count.Should().Be(15); + } + + [Fact] + public void TrimWhileItemsInWriteBufferRemovesNItems() + { + // null scheduler == no maintenance, all writes fit in buffer + cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default); + + for (int i = 0; i < 25; i++) + { + cache.GetOrAdd(i, k => k); + } + + // Trim implicitly performs maintenance + cache.Trim(5); + + cache.DoMaintenance(); + + // The trim takes effect before all the writes are replayed by the maintenance thread. + cache.Metrics.Value.Evicted.Should().Be(10); + cache.Count.Should().Be(15); + + this.output.WriteLine($"Count {cache.Count}"); + this.output.WriteLine($"Keys {string.Join(",", cache.Keys.Select(k => k.ToString()))}"); + + } + + private void LogLru() + { +#if DEBUG + this.output.WriteLine(cache.FormatLfuString()); +#endif + } + + public class ValueFactory + { + public int timesCalled; + + public int Create(int key) + { + timesCalled++; + return key; + } + + public int Create(int key, int arg) + { + timesCalled++; + return key + arg; + } + + public Task CreateAsync(int key) + { + timesCalled++; + return Task.FromResult(key); + } + + public Task CreateAsync(int key, int arg) + { + timesCalled++; + return Task.FromResult(key + arg); + } + } + } +} diff --git a/BitFaster.Caching.UnitTests/Lru/ConcurrentLruAfterAccessTests.cs b/BitFaster.Caching.UnitTests/Lru/ConcurrentLruAfterAccessTests.cs index e7f753fb..ca793cc2 100644 --- a/BitFaster.Caching.UnitTests/Lru/ConcurrentLruAfterAccessTests.cs +++ b/BitFaster.Caching.UnitTests/Lru/ConcurrentLruAfterAccessTests.cs @@ -1,9 +1,6 @@ using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Runtime.InteropServices; -using System.Threading; -using System.Threading.Tasks; +using System.Collections.Generic; +using System.Runtime.InteropServices; using BitFaster.Caching.Lru; using FluentAssertions; using Xunit; @@ -57,25 +54,40 @@ public void WhenItemIsNotExpiredItIsNotRemoved() } [Fact] - public async Task WhenItemIsExpiredItIsRemoved() - { - lru.GetOrAdd(1, valueFactory.Create); - - await Task.Delay(timeToLive.MultiplyBy(ttlWaitMlutiplier)); - - lru.TryGet(1, out var value).Should().BeFalse(); + public void WhenItemIsExpiredItIsRemoved() + { + Timed.Execute( + lru, + lru => + { + 
lru.GetOrAdd(1, valueFactory.Create); + return lru; + }, + timeToLive.MultiplyBy(ttlWaitMlutiplier), + lru => + { + lru.TryGet(1, out var value).Should().BeFalse(); + } + ); } [Fact] - public async Task WhenItemIsUpdatedTtlIsExtended() + public void WhenItemIsUpdatedTtlIsExtended() { - lru.GetOrAdd(1, valueFactory.Create); - - await Task.Delay(timeToLive.MultiplyBy(ttlWaitMlutiplier)); - - lru.TryUpdate(1, "3"); - - lru.TryGet(1, out var value).Should().BeTrue(); + Timed.Execute( + lru, + lru => + { + lru.GetOrAdd(1, valueFactory.Create); + return lru; + }, + timeToLive.MultiplyBy(ttlWaitMlutiplier), + lru => + { + lru.TryUpdate(1, "3"); + lru.TryGet(1, out var value).Should().BeTrue(); + } + ); } // Using async/await makes this very unstable due to xunit @@ -84,36 +96,30 @@ public async Task WhenItemIsUpdatedTtlIsExtended() [Fact] public void WhenItemIsReadTtlIsExtended() { - int attempts = 0; - while (true) - { - var sw = Stopwatch.StartNew(); - - lru = new ConcurrentLruBuilder() - .WithCapacity(capacity) - .WithExpireAfterAccess(TimeSpan.FromMilliseconds(100)) - .Build(); - - lru.GetOrAdd(1, valueFactory.Create); - - Thread.Sleep(50); - - if (sw.Elapsed < TimeSpan.FromMilliseconds(75)) + Timed.Execute( + capacity, + cap => { - lru.TryGet(1, out _).Should().BeTrue($"First {sw.Elapsed}"); - - Thread.Sleep(75); + var lru = new ConcurrentLruBuilder() + .WithCapacity(cap) + .WithExpireAfterAccess(TimeSpan.FromMilliseconds(100)) + .Build(); - if (sw.Elapsed < TimeSpan.FromMilliseconds(150)) - { - lru.TryGet(1, out var value).Should().BeTrue($"Second {sw.Elapsed}"); - break; - } - } - - Thread.Sleep(200); - attempts++.Should().BeLessThan(128, "Unable to run test within verification margin"); - } + lru.GetOrAdd(1, valueFactory.Create); + + return lru; + }, + TimeSpan.FromMilliseconds(50), + lru => + { + lru.TryGet(1, out _).Should().BeTrue($"First"); + }, + TimeSpan.FromMilliseconds(75), + lru => + { + lru.TryGet(1, out var value).Should().BeTrue($"Second"); + } + ); } [Fact] @@ -148,64 +154,91 @@ public void WhenValueEvictedItemRemovedEventIsFired() } [Fact] - public async Task WhenItemsAreExpiredExpireRemovesExpiredItems() + public void WhenItemsAreExpiredExpireRemovesExpiredItems() { - lru.AddOrUpdate(1, "1"); - lru.AddOrUpdate(2, "2"); - lru.AddOrUpdate(3, "3"); - lru.GetOrAdd(1, valueFactory.Create); - lru.GetOrAdd(2, valueFactory.Create); - lru.GetOrAdd(3, valueFactory.Create); - - lru.AddOrUpdate(4, "4"); - lru.AddOrUpdate(5, "5"); - lru.AddOrUpdate(6, "6"); - - lru.AddOrUpdate(7, "7"); - lru.AddOrUpdate(8, "8"); - lru.AddOrUpdate(9, "9"); - - await Task.Delay(timeToLive.MultiplyBy(ttlWaitMlutiplier)); - - lru.Policy.ExpireAfterAccess.Value.TrimExpired(); - - lru.Count.Should().Be(0); + Timed.Execute( + lru, + lru => + { + lru.AddOrUpdate(1, "1"); + lru.AddOrUpdate(2, "2"); + lru.AddOrUpdate(3, "3"); + lru.GetOrAdd(1, valueFactory.Create); + lru.GetOrAdd(2, valueFactory.Create); + lru.GetOrAdd(3, valueFactory.Create); + + lru.AddOrUpdate(4, "4"); + lru.AddOrUpdate(5, "5"); + lru.AddOrUpdate(6, "6"); + + lru.AddOrUpdate(7, "7"); + lru.AddOrUpdate(8, "8"); + lru.AddOrUpdate(9, "9"); + + return lru; + }, + timeToLive.MultiplyBy(ttlWaitMlutiplier), + lru => + { + lru.Policy.ExpireAfterAccess.Value.TrimExpired(); + + lru.Count.Should().Be(0); + } + ); } [Fact] - public async Task WhenCacheHasExpiredAndFreshItemsExpireRemovesOnlyExpiredItems() - { - lru.AddOrUpdate(1, "1"); - lru.AddOrUpdate(2, "2"); - lru.AddOrUpdate(3, "3"); - - lru.AddOrUpdate(4, "4"); - lru.AddOrUpdate(5, "5"); - 
lru.AddOrUpdate(6, "6"); - - await Task.Delay(timeToLive.MultiplyBy(ttlWaitMlutiplier)); - - lru.GetOrAdd(1, valueFactory.Create); - lru.GetOrAdd(2, valueFactory.Create); - lru.GetOrAdd(3, valueFactory.Create); - - lru.Policy.ExpireAfterAccess.Value.TrimExpired(); - - lru.Count.Should().Be(3); + public void WhenCacheHasExpiredAndFreshItemsExpireRemovesOnlyExpiredItems() + { + Timed.Execute( + lru, + lru => + { + lru.AddOrUpdate(1, "1"); + lru.AddOrUpdate(2, "2"); + lru.AddOrUpdate(3, "3"); + + lru.AddOrUpdate(4, "4"); + lru.AddOrUpdate(5, "5"); + lru.AddOrUpdate(6, "6"); + + return lru; + }, + timeToLive.MultiplyBy(ttlWaitMlutiplier), + lru => + { + lru.GetOrAdd(1, valueFactory.Create); + lru.GetOrAdd(2, valueFactory.Create); + lru.GetOrAdd(3, valueFactory.Create); + + lru.Policy.ExpireAfterAccess.Value.TrimExpired(); + + lru.Count.Should().Be(3); + } + ); } [Fact] - public async Task WhenItemsAreExpiredTrimRemovesExpiredItems() - { - lru.AddOrUpdate(1, "1"); - lru.AddOrUpdate(2, "2"); - lru.AddOrUpdate(3, "3"); - - await Task.Delay(timeToLive.MultiplyBy(ttlWaitMlutiplier)); - - lru.Policy.Eviction.Value.Trim(1); - - lru.Count.Should().Be(0); + public void WhenItemsAreExpiredTrimRemovesExpiredItems() + { + Timed.Execute( + lru, + lru => + { + lru.AddOrUpdate(1, "1"); + lru.AddOrUpdate(2, "2"); + lru.AddOrUpdate(3, "3"); + + return lru; + }, + timeToLive.MultiplyBy(ttlWaitMlutiplier), + lru => + { + lru.Policy.Eviction.Value.Trim(1); + + lru.Count.Should().Be(0); + } + ); } } } diff --git a/BitFaster.Caching.UnitTests/Lru/ConcurrentTLruTests.cs b/BitFaster.Caching.UnitTests/Lru/ConcurrentTLruTests.cs index d12eab39..38447f91 100644 --- a/BitFaster.Caching.UnitTests/Lru/ConcurrentTLruTests.cs +++ b/BitFaster.Caching.UnitTests/Lru/ConcurrentTLruTests.cs @@ -5,6 +5,7 @@ using System.Threading.Tasks; using Xunit; using System.Runtime.InteropServices; +using System.Threading; namespace BitFaster.Caching.UnitTests.Lru { @@ -54,25 +55,40 @@ public void WhenItemIsNotExpiredItIsNotRemoved() } [Fact] - public async Task WhenItemIsExpiredItIsRemoved() + public void WhenItemIsExpiredItIsRemoved() { - lru.GetOrAdd(1, valueFactory.Create); - - await Task.Delay(timeToLive.MultiplyBy(ttlWaitMlutiplier)); - - lru.TryGet(1, out var value).Should().BeFalse(); + Timed.Execute( + lru, + lru => + { + lru.GetOrAdd(1, valueFactory.Create); + return lru; + }, + timeToLive.MultiplyBy(ttlWaitMlutiplier), + lru => + { + lru.TryGet(1, out var value).Should().BeFalse(); + } + ); } [Fact] - public async Task WhenItemIsUpdatedTtlIsExtended() + public void WhenItemIsUpdatedTtlIsExtended() { - lru.GetOrAdd(1, valueFactory.Create); - - await Task.Delay(timeToLive.MultiplyBy(ttlWaitMlutiplier)); - - lru.TryUpdate(1, "3"); - - lru.TryGet(1, out var value).Should().BeTrue(); + Timed.Execute( + lru, + lru => + { + lru.GetOrAdd(1, valueFactory.Create); + return lru; + }, + timeToLive.MultiplyBy(ttlWaitMlutiplier), + lru => + { + lru.TryUpdate(1, "3"); + lru.TryGet(1, out var value).Should().BeTrue(); + } + ); } [Fact] @@ -117,64 +133,91 @@ public void WhenItemRemovedEventIsUnregisteredEventIsNotFired() } [Fact] - public async Task WhenItemsAreExpiredExpireRemovesExpiredItems() + public void WhenItemsAreExpiredExpireRemovesExpiredItems() { - lru.AddOrUpdate(1, "1"); - lru.AddOrUpdate(2, "2"); - lru.AddOrUpdate(3, "3"); - lru.GetOrAdd(1, valueFactory.Create); - lru.GetOrAdd(2, valueFactory.Create); - lru.GetOrAdd(3, valueFactory.Create); - - lru.AddOrUpdate(4, "4"); - lru.AddOrUpdate(5, "5"); - lru.AddOrUpdate(6, "6"); - - 
lru.AddOrUpdate(7, "7"); - lru.AddOrUpdate(8, "8"); - lru.AddOrUpdate(9, "9"); - - await Task.Delay(timeToLive.MultiplyBy(ttlWaitMlutiplier)); - - lru.Policy.ExpireAfterWrite.Value.TrimExpired(); - - lru.Count.Should().Be(0); + Timed.Execute( + lru, + lru => + { + lru.AddOrUpdate(1, "1"); + lru.AddOrUpdate(2, "2"); + lru.AddOrUpdate(3, "3"); + lru.GetOrAdd(1, valueFactory.Create); + lru.GetOrAdd(2, valueFactory.Create); + lru.GetOrAdd(3, valueFactory.Create); + + lru.AddOrUpdate(4, "4"); + lru.AddOrUpdate(5, "5"); + lru.AddOrUpdate(6, "6"); + + lru.AddOrUpdate(7, "7"); + lru.AddOrUpdate(8, "8"); + lru.AddOrUpdate(9, "9"); + + return lru; + }, + timeToLive.MultiplyBy(ttlWaitMlutiplier), + lru => + { + lru.Policy.ExpireAfterWrite.Value.TrimExpired(); + + lru.Count.Should().Be(0); + } + ); } [Fact] - public async Task WhenCacheHasExpiredAndFreshItemsExpireRemovesOnlyExpiredItems() + public void WhenCacheHasExpiredAndFreshItemsExpireRemovesOnlyExpiredItems() { - lru.AddOrUpdate(1, "1"); - lru.AddOrUpdate(2, "2"); - lru.AddOrUpdate(3, "3"); - - lru.AddOrUpdate(4, "4"); - lru.AddOrUpdate(5, "5"); - lru.AddOrUpdate(6, "6"); - - await Task.Delay(timeToLive.MultiplyBy(ttlWaitMlutiplier)); - - lru.GetOrAdd(1, valueFactory.Create); - lru.GetOrAdd(2, valueFactory.Create); - lru.GetOrAdd(3, valueFactory.Create); - - lru.Policy.ExpireAfterWrite.Value.TrimExpired(); - - lru.Count.Should().Be(3); + Timed.Execute( + lru, + lru => + { + lru.AddOrUpdate(1, "1"); + lru.AddOrUpdate(2, "2"); + lru.AddOrUpdate(3, "3"); + + lru.AddOrUpdate(4, "4"); + lru.AddOrUpdate(5, "5"); + lru.AddOrUpdate(6, "6"); + + return lru; + }, + timeToLive.MultiplyBy(ttlWaitMlutiplier), + lru => + { + lru.GetOrAdd(1, valueFactory.Create); + lru.GetOrAdd(2, valueFactory.Create); + lru.GetOrAdd(3, valueFactory.Create); + + lru.Policy.ExpireAfterWrite.Value.TrimExpired(); + + lru.Count.Should().Be(3); + } + ); } [Fact] - public async Task WhenItemsAreExpiredTrimRemovesExpiredItems() + public void WhenItemsAreExpiredTrimRemovesExpiredItems() { - lru.AddOrUpdate(1, "1"); - lru.AddOrUpdate(2, "2"); - lru.AddOrUpdate(3, "3"); - - await Task.Delay(timeToLive.MultiplyBy(ttlWaitMlutiplier)); - - lru.Policy.Eviction.Value.Trim(1); - - lru.Count.Should().Be(0); + Timed.Execute( + lru, + lru => + { + lru.AddOrUpdate(1, "1"); + lru.AddOrUpdate(2, "2"); + lru.AddOrUpdate(3, "3"); + + return lru; + }, + timeToLive.MultiplyBy(ttlWaitMlutiplier), + lru => + { + lru.Policy.Eviction.Value.Trim(1); + + lru.Count.Should().Be(0); + } + ); } } diff --git a/BitFaster.Caching.UnitTests/Timed.cs b/BitFaster.Caching.UnitTests/Timed.cs new file mode 100644 index 00000000..41ea9bc1 --- /dev/null +++ b/BitFaster.Caching.UnitTests/Timed.cs @@ -0,0 +1,73 @@ +using System; +using System.Diagnostics; +using System.Threading; +using FluentAssertions; + +namespace BitFaster.Caching.UnitTests +{ + + /// + /// An execution orchestrator for running timed integration tests. This is useful to verify + /// correct expiry behavior in an end to end integration test with real code. + /// This class uses Thread.Sleep instead of async/await because xunit will + /// aggressively run tests on any await yielding thread making timing unpredictable. + /// + public class Timed + { + /// + /// Executes two test steps with a pause in between. The total time of the test is constrained + /// such that each step + pause is guaranteed to execute within 25ms precision. 
+ /// + public static void Execute<TArg, TState>(TArg arg, Func<TArg, TState> first, TimeSpan pause, Action<TState> second) + { + int attempts = 0; + while (true) + { + var sw = Stopwatch.StartNew(); + + var state = first(arg); + Thread.Sleep(pause); + + if (sw.Elapsed < pause + TimeSpan.FromMilliseconds(25)) + { + second(state); + return; + } + + Thread.Sleep(200); + attempts++.Should().BeLessThan(128, "Unable to run test within verification margin"); + } + } + + /// + /// Executes three test steps with pauses in between. The total time of the test is constrained + /// such that each step + pause is guaranteed to execute within 25ms precision. + /// + public static void Execute<TArg, TState>(TArg arg, Func<TArg, TState> first, TimeSpan pause1, Action<TState> second, TimeSpan pause2, Action<TState> third) + { + int attempts = 0; + while (true) + { + var sw = Stopwatch.StartNew(); + + var state = first(arg); + Thread.Sleep(pause1); + + if (sw.Elapsed < pause1 + TimeSpan.FromMilliseconds(25)) + { + second(state); + Thread.Sleep(pause2); + + if (sw.Elapsed < pause1 + pause2 + TimeSpan.FromMilliseconds(25)) + { + third(state); + return; + } + } + + Thread.Sleep(200); + attempts++.Should().BeLessThan(128, "Unable to run test within verification margin"); + } + } + } +}
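
The Timed helper added above is what the converted expiry tests in this change call. As a point of reference, here is a minimal, hypothetical usage sketch: the TimedUsageExample class, key values, and timings are illustrative and not part of this diff, and it assumes the Execute<TArg, TState> signature reconstructed above plus the ConcurrentLruBuilder expire-after-access API already used by the tests in this change.

    using System;
    using BitFaster.Caching.Lru;
    using FluentAssertions;
    using Xunit;

    namespace BitFaster.Caching.UnitTests
    {
        // Hypothetical example, not part of this change: shows the pattern the converted
        // tests follow - build cache state, pause past the TTL, then assert. Timed.Execute
        // retries the whole sequence if the elapsed time overshoots the 25ms margin.
        public class TimedUsageExample
        {
            [Fact]
            public void ExpiredItemIsNotReturned()
            {
                Timed.Execute(
                    new ConcurrentLruBuilder<int, string>()
                        .WithCapacity(9)
                        .WithExpireAfterAccess(TimeSpan.FromMilliseconds(100))
                        .Build(),
                    lru =>
                    {
                        // step 1: populate the cache
                        lru.GetOrAdd(1, k => k.ToString());
                        return lru;
                    },
                    TimeSpan.FromMilliseconds(200), // pause longer than the 100ms TTL
                    lru =>
                    {
                        // step 2: after the pause the item should have expired
                        lru.TryGet(1, out _).Should().BeFalse();
                    });
            }
        }
    }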