From 8ef56b03c70d0db37c1c96353ab20b86a4548b72 Mon Sep 17 00:00:00 2001
From: Alex Peck
Date: Mon, 6 Nov 2023 17:36:12 -0800
Subject: [PATCH 1/4] Fix LRU clear when warm/hot items are touched

---
 .../Lru/ConcurrentLruTests.cs              | 39 +++++++++++++++++++
 BitFaster.Caching/Lru/ConcurrentLruCore.cs | 34 +++++++---------
 2 files changed, 53 insertions(+), 20 deletions(-)

diff --git a/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs b/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs
index 5395700e..caa2b4dd 100644
--- a/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs
+++ b/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs
@@ -875,6 +875,45 @@ public void WhenItemsExistClearRemovesAllItems()
             lru.ColdCount.Should().Be(0);
         }

+        [Theory]
+        [InlineData(1)]
+        [InlineData(2)]
+        [InlineData(3)]
+        [InlineData(4)]
+        [InlineData(5)]
+        [InlineData(6)]
+        [InlineData(7)]
+        [InlineData(8)]
+        [InlineData(9)]
+        [InlineData(10)]
+        public void WhenItemsExistAndItemsAccessedClearRemovesAllItems(int itemCount)
+        {
+            // By default capacity is 9. Test all possible states of touched items
+            // in the cache.
+
+            for (int i = 0; i < itemCount; i++)
+            {
+                lru.AddOrUpdate(i, "1");
+            }
+
+            // touch n items
+            for (int i = 0; i < itemCount; i++)
+            {
+                lru.TryGet(i, out _);
+            }
+
+            lru.Clear();
+
+            this.testOutputHelper.WriteLine("LRU " + string.Join(" ", lru.Keys));
+
+            lru.Count.Should().Be(0);
+
+            // verify queues are purged
+            lru.HotCount.Should().Be(0);
+            lru.WarmCount.Should().Be(0);
+            lru.ColdCount.Should().Be(0);
+        }
+
         [Fact]
         public void WhenWarmThenClearedIsWarmIsReset()
         {
diff --git a/BitFaster.Caching/Lru/ConcurrentLruCore.cs b/BitFaster.Caching/Lru/ConcurrentLruCore.cs
index c4024929..72267737 100644
--- a/BitFaster.Caching/Lru/ConcurrentLruCore.cs
+++ b/BitFaster.Caching/Lru/ConcurrentLruCore.cs
@@ -422,15 +422,7 @@ public void AddOrUpdate(K key, V value)
         ///<inheritdoc/>
         public void Clear()
         {
-            int count = this.Count;
-
-            for (int i = 0; i < count; i++)
-            {
-                CycleHotUnchecked(ItemRemovedReason.Cleared);
-                CycleWarmUnchecked(ItemRemovedReason.Cleared);
-                TryRemoveCold(ItemRemovedReason.Cleared);
-            }
-
+            this.TrimLiveItems(itemsRemoved: 0, this.Count, ItemRemovedReason.Cleared);
             Volatile.Write(ref this.isWarm, false);
         }

@@ -458,7 +450,7 @@ public void Trim(int itemCount)
             // first scan each queue for discardable items and remove them immediately. Note this can remove > itemCount items.
             int itemsRemoved = this.itemPolicy.CanDiscard() ? TrimAllDiscardedItems() : 0;

-            TrimLiveItems(itemsRemoved, itemCount);
+            TrimLiveItems(itemsRemoved, itemCount, ItemRemovedReason.Trimmed);
         }

         private void TrimExpired()
@@ -507,42 +499,44 @@ void RemoveDiscardableItems(ConcurrentQueue q, ref int queueCounter)
             return itemsRemoved;
         }

-        private void TrimLiveItems(int itemsRemoved, int itemCount)
+        private void TrimLiveItems(int itemsRemoved, int itemCount, ItemRemovedReason reason)
         {
+            // When items are touched, they are moved to warm by cycling. Therefore, to guarantee
+            // that we can remove itemCount items, we must cycle capacity.Warm + capacity.Hot times.
             // If clear is called during trimming, it would be possible to get stuck in an infinite
-            // loop here. Instead quit after n consecutive failed attempts to move warm/hot to cold.
+            // loop here. The warm + hot limit also guards against this case.
             int trimWarmAttempts = 0;
-            int maxAttempts = this.capacity.Cold + 1;
+            int maxWarmHotAttempts = this.capacity.Warm + this.capacity.Hot;

-            while (itemsRemoved < itemCount && trimWarmAttempts < maxAttempts)
+            while (itemsRemoved < itemCount && trimWarmAttempts < maxWarmHotAttempts)
             {
                 if (Volatile.Read(ref this.counter.cold) > 0)
                 {
-                    if (TryRemoveCold(ItemRemovedReason.Trimmed) == (ItemDestination.Remove, 0))
+                    if (TryRemoveCold(reason) == (ItemDestination.Remove, 0))
                     {
                         itemsRemoved++;
                         trimWarmAttempts = 0;
                     }

-                    TrimWarmOrHot();
+                    TrimWarmOrHot(reason);
                 }
                 else
                 {
-                    TrimWarmOrHot();
+                    TrimWarmOrHot(reason);
                     trimWarmAttempts++;
                 }
             }
         }

-        private void TrimWarmOrHot()
+        private void TrimWarmOrHot(ItemRemovedReason reason)
         {
             if (Volatile.Read(ref this.counter.warm) > 0)
             {
-                CycleWarmUnchecked(ItemRemovedReason.Trimmed);
+                CycleWarmUnchecked(reason);
             }
             else if (Volatile.Read(ref this.counter.hot) > 0)
             {
-                CycleHotUnchecked(ItemRemovedReason.Trimmed);
+                CycleHotUnchecked(reason);
             }
         }

From cfed3aecb2742c2ba4f4bf8a0d0d19c476a7ac4e Mon Sep 17 00:00:00 2001
From: Alex Peck
Date: Mon, 6 Nov 2023 17:49:57 -0800
Subject: [PATCH 2/4] add trim test

---
 .../Lru/ConcurrentLruTests.cs | 40 ++++++++++++++++++-
 1 file changed, 39 insertions(+), 1 deletion(-)

diff --git a/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs b/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs
index caa2b4dd..142ee92d 100644
--- a/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs
+++ b/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs
@@ -9,7 +9,6 @@
 using Xunit.Abstractions;
 using System.Collections.Concurrent;
 using System.Reflection;
-using System.Runtime.CompilerServices;

 namespace BitFaster.Caching.UnitTests.Lru
 {
@@ -1118,6 +1117,45 @@ public void WhenColdItemsAreTouchedTrimRemovesExpectedItemCount(int trimCount, i
             this.testOutputHelper.WriteLine("exp " + string.Join(" ", expected));

             lru.Keys.Should().BeEquivalentTo(expected);
+        }
+
+        [Theory]
+        [InlineData(1)]
+        [InlineData(2)]
+        [InlineData(3)]
+        [InlineData(4)]
+        [InlineData(5)]
+        [InlineData(6)]
+        [InlineData(7)]
+        [InlineData(8)]
+        [InlineData(9)]
+        [InlineData(10)]
+        public void WhenItemsExistAndItemsAccessedTrimRemovesAllItems(int itemCount)
+        {
+            // By default capacity is 9. Test all possible states of touched items
+            // in the cache.
+
+            for (int i = 0; i < itemCount; i++)
+            {
+                lru.AddOrUpdate(i, "1");
+            }
+
+            // touch n items
+            for (int i = 0; i < itemCount; i++)
+            {
+                lru.TryGet(i, out _);
+            }
+
+            lru.Trim(Math.Min(itemCount, lru.Capacity));
+
+            this.testOutputHelper.WriteLine("LRU " + string.Join(" ", lru.Keys));
+
+            lru.Count.Should().Be(0);
+
+            // verify queues are purged
+            lru.HotCount.Should().Be(0);
+            lru.WarmCount.Should().Be(0);
+            lru.ColdCount.Should().Be(0);
         }

         [Fact]

From e48c58df543f55dd95f4146422effcf640c25590 Mon Sep 17 00:00:00 2001
From: Alex Peck
Date: Mon, 6 Nov 2023 18:05:15 -0800
Subject: [PATCH 3/4] special case

---
 .../Lru/ConcurrentTLruTests.cs             | 64 +++++++++++++++++++
 BitFaster.Caching/Lru/ConcurrentLruCore.cs |  4 +-
 2 files changed, 66 insertions(+), 2 deletions(-)

diff --git a/BitFaster.Caching.UnitTests/Lru/ConcurrentTLruTests.cs b/BitFaster.Caching.UnitTests/Lru/ConcurrentTLruTests.cs
index 16822234..9da3f670 100644
--- a/BitFaster.Caching.UnitTests/Lru/ConcurrentTLruTests.cs
+++ b/BitFaster.Caching.UnitTests/Lru/ConcurrentTLruTests.cs
@@ -218,5 +218,69 @@ protected override ICache CreateTLru(ICapacityPartition capacity, Ti
             // backcompat: use TlruStopwatchPolicy
             return new ConcurrentLruCore, TLruLongTicksPolicy, TelemetryPolicy>(1, capacity, EqualityComparer.Default, new TLruLongTicksPolicy(timeToLive), default);
         }
+
+#if NET6_0_OR_GREATER
+        private record FakeCacheKey(int SomeProperty);
+        private record FakeCacheValue
+        {
+            public int SomeProperty { get; set; }
+        };
+
+        [Theory]
+        [InlineData(true)]
+        [InlineData(false)]
+        public void WhenClearingCache_ShouldActuallyClearCache(bool shouldClearTwice)
+        {
+            var cache = new ConcurrentTLru<FakeCacheKey, FakeCacheValue>(3, TimeSpan.FromMinutes(10));
+            var keyOne = new FakeCacheKey(1);
+            var keyTwo = new FakeCacheKey(2);
+            var cacheValue = new FakeCacheValue();
+
+            cache.AddOrUpdate(keyOne, cacheValue);
+            cache.AddOrUpdate(keyTwo, cacheValue);
+
+            cache.TryGet(keyOne, out var retrievedKeyValueOne);
+            retrievedKeyValueOne.Should().BeSameAs(cacheValue);
+            cache.TryGet(keyTwo, out var retrievedKeyTwoValue);
+            retrievedKeyTwoValue.Should().BeSameAs(cacheValue);
+
+            cache.Clear();
+            if (shouldClearTwice)
+                cache.Clear();
+
+            cache.TryGet(keyOne, out var a);
+            a.Should().NotBeSameAs(cacheValue);
+            cache.TryGet(keyOne, out var b);
+            b.Should().NotBeSameAs(cacheValue);
+        }
+
+        [Theory]
+        //[InlineData(true)]
+        [InlineData(false)]
+        public void WhenClearingCache_ShouldActuallyClearCache_2(bool shouldClearTwice)
+        {
+            var cache = new ConcurrentTLru<FakeCacheKey, FakeCacheValue>(3, TimeSpan.FromMinutes(10));
+            var keyOne = new FakeCacheKey(1);
+            var keyTwo = new FakeCacheKey(2);
+            var cacheValue = new FakeCacheValue();
+
+            cache.AddOrUpdate(keyOne, cacheValue);
+            cache.AddOrUpdate(keyTwo, cacheValue);
+
+            cache.TryGet(keyOne, out var retrievedKeyValueOne);
+            retrievedKeyValueOne.Should().BeSameAs(cacheValue);
+            cache.TryGet(keyTwo, out var retrievedKeyTwoValue);
+            retrievedKeyTwoValue.Should().BeSameAs(cacheValue);
+
+            cache.Clear();
+            if (shouldClearTwice)
+                cache.Clear();
+
+            var a = cache.TryGet(keyOne, out _);
+            a.Should().BeFalse();
+            var b = cache.TryGet(keyOne, out _);
+            a.Should().BeFalse();
+        }
+#endif
     }
 }
diff --git a/BitFaster.Caching/Lru/ConcurrentLruCore.cs b/BitFaster.Caching/Lru/ConcurrentLruCore.cs
index 72267737..7f89c50e 100644
--- a/BitFaster.Caching/Lru/ConcurrentLruCore.cs
+++ b/BitFaster.Caching/Lru/ConcurrentLruCore.cs
@@ -502,11 +502,11 @@ void RemoveDiscardableItems(ConcurrentQueue q, ref int queueCounter)
         private void TrimLiveItems(int itemsRemoved, int itemCount, ItemRemovedReason reason)
         {
             // When items are touched, they are moved to warm by cycling. Therefore, to guarantee
-            // that we can remove itemCount items, we must cycle capacity.Warm + capacity.Hot times.
+            // that we can remove itemCount items, we must cycle (2 * capacity.Warm) + capacity.Hot times.
             // If clear is called during trimming, it would be possible to get stuck in an infinite
             // loop here. The warm + hot limit also guards against this case.
             int trimWarmAttempts = 0;
-            int maxWarmHotAttempts = this.capacity.Warm + this.capacity.Hot;
+            int maxWarmHotAttempts = (this.capacity.Warm * 2) + this.capacity.Hot;

             while (itemsRemoved < itemCount && trimWarmAttempts < maxWarmHotAttempts)
             {
                 if (Volatile.Read(ref this.counter.cold) > 0)
                 {

From 6f2dfb0efd95f6c4604e8885c97ee4e8ffd474a0 Mon Sep 17 00:00:00 2001
From: Alex Peck
Date: Mon, 6 Nov 2023 18:09:05 -0800
Subject: [PATCH 4/4] cleanup

---
 .../Lru/ConcurrentLruTests.cs  | 22 +++++++
 .../Lru/ConcurrentTLruTests.cs | 64 -------------------
 2 files changed, 22 insertions(+), 64 deletions(-)

diff --git a/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs b/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs
index 142ee92d..4c5d5266 100644
--- a/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs
+++ b/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs
@@ -874,6 +874,28 @@ public void WhenItemsExistClearRemovesAllItems()
             lru.ColdCount.Should().Be(0);
         }

+        // This is a special case:
+        // Cycle 1: hot => warm
+        // Cycle 2: warm => warm
+        // Cycle 3: warm => cold
+        // Cycle 4: cold => remove
+        // Cycle 5: cold => remove
+        [Fact]
+        public void WhenCacheIsSize3ItemsExistAndItemsAccessedClearRemovesAllItems()
+        {
+            lru = new ConcurrentLru<int, string>(3);
+
+            lru.AddOrUpdate(1, "1");
+            lru.AddOrUpdate(2, "1");
+
+            lru.TryGet(1, out _);
+            lru.TryGet(2, out _);
+
+            lru.Clear();
+
+            lru.Count.Should().Be(0);
+        }
+
         [Theory]
         [InlineData(1)]
         [InlineData(2)]
         [InlineData(3)]
diff --git a/BitFaster.Caching.UnitTests/Lru/ConcurrentTLruTests.cs b/BitFaster.Caching.UnitTests/Lru/ConcurrentTLruTests.cs
index 9da3f670..16822234 100644
--- a/BitFaster.Caching.UnitTests/Lru/ConcurrentTLruTests.cs
+++ b/BitFaster.Caching.UnitTests/Lru/ConcurrentTLruTests.cs
@@ -218,69 +218,5 @@ protected override ICache CreateTLru(ICapacityPartition capacity, Ti
             // backcompat: use TlruStopwatchPolicy
             return new ConcurrentLruCore, TLruLongTicksPolicy, TelemetryPolicy>(1, capacity, EqualityComparer.Default, new TLruLongTicksPolicy(timeToLive), default);
         }
-
-#if NET6_0_OR_GREATER
-        private record FakeCacheKey(int SomeProperty);
-        private record FakeCacheValue
-        {
-            public int SomeProperty { get; set; }
-        };
-
-        [Theory]
-        [InlineData(true)]
-        [InlineData(false)]
-        public void WhenClearingCache_ShouldActuallyClearCache(bool shouldClearTwice)
-        {
-            var cache = new ConcurrentTLru<FakeCacheKey, FakeCacheValue>(3, TimeSpan.FromMinutes(10));
-            var keyOne = new FakeCacheKey(1);
-            var keyTwo = new FakeCacheKey(2);
-            var cacheValue = new FakeCacheValue();
-
-            cache.AddOrUpdate(keyOne, cacheValue);
-            cache.AddOrUpdate(keyTwo, cacheValue);
-
-            cache.TryGet(keyOne, out var retrievedKeyValueOne);
-            retrievedKeyValueOne.Should().BeSameAs(cacheValue);
-            cache.TryGet(keyTwo, out var retrievedKeyTwoValue);
-            retrievedKeyTwoValue.Should().BeSameAs(cacheValue);
-
-            cache.Clear();
-            if (shouldClearTwice)
-                cache.Clear();
-
-            cache.TryGet(keyOne, out var a);
-            a.Should().NotBeSameAs(cacheValue);
-            cache.TryGet(keyOne, out var b);
-            b.Should().NotBeSameAs(cacheValue);
-        }
-
-        [Theory]
-        //[InlineData(true)]
-        [InlineData(false)]
-        public void WhenClearingCache_ShouldActuallyClearCache_2(bool shouldClearTwice)
-        {
-            var cache = new ConcurrentTLru<FakeCacheKey, FakeCacheValue>(3, TimeSpan.FromMinutes(10));
-            var keyOne = new FakeCacheKey(1);
-            var keyTwo = new FakeCacheKey(2);
-            var cacheValue = new FakeCacheValue();
-
-            cache.AddOrUpdate(keyOne, cacheValue);
-            cache.AddOrUpdate(keyTwo, cacheValue);
-
-            cache.TryGet(keyOne, out var retrievedKeyValueOne);
-            retrievedKeyValueOne.Should().BeSameAs(cacheValue);
-            cache.TryGet(keyTwo, out var retrievedKeyTwoValue);
-            retrievedKeyTwoValue.Should().BeSameAs(cacheValue);
-
-            cache.Clear();
-            if (shouldClearTwice)
-                cache.Clear();
-
-            var a = cache.TryGet(keyOne, out _);
-            a.Should().BeFalse();
-            var b = cache.TryGet(keyOne, out _);
-            a.Should().BeFalse();
-        }
-#endif
     }
 }
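
Note: the following is a minimal standalone sketch, not part of the patch series above. It reproduces the size-3 scenario covered by WhenCacheIsSize3ItemsExistAndItemsAccessedClearRemovesAllItems in PATCH 4/4, using only members the patches' tests already exercise (AddOrUpdate, TryGet, Clear, Count and the hot/warm/cold counts). The console wrapper and the assumption that ConcurrentLru lives in the BitFaster.Caching.Lru namespace are illustrative and not taken from the patches.

    using System;
    using BitFaster.Caching.Lru;

    public static class ClearAfterTouchSketch
    {
        public static void Main()
        {
            // Capacity 3, two items added and then touched, as in the special case test.
            var lru = new ConcurrentLru<int, string>(3);

            lru.AddOrUpdate(1, "1");
            lru.AddOrUpdate(2, "1");

            // Touched items are moved to warm by cycling, so Clear must cycle them
            // through warm and cold before they can be removed.
            lru.TryGet(1, out _);
            lru.TryGet(2, out _);

            lru.Clear();

            // With the (2 * capacity.Warm) + capacity.Hot attempt bound from PATCH 3/4,
            // Clear purges every queue even though the items were touched.
            Console.WriteLine($"count={lru.Count} hot={lru.HotCount} warm={lru.WarmCount} cold={lru.ColdCount}");
            // expected output: count=0 hot=0 warm=0 cold=0
        }
    }

Because PATCH 1/4 routes Clear through the same TrimLiveItems path used by Trim, Clear and Trim now share the cycling logic, with the ItemRemovedReason threaded through so removal callbacks still see Cleared versus Trimmed.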