From 20d87a9b8bb59ffd777f23827fa10d6e7f4560bd Mon Sep 17 00:00:00 2001 From: Alex Peck Date: Wed, 31 Aug 2022 23:46:12 -0700 Subject: [PATCH 1/6] buffer --- .../Lfu/LfuJustGetOrAdd.cs | 8 +-- .../Lru/LruJustGetOrAdd.cs | 2 +- BitFaster.Caching.HitRateAnalysis/Analysis.cs | 2 +- .../Program.cs | 2 +- .../AssemblyInitialize.cs | 7 +-- .../Lfu/BufferConfigurationTests.cs | 49 +++++++++++++++++++ .../Lfu/ConcurrentLfuTests.cs | 24 ++++----- BitFaster.Caching/Lfu/BufferConfiguration.cs | 43 ++++++++++++++++ .../Lfu/Builder/AsyncConcurrentLfuBuilder.cs | 2 +- BitFaster.Caching/Lfu/Builder/LfuInfo.cs | 14 ++++++ BitFaster.Caching/Lfu/ConcurrentLfu.cs | 10 ++-- BitFaster.Caching/Lfu/ConcurrentLfuBuilder.cs | 2 +- 12 files changed, 132 insertions(+), 33 deletions(-) create mode 100644 BitFaster.Caching.UnitTests/Lfu/BufferConfigurationTests.cs create mode 100644 BitFaster.Caching/Lfu/BufferConfiguration.cs diff --git a/BitFaster.Caching.Benchmarks/Lfu/LfuJustGetOrAdd.cs b/BitFaster.Caching.Benchmarks/Lfu/LfuJustGetOrAdd.cs index 7fef80d6..54c02e74 100644 --- a/BitFaster.Caching.Benchmarks/Lfu/LfuJustGetOrAdd.cs +++ b/BitFaster.Caching.Benchmarks/Lfu/LfuJustGetOrAdd.cs @@ -28,11 +28,11 @@ public class LfuJustGetOrAdd const int stripes = 1; private static readonly BackgroundThreadScheduler background = new BackgroundThreadScheduler(); - private static readonly ConcurrentLfu concurrentLfu = new ConcurrentLfu(stripes, 9, background, EqualityComparer.Default); + private static readonly ConcurrentLfu concurrentLfu = new ConcurrentLfu(stripes, 9, background, EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); - private static readonly ConcurrentLfu concurrentLfuFore = new ConcurrentLfu(stripes, 9, new ForegroundScheduler(), EqualityComparer.Default); - private static readonly ConcurrentLfu concurrentLfuTp = new ConcurrentLfu(stripes, 9, new ThreadPoolScheduler(), EqualityComparer.Default); - private static readonly ConcurrentLfu concurrentLfuNull = new ConcurrentLfu(stripes, 9, new NullScheduler(), EqualityComparer.Default); + private static readonly ConcurrentLfu concurrentLfuFore = new ConcurrentLfu(stripes, 9, new ForegroundScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + private static readonly ConcurrentLfu concurrentLfuTp = new ConcurrentLfu(stripes, 9, new ThreadPoolScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + private static readonly ConcurrentLfu concurrentLfuNull = new ConcurrentLfu(stripes, 9, new NullScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); [GlobalSetup] public void GlobalSetup() diff --git a/BitFaster.Caching.Benchmarks/Lru/LruJustGetOrAdd.cs b/BitFaster.Caching.Benchmarks/Lru/LruJustGetOrAdd.cs index d135a145..65d7bec7 100644 --- a/BitFaster.Caching.Benchmarks/Lru/LruJustGetOrAdd.cs +++ b/BitFaster.Caching.Benchmarks/Lru/LruJustGetOrAdd.cs @@ -52,7 +52,7 @@ public class LruJustGetOrAdd private static readonly ICache atomicFastLru = new ConcurrentLruBuilder().WithConcurrencyLevel(8).WithCapacity(9).WithAtomicGetOrAdd().Build(); private static readonly BackgroundThreadScheduler background = new BackgroundThreadScheduler(); - private static readonly ConcurrentLfu concurrentLfu = new ConcurrentLfu(1, 9, background, EqualityComparer.Default); + private static readonly ConcurrentLfu concurrentLfu = new ConcurrentLfu(1, 9, background, EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); private static readonly int key = 1; diff --git 
a/BitFaster.Caching.HitRateAnalysis/Analysis.cs b/BitFaster.Caching.HitRateAnalysis/Analysis.cs index 1c9558a4..191ecc8c 100644 --- a/BitFaster.Caching.HitRateAnalysis/Analysis.cs +++ b/BitFaster.Caching.HitRateAnalysis/Analysis.cs @@ -22,7 +22,7 @@ public Analysis(int cacheSize) { concurrentLru = new ConcurrentLru(1, cacheSize, EqualityComparer.Default); classicLru = new ClassicLru(1, cacheSize, EqualityComparer.Default); - concurrentLfu = new ConcurrentLfu(1, cacheSize, new ForegroundScheduler(), EqualityComparer.Default); + concurrentLfu = new ConcurrentLfu(1, cacheSize, new ForegroundScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); } public int CacheSize => concurrentLru.Capacity; diff --git a/BitFaster.Caching.ThroughputAnalysis/Program.cs b/BitFaster.Caching.ThroughputAnalysis/Program.cs index 81bdf720..551c8d4e 100644 --- a/BitFaster.Caching.ThroughputAnalysis/Program.cs +++ b/BitFaster.Caching.ThroughputAnalysis/Program.cs @@ -102,7 +102,7 @@ static void Main(string[] args) for (int i = 0; i < warmup + runs; i++) { var scheduler = new BackgroundThreadScheduler(); - results[i] = MeasureThroughput(new ConcurrentLfu(concurrencyLevel: tc, capacity: capacity, scheduler: scheduler, EqualityComparer.Default), tc); + results[i] = MeasureThroughput(new ConcurrentLfu(concurrencyLevel: tc, capacity: capacity, scheduler: scheduler, EqualityComparer.Default, BufferConfiguration.CreateDefault(concurrencyLevel: tc, capacity: capacity)), tc); scheduler.Dispose(); } avg = AverageLast(results, runs) / 1000000; diff --git a/BitFaster.Caching.UnitTests/AssemblyInitialize.cs b/BitFaster.Caching.UnitTests/AssemblyInitialize.cs index 4dbcd84d..120917fe 100644 --- a/BitFaster.Caching.UnitTests/AssemblyInitialize.cs +++ b/BitFaster.Caching.UnitTests/AssemblyInitialize.cs @@ -1,9 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading; -using System.Threading.Tasks; +using System.Threading; using Xunit.Abstractions; using Xunit.Sdk; diff --git a/BitFaster.Caching.UnitTests/Lfu/BufferConfigurationTests.cs b/BitFaster.Caching.UnitTests/Lfu/BufferConfigurationTests.cs new file mode 100644 index 00000000..4d42db44 --- /dev/null +++ b/BitFaster.Caching.UnitTests/Lfu/BufferConfigurationTests.cs @@ -0,0 +1,49 @@ +using BitFaster.Caching.Lfu; +using FluentAssertions; +using Xunit; + +namespace BitFaster.Caching.UnitTests.Lfu +{ + public class BufferConfigurationTests + { + [Theory] + [InlineData(1, 3, 1, 4, 1, 4)] + [InlineData(1, 100, 1, 128, 1, 128)] + [InlineData(4, 100, 4, 128, 4, 32)] + [InlineData(8, 100, 8, 128, 8, 16)] + [InlineData(12, 100, 16, 128, 16, 8)] + [InlineData(16, 100, 16, 128, 16, 8)] + [InlineData(24, 100, 32, 128, 32, 4)] + [InlineData(64, 100, 64, 64, 64, 4)] + [InlineData(96, 100, 128, 32, 128, 4)] + [InlineData(1, 1000, 1, 128, 1, 128)] + [InlineData(2, 1000, 2, 128, 2, 128)] + [InlineData(4, 1000, 4, 128, 4, 128)] + [InlineData(8, 1000, 8, 128, 8, 128)] + [InlineData(16, 1000, 16, 128, 16, 64)] + [InlineData(32, 1000, 32, 128, 32, 32)] + [InlineData(64, 1000, 64, 64, 64, 16)] + [InlineData(128, 1000, 128, 32, 128, 8)] + [InlineData(256, 1000, 256, 16, 256, 4)] + [InlineData(1, 10000, 1, 128, 1, 128)] + [InlineData(4, 10000, 4, 128, 4, 128)] + [InlineData(8, 10000, 8, 128, 8, 128)] + [InlineData(16, 10000, 16, 128, 16, 128)] + [InlineData(32, 10000, 32, 128, 32, 64)] + [InlineData(64, 10000, 64, 64, 64, 32)] + [InlineData(128, 10000, 128, 32, 128, 16)] + [InlineData(256, 10000, 256, 16, 256, 
8)] + [InlineData(1, 100000, 1, 128, 1, 128)] + [InlineData(32, 100000, 32, 128, 32, 64)] + [InlineData(256, 100000, 256, 16, 256, 8)] + public void CalculateDefaultBufferConfiguration(int concurrencyLevel, int capacity, int expectedReadStripes, int expectedReadBuffer, int expecteWriteStripes, int expecteWriteBuffer) + { + var bufferConfig = BufferConfiguration.CreateDefault(concurrencyLevel, capacity); + + bufferConfig.ReadBufferStripes.Should().Be(expectedReadStripes); + bufferConfig.ReadBufferSize.Should().Be(expectedReadBuffer); + bufferConfig.WriteBufferStripes.Should().Be(expecteWriteStripes); + bufferConfig.WriteBufferSize.Should().Be(expecteWriteBuffer); + } + } +} diff --git a/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs b/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs index b54457d4..5dc7d5f0 100644 --- a/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs +++ b/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs @@ -19,7 +19,7 @@ public class ConcurrentLfuTests { private readonly ITestOutputHelper output; - private ConcurrentLfu cache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default); + private ConcurrentLfu cache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); private ValueFactory valueFactory = new ValueFactory(); public ConcurrentLfuTests(ITestOutputHelper output) @@ -76,7 +76,7 @@ public void WhenItemsAddedExceedsCapacityItemsAreDiscarded() [Fact] public void WhenItemIsEvictedItIsDisposed() { - var dcache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default); + var dcache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); var disposables = new DisposableItem[25]; for (int i = 0; i < 25; i++) @@ -306,7 +306,7 @@ public void WriteUpdatesProtectedLruOrder() [Fact] public void WhenHitRateChangesWindowSizeIsAdapted() { - cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default); + cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); // First completely fill the cache, push entries into protected for (int i = 0; i < 20; i++) @@ -375,7 +375,7 @@ public void WhenHitRateChangesWindowSizeIsAdapted() public void ReadSchedulesMaintenanceWhenBufferIsFull() { var scheduler = new TestScheduler(); - cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default); + cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); cache.GetOrAdd(1, k => k); scheduler.RunCount.Should().Be(1); @@ -397,7 +397,7 @@ public void WhenReadBufferIsFullReadsAreDropped() { int bufferSize = ConcurrentLfu.BufferSize; var scheduler = new TestScheduler(); - cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default); + cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); cache.GetOrAdd(1, k => k); scheduler.RunCount.Should().Be(1); @@ -417,7 +417,7 @@ public void WhenReadBufferIsFullReadsAreDropped() public void WhenWriteBufferIsFullAddDoesMaintenance() { var scheduler = new TestScheduler(); - cache = new ConcurrentLfu(1, ConcurrentLfu.BufferSize * 2, scheduler, EqualityComparer.Default); + cache = new ConcurrentLfu(1, ConcurrentLfu.BufferSize * 2, scheduler, EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); // add an item, 
flush write buffer cache.GetOrAdd(-1, k => k); @@ -446,7 +446,7 @@ public void WhenWriteBufferIsFullUpdatesAreDropped() { int bufferSize = ConcurrentLfu.BufferSize; var scheduler = new TestScheduler(); - cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default); + cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); cache.GetOrAdd(1, k => k); scheduler.RunCount.Should().Be(1); @@ -578,7 +578,7 @@ public void WhenItemIsRemovedItIsRemoved() [Fact] public void WhenItemIsRemovedItIsDisposed() { - var dcache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default); + var dcache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); var disposable = new DisposableItem(); dcache.GetOrAdd(1, k => disposable); @@ -667,7 +667,7 @@ public void TrimRemovesNItems() public void TrimWhileItemsInWriteBufferRemovesNItems() { // null scheduler == no maintenance, all writes fit in buffer - cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default); + cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); for (int i = 0; i < 25; i++) { @@ -705,7 +705,7 @@ public void VerifyHitsWithBackgroundScheduler() public void VerifyHitsWithThreadPoolScheduler() { // when running all tests in parallel, sample count drops significantly: set low bar for stability. - cache = new ConcurrentLfu(1, 20, new ThreadPoolScheduler(), EqualityComparer.Default); + cache = new ConcurrentLfu(1, 20, new ThreadPoolScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); VerifyHits(iterations: 10000000, minSamples: 500000); } @@ -715,7 +715,7 @@ public void VerifyHitsWithThreadPoolScheduler() [Fact] public void VerifyHitsWithNullScheduler() { - cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default); + cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); VerifyHits(iterations: 10000000, minSamples: -1); } @@ -726,7 +726,7 @@ public void VerifyHitsWithNullScheduler() [Fact] public void VerifyHitsWithForegroundScheduler() { - cache = new ConcurrentLfu(1, 20, new ForegroundScheduler(), EqualityComparer.Default); + cache = new ConcurrentLfu(1, 20, new ForegroundScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); // Note: TryAdd will drop 1 read per full read buffer, since TryAdd will return false // before TryScheduleDrain is called. This serves as sanity check. diff --git a/BitFaster.Caching/Lfu/BufferConfiguration.cs b/BitFaster.Caching/Lfu/BufferConfiguration.cs new file mode 100644 index 00000000..924ee926 --- /dev/null +++ b/BitFaster.Caching/Lfu/BufferConfiguration.cs @@ -0,0 +1,43 @@ + +using System; + +namespace BitFaster.Caching.Lfu +{ + public class BufferConfiguration + { + private const int MaxReadBufferTotalSize = 4096; + private const int MaxWriteBufferTotalSize = 2048; + + public int ReadBufferStripes { get; set; } + + public int ReadBufferSize { get; set; } + + public int WriteBufferStripes { get; set; } + + public int WriteBufferSize { get; set; } + + public static BufferConfiguration CreateDefault(int concurrencyLevel, int capacity) + { + concurrencyLevel = BitOps.CeilingPowerOfTwo(concurrencyLevel); + + // Estimate total read buffer size based on capacity and concurrency, up to a maximum of MaxReadBufferTotalSize. 
+ // Stripe based on concurrency, with a minimum and maximum buffer size (between 4 and 128). + // Total size becomes 4 * concurrency level when concurrencyLevel * capacity > MaxReadBufferTotalSize. + int readBufferTotalSize = Math.Min(BitOps.CeilingPowerOfTwo(concurrencyLevel * capacity), MaxReadBufferTotalSize); + int readStripeSize = Math.Min(BitOps.CeilingPowerOfTwo(Math.Max(readBufferTotalSize / concurrencyLevel, 4)), 128); + + // Try to constrain write buffer size so that the LFU dictionary will not ever end up with more than 2x cache + // capacity entries before maintenance runs. + int writeBufferTotalSize = Math.Min(BitOps.CeilingPowerOfTwo(capacity), MaxWriteBufferTotalSize); + int writeStripeSize = Math.Min(BitOps.CeilingPowerOfTwo(Math.Max(writeBufferTotalSize / concurrencyLevel, 4)), 128); + + return new BufferConfiguration() + { + ReadBufferStripes = concurrencyLevel, + ReadBufferSize = 128, + WriteBufferStripes = concurrencyLevel, + WriteBufferSize = writeStripeSize, + }; + } + } +} diff --git a/BitFaster.Caching/Lfu/Builder/AsyncConcurrentLfuBuilder.cs b/BitFaster.Caching/Lfu/Builder/AsyncConcurrentLfuBuilder.cs index 314f42fe..fe6a99aa 100644 --- a/BitFaster.Caching/Lfu/Builder/AsyncConcurrentLfuBuilder.cs +++ b/BitFaster.Caching/Lfu/Builder/AsyncConcurrentLfuBuilder.cs @@ -14,7 +14,7 @@ internal AsyncConcurrentLfuBuilder(LfuInfo info) /// public override IAsyncCache Build() { - return new ConcurrentLfu(info.ConcurrencyLevel, info.Capacity, info.Scheduler, info.KeyComparer); + return new ConcurrentLfu(info.ConcurrencyLevel, info.Capacity, info.Scheduler, info.KeyComparer, info.BufferConfiguration); } } } diff --git a/BitFaster.Caching/Lfu/Builder/LfuInfo.cs b/BitFaster.Caching/Lfu/Builder/LfuInfo.cs index 4415a16b..b0704b89 100644 --- a/BitFaster.Caching/Lfu/Builder/LfuInfo.cs +++ b/BitFaster.Caching/Lfu/Builder/LfuInfo.cs @@ -8,6 +8,8 @@ namespace BitFaster.Caching.Lfu.Builder { public sealed class LfuInfo { + private BufferConfiguration bufferConfiguration; + public int Capacity { get; set; } = 128; public int ConcurrencyLevel { get; set; } = Defaults.ConcurrencyLevel; @@ -15,5 +17,17 @@ public sealed class LfuInfo public IScheduler Scheduler { get; set; } = new ThreadPoolScheduler(); public IEqualityComparer KeyComparer { get; set; } = EqualityComparer.Default; + + public BufferConfiguration BufferConfiguration + { + get + { + return this.bufferConfiguration ?? 
BufferConfiguration.CreateDefault(ConcurrencyLevel, Capacity); + } + set + { + bufferConfiguration = value; + } + } } } diff --git a/BitFaster.Caching/Lfu/ConcurrentLfu.cs b/BitFaster.Caching/Lfu/ConcurrentLfu.cs index 90f08eaf..0712f143 100644 --- a/BitFaster.Caching/Lfu/ConcurrentLfu.cs +++ b/BitFaster.Caching/Lfu/ConcurrentLfu.cs @@ -64,18 +64,16 @@ public sealed class ConcurrentLfu : ICache, IAsyncCache, IBoun #endif public ConcurrentLfu(int capacity) - : this(Defaults.ConcurrencyLevel, capacity, new ThreadPoolScheduler(), EqualityComparer.Default) + : this(Defaults.ConcurrencyLevel, capacity, new ThreadPoolScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(Defaults.ConcurrencyLevel, capacity)) { } - public ConcurrentLfu(int concurrencyLevel, int capacity, IScheduler scheduler, IEqualityComparer comparer) + public ConcurrentLfu(int concurrencyLevel, int capacity, IScheduler scheduler, IEqualityComparer comparer, BufferConfiguration bufferConfiguration) { this.dictionary = new ConcurrentDictionary>(concurrencyLevel, capacity, comparer); - this.readBuffer = new StripedMpscBuffer>(concurrencyLevel, BufferSize); - - // TODO: how big should this be in total? We shouldn't allow more than some capacity % of writes in the buffer - this.writeBuffer = new StripedMpscBuffer>(concurrencyLevel, BufferSize); + this.readBuffer = new StripedMpscBuffer>(bufferConfiguration.ReadBufferStripes, bufferConfiguration.ReadBufferSize); + this.writeBuffer = new StripedMpscBuffer>(bufferConfiguration.WriteBufferStripes, bufferConfiguration.WriteBufferSize); this.cmSketch = new CmSketch(1, comparer); this.cmSketch.EnsureCapacity(capacity); diff --git a/BitFaster.Caching/Lfu/ConcurrentLfuBuilder.cs b/BitFaster.Caching/Lfu/ConcurrentLfuBuilder.cs index ff9c568e..fafe2c41 100644 --- a/BitFaster.Caching/Lfu/ConcurrentLfuBuilder.cs +++ b/BitFaster.Caching/Lfu/ConcurrentLfuBuilder.cs @@ -23,7 +23,7 @@ internal ConcurrentLfuBuilder(LfuInfo info) public override ICache Build() { // TODO: key comparer - return new ConcurrentLfu(info.ConcurrencyLevel, info.Capacity, info.Scheduler, info.KeyComparer); + return new ConcurrentLfu(info.ConcurrencyLevel, info.Capacity, info.Scheduler, info.KeyComparer, info.BufferConfiguration); } } } From 498731ec39cc23d34163c0ba0b2491e6efe6829a Mon Sep 17 00:00:00 2001 From: Alex Peck Date: Fri, 2 Sep 2022 16:16:51 -0700 Subject: [PATCH 2/6] cleanup --- .../Lfu/LfuJustGetOrAdd.cs | 8 +-- .../Lru/LruJustGetOrAdd.cs | 2 +- BitFaster.Caching.HitRateAnalysis/Analysis.cs | 2 +- .../Program.cs | 4 +- .../Buffers/StripedBufferSizeTests.cs | 42 +++++++++++++ .../Lfu/BufferConfigurationTests.cs | 42 ++++--------- .../Lfu/ConcurrentLfuBuilderTests.cs | 16 +++-- .../Lfu/ConcurrentLfuTests.cs | 44 ++++++------- .../Scheduler/BackgroundSchedulerTests.cs | 19 +++--- .../Buffers/StripedBufferSize.cs | 27 ++++++++ .../Buffers/StripedMpmcBuffer.cs | 12 ++-- .../Buffers/StripedMpscBuffer.cs | 12 ++-- BitFaster.Caching/Lfu/BufferConfiguration.cs | 43 ------------- .../Lfu/Builder/LfuBuilderBase.cs | 12 ++++ BitFaster.Caching/Lfu/Builder/LfuInfo.cs | 6 +- BitFaster.Caching/Lfu/ConcurrentLfu.cs | 13 ++-- BitFaster.Caching/Lfu/ConcurrentLfuBuilder.cs | 5 +- BitFaster.Caching/Lfu/LfuBufferSize.cs | 63 +++++++++++++++++++ .../Scheduler/BackgroundThreadScheduler.cs | 15 +---- 19 files changed, 232 insertions(+), 155 deletions(-) create mode 100644 BitFaster.Caching.UnitTests/Buffers/StripedBufferSizeTests.cs create mode 100644 BitFaster.Caching/Buffers/StripedBufferSize.cs delete mode 100644 
BitFaster.Caching/Lfu/BufferConfiguration.cs create mode 100644 BitFaster.Caching/Lfu/LfuBufferSize.cs diff --git a/BitFaster.Caching.Benchmarks/Lfu/LfuJustGetOrAdd.cs b/BitFaster.Caching.Benchmarks/Lfu/LfuJustGetOrAdd.cs index dc489a6d..47b24376 100644 --- a/BitFaster.Caching.Benchmarks/Lfu/LfuJustGetOrAdd.cs +++ b/BitFaster.Caching.Benchmarks/Lfu/LfuJustGetOrAdd.cs @@ -22,11 +22,11 @@ public class LfuJustGetOrAdd const int stripes = 1; private static readonly BackgroundThreadScheduler background = new BackgroundThreadScheduler(); - private static readonly ConcurrentLfu concurrentLfu = new ConcurrentLfu(stripes, 9, background, EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + private static readonly ConcurrentLfu concurrentLfu = new ConcurrentLfu(stripes, 9, background, EqualityComparer.Default, LfuBufferSize.Default(1, 128)); - private static readonly ConcurrentLfu concurrentLfuFore = new ConcurrentLfu(stripes, 9, new ForegroundScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); - private static readonly ConcurrentLfu concurrentLfuTp = new ConcurrentLfu(stripes, 9, new ThreadPoolScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); - private static readonly ConcurrentLfu concurrentLfuNull = new ConcurrentLfu(stripes, 9, new NullScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + private static readonly ConcurrentLfu concurrentLfuFore = new ConcurrentLfu(stripes, 9, new ForegroundScheduler(), EqualityComparer.Default, LfuBufferSize.Default(1, 128)); + private static readonly ConcurrentLfu concurrentLfuTp = new ConcurrentLfu(stripes, 9, new ThreadPoolScheduler(), EqualityComparer.Default, LfuBufferSize.Default(1, 128)); + private static readonly ConcurrentLfu concurrentLfuNull = new ConcurrentLfu(stripes, 9, new NullScheduler(), EqualityComparer.Default, LfuBufferSize.Default(1, 128)); [GlobalSetup] public void GlobalSetup() diff --git a/BitFaster.Caching.Benchmarks/Lru/LruJustGetOrAdd.cs b/BitFaster.Caching.Benchmarks/Lru/LruJustGetOrAdd.cs index 1fde2c0f..bd301498 100644 --- a/BitFaster.Caching.Benchmarks/Lru/LruJustGetOrAdd.cs +++ b/BitFaster.Caching.Benchmarks/Lru/LruJustGetOrAdd.cs @@ -48,7 +48,7 @@ public class LruJustGetOrAdd private static readonly ICache atomicFastLru = new ConcurrentLruBuilder().WithConcurrencyLevel(8).WithCapacity(9).WithAtomicGetOrAdd().Build(); private static readonly BackgroundThreadScheduler background = new BackgroundThreadScheduler(); - private static readonly ConcurrentLfu concurrentLfu = new ConcurrentLfu(1, 9, background, EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + private static readonly ConcurrentLfu concurrentLfu = new ConcurrentLfu(1, 9, background, EqualityComparer.Default, LfuBufferSize.Default(1, 128)); private static readonly int key = 1; diff --git a/BitFaster.Caching.HitRateAnalysis/Analysis.cs b/BitFaster.Caching.HitRateAnalysis/Analysis.cs index 191ecc8c..716fad9f 100644 --- a/BitFaster.Caching.HitRateAnalysis/Analysis.cs +++ b/BitFaster.Caching.HitRateAnalysis/Analysis.cs @@ -22,7 +22,7 @@ public Analysis(int cacheSize) { concurrentLru = new ConcurrentLru(1, cacheSize, EqualityComparer.Default); classicLru = new ClassicLru(1, cacheSize, EqualityComparer.Default); - concurrentLfu = new ConcurrentLfu(1, cacheSize, new ForegroundScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + concurrentLfu = new ConcurrentLfu(1, cacheSize, new ForegroundScheduler(), 
EqualityComparer.Default, LfuBufferSize.Default(1, 128)); } public int CacheSize => concurrentLru.Capacity; diff --git a/BitFaster.Caching.ThroughputAnalysis/Program.cs b/BitFaster.Caching.ThroughputAnalysis/Program.cs index 551c8d4e..fb84d86d 100644 --- a/BitFaster.Caching.ThroughputAnalysis/Program.cs +++ b/BitFaster.Caching.ThroughputAnalysis/Program.cs @@ -22,7 +22,7 @@ class Program const double s = 0.86; const int n = 500; static int capacity = 500; - const int maxThreads = 52; + const int maxThreads = 64; const int sampleCount = 2000; const int repeatCount = 400; @@ -102,7 +102,7 @@ static void Main(string[] args) for (int i = 0; i < warmup + runs; i++) { var scheduler = new BackgroundThreadScheduler(); - results[i] = MeasureThroughput(new ConcurrentLfu(concurrencyLevel: tc, capacity: capacity, scheduler: scheduler, EqualityComparer.Default, BufferConfiguration.CreateDefault(concurrencyLevel: tc, capacity: capacity)), tc); + results[i] = MeasureThroughput(new ConcurrentLfu(concurrencyLevel: tc, capacity: capacity, scheduler: scheduler, EqualityComparer.Default, LfuBufferSize.Default(concurrencyLevel: tc, capacity: capacity)), tc); scheduler.Dispose(); } avg = AverageLast(results, runs) / 1000000; diff --git a/BitFaster.Caching.UnitTests/Buffers/StripedBufferSizeTests.cs b/BitFaster.Caching.UnitTests/Buffers/StripedBufferSizeTests.cs new file mode 100644 index 00000000..d9eed4af --- /dev/null +++ b/BitFaster.Caching.UnitTests/Buffers/StripedBufferSizeTests.cs @@ -0,0 +1,42 @@ +using System; +using BitFaster.Caching.Buffers; +using FluentAssertions; +using Xunit; + +namespace BitFaster.Caching.UnitTests.Buffers +{ + public class StripedBufferSizeTests + { + [Fact] + public void WhenBufferSizeIsLessThan1CtorThrows() + { + Action constructor = () => { var x = new StripedBufferSize(-1, 1); }; + + constructor.Should().Throw(); + } + + [Fact] + public void WhenStripeCountIsLessThan1CtorThrows() + { + Action constructor = () => { var x = new StripedBufferSize(1, -1); }; + + constructor.Should().Throw(); + } + + [Fact] + public void SizeIsRoundedToNextPowerOf2() + { + var bs = new StripedBufferSize(6, 16); + + bs.BufferSize.Should().Be(8); + } + + [Fact] + public void StripeCountIsRoundedToNextPowerOf2() + { + var bs = new StripedBufferSize(16, 6); + + bs.StripeCount.Should().Be(8); + } + } +} diff --git a/BitFaster.Caching.UnitTests/Lfu/BufferConfigurationTests.cs b/BitFaster.Caching.UnitTests/Lfu/BufferConfigurationTests.cs index 4d42db44..d9a79972 100644 --- a/BitFaster.Caching.UnitTests/Lfu/BufferConfigurationTests.cs +++ b/BitFaster.Caching.UnitTests/Lfu/BufferConfigurationTests.cs @@ -7,43 +7,25 @@ namespace BitFaster.Caching.UnitTests.Lfu public class BufferConfigurationTests { [Theory] - [InlineData(1, 3, 1, 4, 1, 4)] + [InlineData(1, 3, 1, 32, 1, 16)] + [InlineData(1, 14, 1, 128, 1, 16)] + [InlineData(1, 50, 1, 128, 1, 64)] [InlineData(1, 100, 1, 128, 1, 128)] [InlineData(4, 100, 4, 128, 4, 32)] - [InlineData(8, 100, 8, 128, 8, 16)] - [InlineData(12, 100, 16, 128, 16, 8)] - [InlineData(16, 100, 16, 128, 16, 8)] - [InlineData(24, 100, 32, 128, 32, 4)] - [InlineData(64, 100, 64, 64, 64, 4)] - [InlineData(96, 100, 128, 32, 128, 4)] + [InlineData(16, 100, 8, 128, 8, 16)] + [InlineData(64, 100, 8, 128, 8, 16)] [InlineData(1, 1000, 1, 128, 1, 128)] - [InlineData(2, 1000, 2, 128, 2, 128)] [InlineData(4, 1000, 4, 128, 4, 128)] - [InlineData(8, 1000, 8, 128, 8, 128)] - [InlineData(16, 1000, 16, 128, 16, 64)] [InlineData(32, 1000, 32, 128, 32, 32)] - [InlineData(64, 1000, 64, 64, 64, 16)] - 
[InlineData(128, 1000, 128, 32, 128, 8)] - [InlineData(256, 1000, 256, 16, 256, 4)] - [InlineData(1, 10000, 1, 128, 1, 128)] - [InlineData(4, 10000, 4, 128, 4, 128)] - [InlineData(8, 10000, 8, 128, 8, 128)] - [InlineData(16, 10000, 16, 128, 16, 128)] - [InlineData(32, 10000, 32, 128, 32, 64)] - [InlineData(64, 10000, 64, 64, 64, 32)] - [InlineData(128, 10000, 128, 32, 128, 16)] - [InlineData(256, 10000, 256, 16, 256, 8)] - [InlineData(1, 100000, 1, 128, 1, 128)] - [InlineData(32, 100000, 32, 128, 32, 64)] - [InlineData(256, 100000, 256, 16, 256, 8)] - public void CalculateDefaultBufferConfiguration(int concurrencyLevel, int capacity, int expectedReadStripes, int expectedReadBuffer, int expecteWriteStripes, int expecteWriteBuffer) + [InlineData(256, 100000, 32, 128, 32, 32)] + public void CalculateDefaultBufferSize(int concurrencyLevel, int capacity, int expectedReadStripes, int expectedReadBuffer, int expecteWriteStripes, int expecteWriteBuffer) { - var bufferConfig = BufferConfiguration.CreateDefault(concurrencyLevel, capacity); + var bufferSize = LfuBufferSize.Default(concurrencyLevel, capacity); - bufferConfig.ReadBufferStripes.Should().Be(expectedReadStripes); - bufferConfig.ReadBufferSize.Should().Be(expectedReadBuffer); - bufferConfig.WriteBufferStripes.Should().Be(expecteWriteStripes); - bufferConfig.WriteBufferSize.Should().Be(expecteWriteBuffer); + bufferSize.Read.StripeCount.Should().Be(expectedReadStripes); + bufferSize.Read.BufferSize.Should().Be(expectedReadBuffer); + bufferSize.Write.StripeCount.Should().Be(expecteWriteStripes); + bufferSize.Write.BufferSize.Should().Be(expecteWriteBuffer); } } } diff --git a/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuBuilderTests.cs b/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuBuilderTests.cs index 96d9467a..06b41c4b 100644 --- a/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuBuilderTests.cs +++ b/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuBuilderTests.cs @@ -1,9 +1,6 @@ using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading.Tasks; using BitFaster.Caching.Atomic; +using BitFaster.Caching.Buffers; using BitFaster.Caching.Lfu; using BitFaster.Caching.Scheduler; using FluentAssertions; @@ -56,6 +53,17 @@ public void TestComparer() lfu.TryGet("A", out var value).Should().BeTrue(); } + [Fact] + public void TestBufferConfiguraiton() + { + ICache lfu = new ConcurrentLfuBuilder() + .WithBufferConfiguration(new LfuBufferSize( + new StripedBufferSize(128, 2), + new StripedBufferSize(128, 2) + )) + .Build(); + } + // 1 [Fact] public void WithScopedValues() diff --git a/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs b/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs index 5dc7d5f0..da263c47 100644 --- a/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs +++ b/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs @@ -3,9 +3,8 @@ using System.Collections.Generic; using System.Diagnostics; using System.Linq; -using System.Text; -using System.Threading; using System.Threading.Tasks; +using BitFaster.Caching.Buffers; using BitFaster.Caching.Lfu; using BitFaster.Caching.Scheduler; using BitFaster.Caching.UnitTests.Lru; @@ -19,7 +18,7 @@ public class ConcurrentLfuTests { private readonly ITestOutputHelper output; - private ConcurrentLfu cache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + private ConcurrentLfu cache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default, 
LfuBufferSize.Default(1, 128)); private ValueFactory valueFactory = new ValueFactory(); public ConcurrentLfuTests(ITestOutputHelper output) @@ -76,7 +75,7 @@ public void WhenItemsAddedExceedsCapacityItemsAreDiscarded() [Fact] public void WhenItemIsEvictedItIsDisposed() { - var dcache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + var dcache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default, LfuBufferSize.Default(1, 128)); var disposables = new DisposableItem[25]; for (int i = 0; i < 25; i++) @@ -306,7 +305,7 @@ public void WriteUpdatesProtectedLruOrder() [Fact] public void WhenHitRateChangesWindowSizeIsAdapted() { - cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default, LfuBufferSize.Default(1, 128)); // First completely fill the cache, push entries into protected for (int i = 0; i < 20; i++) @@ -375,13 +374,13 @@ public void WhenHitRateChangesWindowSizeIsAdapted() public void ReadSchedulesMaintenanceWhenBufferIsFull() { var scheduler = new TestScheduler(); - cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default, LfuBufferSize.Default(1, 128)); cache.GetOrAdd(1, k => k); scheduler.RunCount.Should().Be(1); cache.PendingMaintenance(); - for (int i = 0; i < ConcurrentLfu.BufferSize; i++) + for (int i = 0; i < LfuBufferSize.DefaultBufferSize; i++) { scheduler.RunCount.Should().Be(1); cache.GetOrAdd(1, k => k); @@ -395,29 +394,31 @@ public void ReadSchedulesMaintenanceWhenBufferIsFull() [Fact] public void WhenReadBufferIsFullReadsAreDropped() { - int bufferSize = ConcurrentLfu.BufferSize; var scheduler = new TestScheduler(); - cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default, LfuBufferSize.Default(1, 128)); cache.GetOrAdd(1, k => k); scheduler.RunCount.Should().Be(1); cache.PendingMaintenance(); - for (int i = 0; i < bufferSize * 2; i++) + for (int i = 0; i < LfuBufferSize.DefaultBufferSize * 2; i++) { cache.GetOrAdd(1, k => k); } cache.PendingMaintenance(); - cache.Metrics.Value.Hits.Should().Be(bufferSize); + cache.Metrics.Value.Hits.Should().Be(LfuBufferSize.DefaultBufferSize); } [Fact] public void WhenWriteBufferIsFullAddDoesMaintenance() { + var bufferSize = LfuBufferSize.DefaultBufferSize; var scheduler = new TestScheduler(); - cache = new ConcurrentLfu(1, ConcurrentLfu.BufferSize * 2, scheduler, EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + + var bufferConfig = new LfuBufferSize(new StripedBufferSize(bufferSize, 1), new StripedBufferSize(bufferSize, 1)); + cache = new ConcurrentLfu(1, bufferSize * 2, scheduler, EqualityComparer.Default, bufferConfig); // add an item, flush write buffer cache.GetOrAdd(-1, k => k); @@ -430,7 +431,7 @@ public void WhenWriteBufferIsFullAddDoesMaintenance() // add buffer size items, last iteration will invoke maintenance on the foreground since write // buffer is full and test scheduler did not do any work - for (int i = 0; i < ConcurrentLfu.BufferSize; i++) + for (int i = 0; i < bufferSize; i++) { scheduler.RunCount.Should().Be(2); cache.GetOrAdd(i, k => k); @@ -444,9 +445,10 @@ public void 
WhenWriteBufferIsFullAddDoesMaintenance() [Fact] public void WhenWriteBufferIsFullUpdatesAreDropped() { - int bufferSize = ConcurrentLfu.BufferSize; + var bufferSize = LfuBufferSize.DefaultBufferSize; var scheduler = new TestScheduler(); - cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + var bufferConfig = new LfuBufferSize(new StripedBufferSize(bufferSize, 1), new StripedBufferSize(bufferSize, 1)); + cache = new ConcurrentLfu(1, 20, scheduler, EqualityComparer.Default, bufferConfig); cache.GetOrAdd(1, k => k); scheduler.RunCount.Should().Be(1); @@ -578,7 +580,7 @@ public void WhenItemIsRemovedItIsRemoved() [Fact] public void WhenItemIsRemovedItIsDisposed() { - var dcache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + var dcache = new ConcurrentLfu(1, 20, new BackgroundThreadScheduler(), EqualityComparer.Default, LfuBufferSize.Default(1, 128)); var disposable = new DisposableItem(); dcache.GetOrAdd(1, k => disposable); @@ -667,7 +669,7 @@ public void TrimRemovesNItems() public void TrimWhileItemsInWriteBufferRemovesNItems() { // null scheduler == no maintenance, all writes fit in buffer - cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default, LfuBufferSize.Default(1, 128)); for (int i = 0; i < 25; i++) { @@ -705,7 +707,7 @@ public void VerifyHitsWithBackgroundScheduler() public void VerifyHitsWithThreadPoolScheduler() { // when running all tests in parallel, sample count drops significantly: set low bar for stability. - cache = new ConcurrentLfu(1, 20, new ThreadPoolScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + cache = new ConcurrentLfu(1, 20, new ThreadPoolScheduler(), EqualityComparer.Default, LfuBufferSize.Default(1, 128)); VerifyHits(iterations: 10000000, minSamples: 500000); } @@ -715,7 +717,7 @@ public void VerifyHitsWithThreadPoolScheduler() [Fact] public void VerifyHitsWithNullScheduler() { - cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + cache = new ConcurrentLfu(1, 20, new NullScheduler(), EqualityComparer.Default, LfuBufferSize.Default(1, 128)); VerifyHits(iterations: 10000000, minSamples: -1); } @@ -726,12 +728,12 @@ public void VerifyHitsWithNullScheduler() [Fact] public void VerifyHitsWithForegroundScheduler() { - cache = new ConcurrentLfu(1, 20, new ForegroundScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(1, 128)); + cache = new ConcurrentLfu(1, 20, new ForegroundScheduler(), EqualityComparer.Default, LfuBufferSize.Default(1, 128)); // Note: TryAdd will drop 1 read per full read buffer, since TryAdd will return false // before TryScheduleDrain is called. This serves as sanity check. 
int iterations = 10000000; - int dropped = iterations / ConcurrentLfu.BufferSize; + int dropped = iterations / LfuBufferSize.DefaultBufferSize; this.output.WriteLine($"Will drop {dropped} reads."); diff --git a/BitFaster.Caching.UnitTests/Scheduler/BackgroundSchedulerTests.cs b/BitFaster.Caching.UnitTests/Scheduler/BackgroundSchedulerTests.cs index 5a99aaab..08e7d1c9 100644 --- a/BitFaster.Caching.UnitTests/Scheduler/BackgroundSchedulerTests.cs +++ b/BitFaster.Caching.UnitTests/Scheduler/BackgroundSchedulerTests.cs @@ -69,23 +69,18 @@ public async Task WhenWorkThrowsLastExceptionIsPopulated() } [Fact] - public void WhenBacklogExceededThrows() + public void WhenBacklogExceededTasksAreDropped() { TaskCompletionSource tcs = new TaskCompletionSource(); - Action start = () => + for (int i = 0; i < BackgroundThreadScheduler.MaxBacklog * 2; i++) { - // Add 2 because 1 thread *may* be released, start running and then block before we attempt to schedule all tasks. - // this leaves BackgroundThreadScheduler.MaxBacklog slots available. So we need + 2 to guarantee all slots are - // used. - for (int i = 0; i < BackgroundThreadScheduler.MaxBacklog + 2; i++) - { - scheduler.Run(() => { tcs.Task.Wait(); }); - } - }; - - start.Should().Throw(); + scheduler.Run(() => { tcs.Task.Wait(); }); + } + tcs.SetResult(); + + scheduler.RunCount.Should().BeCloseTo(BackgroundThreadScheduler.MaxBacklog, 1); } [Fact] diff --git a/BitFaster.Caching/Buffers/StripedBufferSize.cs b/BitFaster.Caching/Buffers/StripedBufferSize.cs new file mode 100644 index 00000000..22fcacd5 --- /dev/null +++ b/BitFaster.Caching/Buffers/StripedBufferSize.cs @@ -0,0 +1,27 @@ +using System; + +namespace BitFaster.Caching.Buffers +{ + public sealed class StripedBufferSize + { + public StripedBufferSize(int bufferSize, int stripeCount) + { + if (bufferSize < 1) + { + throw new ArgumentOutOfRangeException(nameof(bufferSize)); + } + + if (stripeCount < 1) + { + throw new ArgumentOutOfRangeException(nameof(stripeCount)); + } + + BufferSize = BitOps.CeilingPowerOfTwo(bufferSize); + StripeCount = BitOps.CeilingPowerOfTwo(stripeCount); + } + + public int BufferSize { get; } + + public int StripeCount { get; } + } +} diff --git a/BitFaster.Caching/Buffers/StripedMpmcBuffer.cs b/BitFaster.Caching/Buffers/StripedMpmcBuffer.cs index 5fd36f58..b1d1c0e0 100644 --- a/BitFaster.Caching/Buffers/StripedMpmcBuffer.cs +++ b/BitFaster.Caching/Buffers/StripedMpmcBuffer.cs @@ -19,13 +19,17 @@ public sealed class StripedMpmcBuffer private MpmcBoundedBuffer[] buffers; public StripedMpmcBuffer(int stripeCount, int bufferSize) + : this(new StripedBufferSize(bufferSize, stripeCount)) { - stripeCount = BitOps.CeilingPowerOfTwo(stripeCount); - buffers = new MpmcBoundedBuffer[stripeCount]; + } + + public StripedMpmcBuffer(StripedBufferSize bufferSize) + { + buffers = new MpmcBoundedBuffer[bufferSize.StripeCount]; - for (var i = 0; i < stripeCount; i++) + for (var i = 0; i < bufferSize.StripeCount; i++) { - buffers[i] = new MpmcBoundedBuffer(bufferSize); + buffers[i] = new MpmcBoundedBuffer(bufferSize.BufferSize); } } diff --git a/BitFaster.Caching/Buffers/StripedMpscBuffer.cs b/BitFaster.Caching/Buffers/StripedMpscBuffer.cs index 951dd45c..bb6f45e1 100644 --- a/BitFaster.Caching/Buffers/StripedMpscBuffer.cs +++ b/BitFaster.Caching/Buffers/StripedMpscBuffer.cs @@ -22,13 +22,17 @@ public sealed class StripedMpscBuffer where T : class private MpscBoundedBuffer[] buffers; public StripedMpscBuffer(int stripeCount, int bufferSize) + : this(new StripedBufferSize(bufferSize, 
stripeCount)) + { + } + + public StripedMpscBuffer(StripedBufferSize bufferSize) { - stripeCount = BitOps.CeilingPowerOfTwo(stripeCount); - buffers = new MpscBoundedBuffer[stripeCount]; + buffers = new MpscBoundedBuffer[bufferSize.StripeCount]; - for (var i = 0; i < stripeCount; i++) + for (var i = 0; i < bufferSize.StripeCount; i++) { - buffers[i] = new MpscBoundedBuffer(bufferSize); + buffers[i] = new MpscBoundedBuffer(bufferSize.BufferSize); } } diff --git a/BitFaster.Caching/Lfu/BufferConfiguration.cs b/BitFaster.Caching/Lfu/BufferConfiguration.cs deleted file mode 100644 index 924ee926..00000000 --- a/BitFaster.Caching/Lfu/BufferConfiguration.cs +++ /dev/null @@ -1,43 +0,0 @@ - -using System; - -namespace BitFaster.Caching.Lfu -{ - public class BufferConfiguration - { - private const int MaxReadBufferTotalSize = 4096; - private const int MaxWriteBufferTotalSize = 2048; - - public int ReadBufferStripes { get; set; } - - public int ReadBufferSize { get; set; } - - public int WriteBufferStripes { get; set; } - - public int WriteBufferSize { get; set; } - - public static BufferConfiguration CreateDefault(int concurrencyLevel, int capacity) - { - concurrencyLevel = BitOps.CeilingPowerOfTwo(concurrencyLevel); - - // Estimate total read buffer size based on capacity and concurrency, up to a maximum of MaxReadBufferTotalSize. - // Stripe based on concurrency, with a minimum and maximum buffer size (between 4 and 128). - // Total size becomes 4 * concurrency level when concurrencyLevel * capacity > MaxReadBufferTotalSize. - int readBufferTotalSize = Math.Min(BitOps.CeilingPowerOfTwo(concurrencyLevel * capacity), MaxReadBufferTotalSize); - int readStripeSize = Math.Min(BitOps.CeilingPowerOfTwo(Math.Max(readBufferTotalSize / concurrencyLevel, 4)), 128); - - // Try to constrain write buffer size so that the LFU dictionary will not ever end up with more than 2x cache - // capacity entries before maintenance runs. - int writeBufferTotalSize = Math.Min(BitOps.CeilingPowerOfTwo(capacity), MaxWriteBufferTotalSize); - int writeStripeSize = Math.Min(BitOps.CeilingPowerOfTwo(Math.Max(writeBufferTotalSize / concurrencyLevel, 4)), 128); - - return new BufferConfiguration() - { - ReadBufferStripes = concurrencyLevel, - ReadBufferSize = 128, - WriteBufferStripes = concurrencyLevel, - WriteBufferSize = writeStripeSize, - }; - } - } -} diff --git a/BitFaster.Caching/Lfu/Builder/LfuBuilderBase.cs b/BitFaster.Caching/Lfu/Builder/LfuBuilderBase.cs index cf815c14..e19b87e9 100644 --- a/BitFaster.Caching/Lfu/Builder/LfuBuilderBase.cs +++ b/BitFaster.Caching/Lfu/Builder/LfuBuilderBase.cs @@ -59,6 +59,18 @@ public TBuilder WithKeyComparer(IEqualityComparer comparer) return this as TBuilder; } + /// + /// Use the specified buffer configuration. Smaller buffers consume less memory, larger buffers can + /// increase concurrent throughput. + /// + /// The buffer configuration to use. + /// A ConcurrentLfuBuilder + public TBuilder WithBufferConfiguration(LfuBufferSize bufferConfiguration) + { + this.info.BufferConfiguration = bufferConfiguration; + return this as TBuilder; + } + /// /// Builds a cache configured via the method calls invoked on the builder instance. 
/// diff --git a/BitFaster.Caching/Lfu/Builder/LfuInfo.cs b/BitFaster.Caching/Lfu/Builder/LfuInfo.cs index b0704b89..1cfb489e 100644 --- a/BitFaster.Caching/Lfu/Builder/LfuInfo.cs +++ b/BitFaster.Caching/Lfu/Builder/LfuInfo.cs @@ -8,7 +8,7 @@ namespace BitFaster.Caching.Lfu.Builder { public sealed class LfuInfo { - private BufferConfiguration bufferConfiguration; + private LfuBufferSize bufferConfiguration; public int Capacity { get; set; } = 128; @@ -18,11 +18,11 @@ public sealed class LfuInfo public IEqualityComparer KeyComparer { get; set; } = EqualityComparer.Default; - public BufferConfiguration BufferConfiguration + public LfuBufferSize BufferConfiguration { get { - return this.bufferConfiguration ?? BufferConfiguration.CreateDefault(ConcurrencyLevel, Capacity); + return this.bufferConfiguration ?? LfuBufferSize.Default(ConcurrencyLevel, Capacity); } set { diff --git a/BitFaster.Caching/Lfu/ConcurrentLfu.cs b/BitFaster.Caching/Lfu/ConcurrentLfu.cs index 0712f143..621f3e55 100644 --- a/BitFaster.Caching/Lfu/ConcurrentLfu.cs +++ b/BitFaster.Caching/Lfu/ConcurrentLfu.cs @@ -1,7 +1,6 @@ using System; -#if NETSTANDARD2_0 -#else +#if !NETSTANDARD2_0 using System.Buffers; #endif @@ -37,8 +36,6 @@ public sealed class ConcurrentLfu : ICache, IAsyncCache, IBoun { private const int MaxWriteBufferRetries = 16; - public const int BufferSize = 128; - private readonly ConcurrentDictionary> dictionary; private readonly StripedMpscBuffer> readBuffer; @@ -64,16 +61,16 @@ public sealed class ConcurrentLfu : ICache, IAsyncCache, IBoun #endif public ConcurrentLfu(int capacity) - : this(Defaults.ConcurrencyLevel, capacity, new ThreadPoolScheduler(), EqualityComparer.Default, BufferConfiguration.CreateDefault(Defaults.ConcurrencyLevel, capacity)) + : this(Defaults.ConcurrencyLevel, capacity, new ThreadPoolScheduler(), EqualityComparer.Default, LfuBufferSize.Default(Defaults.ConcurrencyLevel, capacity)) { } - public ConcurrentLfu(int concurrencyLevel, int capacity, IScheduler scheduler, IEqualityComparer comparer, BufferConfiguration bufferConfiguration) + public ConcurrentLfu(int concurrencyLevel, int capacity, IScheduler scheduler, IEqualityComparer comparer, LfuBufferSize bufferConfiguration) { this.dictionary = new ConcurrentDictionary>(concurrencyLevel, capacity, comparer); - this.readBuffer = new StripedMpscBuffer>(bufferConfiguration.ReadBufferStripes, bufferConfiguration.ReadBufferSize); - this.writeBuffer = new StripedMpscBuffer>(bufferConfiguration.WriteBufferStripes, bufferConfiguration.WriteBufferSize); + this.readBuffer = new StripedMpscBuffer>(bufferConfiguration.Read); + this.writeBuffer = new StripedMpscBuffer>(bufferConfiguration.Write); this.cmSketch = new CmSketch(1, comparer); this.cmSketch.EnsureCapacity(capacity); diff --git a/BitFaster.Caching/Lfu/ConcurrentLfuBuilder.cs b/BitFaster.Caching/Lfu/ConcurrentLfuBuilder.cs index fafe2c41..d5215f8b 100644 --- a/BitFaster.Caching/Lfu/ConcurrentLfuBuilder.cs +++ b/BitFaster.Caching/Lfu/ConcurrentLfuBuilder.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Text; + using BitFaster.Caching.Lfu.Builder; namespace BitFaster.Caching.Lfu @@ -22,7 +20,6 @@ internal ConcurrentLfuBuilder(LfuInfo info) public override ICache Build() { - // TODO: key comparer return new ConcurrentLfu(info.ConcurrencyLevel, info.Capacity, info.Scheduler, info.KeyComparer, info.BufferConfiguration); } } diff --git a/BitFaster.Caching/Lfu/LfuBufferSize.cs b/BitFaster.Caching/Lfu/LfuBufferSize.cs new file mode 100644 index 00000000..a7c16c20 
--- /dev/null +++ b/BitFaster.Caching/Lfu/LfuBufferSize.cs @@ -0,0 +1,63 @@ + +using System; +using BitFaster.Caching.Buffers; + +namespace BitFaster.Caching.Lfu +{ + public class LfuBufferSize + { + public const int DefaultBufferSize = 128; + + private const int MaxWriteBufferTotalSize = 1024; + + public LfuBufferSize(StripedBufferSize readBufferSize, StripedBufferSize writeBufferSize) + { + Read = readBufferSize ?? throw new ArgumentNullException(nameof(readBufferSize)); + Write = writeBufferSize ?? throw new ArgumentNullException(nameof(writeBufferSize)); + } + + /// + /// Gets the read buffer size. + /// + public StripedBufferSize Read { get; } + + /// + /// Gets the write buffer size. + /// + public StripedBufferSize Write { get; } + + /// + /// Estimates default buffer sizes intended to give optimal throughput. + /// + /// The estimated number of threads that will use the cache concurrently. + /// The capacity of the cache. The size of the write buffer is constrained to avoid the cache growing to greater than 2x capacity while writes are buffered. + /// An LfuBufferSize + /// + public static LfuBufferSize Default(int concurrencyLevel, int capacity) + { + if (capacity < 13) + { + return new LfuBufferSize( + new StripedBufferSize(32, 1), + new StripedBufferSize(16, 1)); + } + + // cap concurrency at proc count * 2 + concurrencyLevel = Math.Min(BitOps.CeilingPowerOfTwo(concurrencyLevel), BitOps.CeilingPowerOfTwo(Environment.ProcessorCount * 2)); + + // cap read buffer at approx 10x total capacity + while (concurrencyLevel * DefaultBufferSize > BitOps.CeilingPowerOfTwo(capacity * 10)) + { + concurrencyLevel /= 2; + } + + // Constrain write buffer size so that the LFU dictionary will not ever end up with more than 2x cache + // capacity entries before maintenance runs.
+ int writeBufferTotalSize = Math.Min(BitOps.CeilingPowerOfTwo(capacity), MaxWriteBufferTotalSize); + int writeStripeSize = Math.Min(BitOps.CeilingPowerOfTwo(Math.Max(writeBufferTotalSize / concurrencyLevel, 4)), 128); + + return new LfuBufferSize( + new StripedBufferSize(DefaultBufferSize, concurrencyLevel), + new StripedBufferSize(writeStripeSize, concurrencyLevel)); + } + } +} diff --git a/BitFaster.Caching/Scheduler/BackgroundThreadScheduler.cs b/BitFaster.Caching/Scheduler/BackgroundThreadScheduler.cs index 50786236..7bb1bbdc 100644 --- a/BitFaster.Caching/Scheduler/BackgroundThreadScheduler.cs +++ b/BitFaster.Caching/Scheduler/BackgroundThreadScheduler.cs @@ -45,24 +45,11 @@ public BackgroundThreadScheduler() public void Run(Action action) { - BufferStatus s; - - //do - { - s = work.TryAdd(action); - } - //while (s == Status.Contended); - - if (s == BufferStatus.Success) + if (work.TryAdd(action) == BufferStatus.Success) { - semaphore.Release(); count++; } - else - { - throw new InvalidOperationException($"More than {MaxBacklog} tasks scheduled"); - } } private async Task Background() From ef87930eb5ae87922962ee1679ceeb180b14b8f1 Mon Sep 17 00:00:00 2001 From: Alex Peck Date: Fri, 2 Sep 2022 16:38:40 -0700 Subject: [PATCH 3/6] skip --- .../BitFaster.Caching.UnitTests.csproj | 1 + .../Lfu/BufferConfigurationTests.cs | 31 ------------------- 2 files changed, 1 insertion(+), 31 deletions(-) delete mode 100644 BitFaster.Caching.UnitTests/Lfu/BufferConfigurationTests.cs diff --git a/BitFaster.Caching.UnitTests/BitFaster.Caching.UnitTests.csproj b/BitFaster.Caching.UnitTests/BitFaster.Caching.UnitTests.csproj index 49669cc0..56b5f445 100644 --- a/BitFaster.Caching.UnitTests/BitFaster.Caching.UnitTests.csproj +++ b/BitFaster.Caching.UnitTests/BitFaster.Caching.UnitTests.csproj @@ -17,6 +17,7 @@ all runtime; build; native; contentfiles; analyzers; buildtransitive + diff --git a/BitFaster.Caching.UnitTests/Lfu/BufferConfigurationTests.cs b/BitFaster.Caching.UnitTests/Lfu/BufferConfigurationTests.cs deleted file mode 100644 index d9a79972..00000000 --- a/BitFaster.Caching.UnitTests/Lfu/BufferConfigurationTests.cs +++ /dev/null @@ -1,31 +0,0 @@ -using BitFaster.Caching.Lfu; -using FluentAssertions; -using Xunit; - -namespace BitFaster.Caching.UnitTests.Lfu -{ - public class BufferConfigurationTests - { - [Theory] - [InlineData(1, 3, 1, 32, 1, 16)] - [InlineData(1, 14, 1, 128, 1, 16)] - [InlineData(1, 50, 1, 128, 1, 64)] - [InlineData(1, 100, 1, 128, 1, 128)] - [InlineData(4, 100, 4, 128, 4, 32)] - [InlineData(16, 100, 8, 128, 8, 16)] - [InlineData(64, 100, 8, 128, 8, 16)] - [InlineData(1, 1000, 1, 128, 1, 128)] - [InlineData(4, 1000, 4, 128, 4, 128)] - [InlineData(32, 1000, 32, 128, 32, 32)] - [InlineData(256, 100000, 32, 128, 32, 32)] - public void CalculateDefaultBufferSize(int concurrencyLevel, int capacity, int expectedReadStripes, int expectedReadBuffer, int expecteWriteStripes, int expecteWriteBuffer) - { - var bufferSize = LfuBufferSize.Default(concurrencyLevel, capacity); - - bufferSize.Read.StripeCount.Should().Be(expectedReadStripes); - bufferSize.Read.BufferSize.Should().Be(expectedReadBuffer); - bufferSize.Write.StripeCount.Should().Be(expecteWriteStripes); - bufferSize.Write.BufferSize.Should().Be(expecteWriteBuffer); - } - } -} From de72404cfed8659b8ca0c86850741b23fecfc20d Mon Sep 17 00:00:00 2001 From: Alex Peck Date: Fri, 2 Sep 2022 16:39:32 -0700 Subject: [PATCH 4/6] missing file --- .../Lfu/LfuBufferSizeTests.cs | 38 +++++++++++++++++++ 1 file changed, 38 insertions(+) create 
mode 100644 BitFaster.Caching.UnitTests/Lfu/LfuBufferSizeTests.cs diff --git a/BitFaster.Caching.UnitTests/Lfu/LfuBufferSizeTests.cs b/BitFaster.Caching.UnitTests/Lfu/LfuBufferSizeTests.cs new file mode 100644 index 00000000..14c1221c --- /dev/null +++ b/BitFaster.Caching.UnitTests/Lfu/LfuBufferSizeTests.cs @@ -0,0 +1,38 @@ +using System; +using BitFaster.Caching.Lfu; +using FluentAssertions; +using Xunit; + +namespace BitFaster.Caching.UnitTests.Lfu +{ + public class LfuBufferSizeTests + { + [SkippableTheory] + [InlineData(1, 3, 1, 32, 1, 16)] + [InlineData(1, 14, 1, 128, 1, 16)] + [InlineData(1, 50, 1, 128, 1, 64)] + [InlineData(1, 100, 1, 128, 1, 128)] + [InlineData(4, 100, 4, 128, 4, 32)] + [InlineData(16, 100, 8, 128, 8, 16)] // fails win + [InlineData(64, 100, 8, 128, 8, 16)] // fails win + [InlineData(1, 1000, 1, 128, 1, 128)] + [InlineData(4, 1000, 4, 128, 4, 128)] + [InlineData(32, 1000, 32, 128, 32, 32)] // fails win + fails mac + [InlineData(256, 100000, 32, 128, 32, 32)] // fails win + fails mac + public void CalculateDefaultBufferSize(int concurrencyLevel, int capacity, int expectedReadStripes, int expectedReadBuffer, int expecteWriteStripes, int expecteWriteBuffer) + { + // Some of these tests depend on the CPU Core count - skip if run on a different config machine. + bool notExpectedCpuCount = Environment.ProcessorCount != 12; + bool concurrencyLevelThresholdExceeded = BitOps.CeilingPowerOfTwo(concurrencyLevel) > BitOps.CeilingPowerOfTwo(Environment.ProcessorCount * 2); + + Skip.If(concurrencyLevelThresholdExceeded && notExpectedCpuCount, "Test outcome depends on machine CPU count"); + + var bufferSize = LfuBufferSize.Default(concurrencyLevel, capacity); + + bufferSize.Read.StripeCount.Should().Be(expectedReadStripes); + bufferSize.Read.BufferSize.Should().Be(expectedReadBuffer); + bufferSize.Write.StripeCount.Should().Be(expecteWriteStripes); + bufferSize.Write.BufferSize.Should().Be(expecteWriteBuffer); + } + } +} From 1141181e99dbe733d7f15383f14e09caa23eb53b Mon Sep 17 00:00:00 2001 From: Alex Peck Date: Fri, 2 Sep 2022 17:03:07 -0700 Subject: [PATCH 5/6] tests --- .../Lfu/LfuBufferSizeTests.cs | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/BitFaster.Caching.UnitTests/Lfu/LfuBufferSizeTests.cs b/BitFaster.Caching.UnitTests/Lfu/LfuBufferSizeTests.cs index 14c1221c..8cbec085 100644 --- a/BitFaster.Caching.UnitTests/Lfu/LfuBufferSizeTests.cs +++ b/BitFaster.Caching.UnitTests/Lfu/LfuBufferSizeTests.cs @@ -1,4 +1,5 @@ using System; +using BitFaster.Caching.Buffers; using BitFaster.Caching.Lfu; using FluentAssertions; using Xunit; @@ -7,6 +8,22 @@ namespace BitFaster.Caching.UnitTests.Lfu { public class LfuBufferSizeTests { + [Fact] + public void WhenReadBufferIsNullThrows() + { + Action constructor = () => { var x = new LfuBufferSize(null, new StripedBufferSize(1, 1)); }; + + constructor.Should().Throw(); + } + + [Fact] + public void WhenWriteBufferIsNullThrows() + { + Action constructor = () => { var x = new LfuBufferSize(new StripedBufferSize(1, 1), null); }; + + constructor.Should().Throw(); + } + [SkippableTheory] [InlineData(1, 3, 1, 32, 1, 16)] [InlineData(1, 14, 1, 128, 1, 16)] From d9410db3ce442f39c2e2331f2e1bd176338c9576 Mon Sep 17 00:00:00 2001 From: Alex Peck Date: Fri, 2 Sep 2022 17:07:18 -0700 Subject: [PATCH 6/6] rename --- BitFaster.Caching/Lfu/ConcurrentLfu.cs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/BitFaster.Caching/Lfu/ConcurrentLfu.cs b/BitFaster.Caching/Lfu/ConcurrentLfu.cs index 621f3e55..c8eba2d0 
100644 --- a/BitFaster.Caching/Lfu/ConcurrentLfu.cs +++ b/BitFaster.Caching/Lfu/ConcurrentLfu.cs @@ -65,12 +65,12 @@ public ConcurrentLfu(int capacity) { } - public ConcurrentLfu(int concurrencyLevel, int capacity, IScheduler scheduler, IEqualityComparer comparer, LfuBufferSize bufferConfiguration) + public ConcurrentLfu(int concurrencyLevel, int capacity, IScheduler scheduler, IEqualityComparer comparer, LfuBufferSize bufferSize) { this.dictionary = new ConcurrentDictionary>(concurrencyLevel, capacity, comparer); - this.readBuffer = new StripedMpscBuffer>(bufferConfiguration.Read); - this.writeBuffer = new StripedMpscBuffer>(bufferConfiguration.Write); + this.readBuffer = new StripedMpscBuffer>(bufferSize.Read); + this.writeBuffer = new StripedMpscBuffer>(bufferSize.Write); this.cmSketch = new CmSketch(1, comparer); this.cmSketch.EnsureCapacity(capacity);
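
Taken together, the series replaces the fixed ConcurrentLfu.BufferSize constant with the LfuBufferSize and StripedBufferSize types, exposes them on the builder through WithBufferConfiguration, and derives defaults in LfuBufferSize.Default. The sketch below illustrates how the resulting API might be called once all six patches are applied. It is a usage sketch only: the generic type arguments, capacities, stripe counts, and buffer sizes are illustrative assumptions, and the named arguments follow the parameter names as they stand after the final rename patch.

using System.Collections.Generic;
using BitFaster.Caching;
using BitFaster.Caching.Buffers;
using BitFaster.Caching.Lfu;
using BitFaster.Caching.Scheduler;

public static class LfuBufferSizeExample
{
    public static void Main()
    {
        // Explicit sizing through the builder, mirroring the builder test above: both buffers
        // are striped, and StripedBufferSize rounds size and stripe count up to a power of two.
        ICache<string, int> lfu = new ConcurrentLfuBuilder<string, int>()
            .WithBufferConfiguration(new LfuBufferSize(
                new StripedBufferSize(bufferSize: 128, stripeCount: 2),
                new StripedBufferSize(bufferSize: 32, stripeCount: 2)))
            .Build();

        lfu.GetOrAdd("A", k => k.Length);

        // Direct construction using the computed defaults: Default caps the read buffer at
        // roughly 10x capacity and constrains the write buffer so the dictionary stays below
        // 2x capacity before maintenance runs.
        var cache = new ConcurrentLfu<int, int>(
            concurrencyLevel: 4,
            capacity: 1000,
            scheduler: new ThreadPoolScheduler(),
            comparer: EqualityComparer<int>.Default,
            bufferSize: LfuBufferSize.Default(concurrencyLevel: 4, capacity: 1000));

        cache.GetOrAdd(1, k => k);
    }
}

Passing LfuBufferSize.Default preserves the automatic sizing behavior, while the explicit overload trades memory for concurrent throughput, as noted in the WithBufferConfiguration doc comment.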