From 81ade80c17da5cc7aada02c675aca3a1065d529e Mon Sep 17 00:00:00 2001
From: Alex Peck
Date: Sun, 25 Sep 2022 16:34:34 -0700
Subject: [PATCH 1/3] size

---
 .../Arc/RunnerConfig.cs                       |  4 +-
 .../Zipfian/Runner.cs                         |  4 +-
 ...itFaster.Caching.ThroughputAnalysis.csproj |  3 ++
 .../CommandParser.cs                          | 33 ++++++++++++++++
 .../ConfigFactory.cs                          | 38 ++++++++++++++-----
 .../Exporter.cs                               |  4 +-
 .../Program.cs                                | 12 +----
 .../Runner.cs                                 | 21 +++++-----
 .../SizeExec.bat                              |  9 +++++
 .../ThroughputBenchConfig.cs                  | 10 +++--
 10 files changed, 98 insertions(+), 40 deletions(-)
 create mode 100644 BitFaster.Caching.ThroughputAnalysis/CommandParser.cs
 create mode 100644 BitFaster.Caching.ThroughputAnalysis/SizeExec.bat

diff --git a/BitFaster.Caching.HitRateAnalysis/Arc/RunnerConfig.cs b/BitFaster.Caching.HitRateAnalysis/Arc/RunnerConfig.cs
index fcf08f7b..9700572e 100644
--- a/BitFaster.Caching.HitRateAnalysis/Arc/RunnerConfig.cs
+++ b/BitFaster.Caching.HitRateAnalysis/Arc/RunnerConfig.cs
@@ -25,8 +25,8 @@ public RunnerConfig(string name, int[] cacheSizes, Uri dataUri)
 
         public ArcDataFile File => this.file;
 
-        public static RunnerConfig Database = new RunnerConfig("results.arc.database.csv", new[] { 1000000, 2000000, 3000000, 4000000, 5000000, 6000000, 7000000, 8000000 }, new Uri("https://github.com/bitfaster/cache-datasets/releases/download/v1.0/DS1.lis.gz"));
-        public static RunnerConfig Search = new RunnerConfig("results.arc.search.csv", new[] { 100000, 200000, 300000, 400000, 500000, 600000, 700000, 800000 }, new Uri("https://github.com/bitfaster/cache-datasets/releases/download/v1.0/S3.lis.gz"));
+        public static RunnerConfig Database = new RunnerConfig("results.arc.database.csv", new[] { 1_000_000, 2_000_000, 3_000_000, 4_000_000, 5_000_000, 6_000_000, 7_000_000, 8_000_000 }, new Uri("https://github.com/bitfaster/cache-datasets/releases/download/v1.0/DS1.lis.gz"));
+        public static RunnerConfig Search = new RunnerConfig("results.arc.search.csv", new[] { 100_000, 200_000, 300_000, 400_000, 500_000, 600_000, 700_000, 800_000 }, new Uri("https://github.com/bitfaster/cache-datasets/releases/download/v1.0/S3.lis.gz"));
         public static RunnerConfig Oltp = new RunnerConfig("results.arc.oltp.csv", new[] { 250, 500, 750, 1000, 1250, 1500, 1750, 2000 }, new Uri("https://github.com/bitfaster/cache-datasets/releases/download/v1.0/OLTP.lis.gz"));
     }
 }
diff --git a/BitFaster.Caching.HitRateAnalysis/Zipfian/Runner.cs b/BitFaster.Caching.HitRateAnalysis/Zipfian/Runner.cs
index ace1c069..82cf72ac 100644
--- a/BitFaster.Caching.HitRateAnalysis/Zipfian/Runner.cs
+++ b/BitFaster.Caching.HitRateAnalysis/Zipfian/Runner.cs
@@ -21,12 +21,12 @@ public class Runner
         // distribution (about 45 / 20).
 
         // Took 1 million samples
-        const int sampleCount = 1000000;
+        const int sampleCount = 1_000_000;
 
         // Simulated a database of 50,000 pages and
         // buffer sizes ranging from 2,500 (5%) items to 20,000
         // (40%) items.
-        const int n = 50000;
+        const int n = 50_000;
 
         public static void Run()
         {
diff --git a/BitFaster.Caching.ThroughputAnalysis/BitFaster.Caching.ThroughputAnalysis.csproj b/BitFaster.Caching.ThroughputAnalysis/BitFaster.Caching.ThroughputAnalysis.csproj
index 1d79ef6e..1edfd2e6 100644
--- a/BitFaster.Caching.ThroughputAnalysis/BitFaster.Caching.ThroughputAnalysis.csproj
+++ b/BitFaster.Caching.ThroughputAnalysis/BitFaster.Caching.ThroughputAnalysis.csproj
@@ -33,6 +33,9 @@
         <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
       </None>
+      <None Update="SizeExec.bat">
+        <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+      </None>
     </ItemGroup>
diff --git a/BitFaster.Caching.ThroughputAnalysis/CommandParser.cs b/BitFaster.Caching.ThroughputAnalysis/CommandParser.cs
new file mode 100644
index 00000000..f8ba2409
--- /dev/null
+++ b/BitFaster.Caching.ThroughputAnalysis/CommandParser.cs
@@ -0,0 +1,33 @@
+
+namespace BitFaster.Caching.ThroughputAnalysis
+{
+    public class CommandParser
+    {
+        public static (Mode, int) Parse(string[] args)
+        {
+            // arg[0] == mode, arg[1] == size
+            if (args.Length == 2)
+            {
+                if (int.TryParse(args[0], out int modeArg))
+                {
+                    if (int.TryParse(args[1], out int size))
+                    {
+                        return ((Mode)modeArg, size);
+                    }
+                }
+            }
+
+            Mode mode = Mode.Read;
+            var menu = new EasyConsole.Menu()
+                .Add("Read", () => mode = Mode.Read)
+                .Add("Read + Write", () => mode = Mode.ReadWrite)
+                .Add("Update", () => mode = Mode.Update)
+                .Add("Evict", () => mode = Mode.Evict)
+                .Add("All", () => mode = Mode.All);
+
+            menu.Display();
+
+            return (mode, 500);
+        }
+    }
+}
diff --git a/BitFaster.Caching.ThroughputAnalysis/ConfigFactory.cs b/BitFaster.Caching.ThroughputAnalysis/ConfigFactory.cs
index acb4d975..4feaf93f 100644
--- a/BitFaster.Caching.ThroughputAnalysis/ConfigFactory.cs
+++ b/BitFaster.Caching.ThroughputAnalysis/ConfigFactory.cs
@@ -8,27 +8,47 @@ namespace BitFaster.Caching.ThroughputAnalysis
 {
     public class ConfigFactory
     {
-        const double s = 0.86; // Zipf s parameter, controls distribution
-        const int n = 500; // number of unique items for Zipf
-        const int maxThreads = 52;
-        const int sampleCount = 2000;
+        const double s = 0.86; // Zipf s parameter, controls distribution
 
-        public static (ThroughputBenchmarkBase, IThroughputBenchConfig, int) Create(Mode mode, int repeatCount)
+        public static (ThroughputBenchmarkBase, IThroughputBenchConfig, int) Create(Mode mode, int cacheSize, int maxThreads)
         {
+            int iterations = GetIterationCount(cacheSize);
+            int samples = GetSampleCount(cacheSize);
+            int n = cacheSize; // number of unique items for Zipf
+
             switch (mode)
             {
                 case Mode.Read:
-                    return (new ReadThroughputBenchmark(), new ZipfConfig(repeatCount, sampleCount, s, n), n);
+                    return (new ReadThroughputBenchmark(), new ZipfConfig(iterations, samples, s, n), cacheSize);
                 case Mode.ReadWrite:
                     // cache holds 10% of all items
-                    return (new ReadThroughputBenchmark(), new ZipfConfig(repeatCount, sampleCount, s, n), n / 10);
+                    cacheSize = cacheSize / 10;
+                    return (new ReadThroughputBenchmark(), new ZipfConfig(iterations, samples, s, n), cacheSize);
                 case Mode.Update:
-                    return (new UpdateThroughputBenchmark(), new ZipfConfig(repeatCount, sampleCount, s, n), n);
+                    return (new UpdateThroughputBenchmark(), new ZipfConfig(iterations, samples, s, n), cacheSize);
                 case Mode.Evict:
-                    return (new ReadThroughputBenchmark(), new EvictionConfig(repeatCount, sampleCount, maxThreads), n);
+                    return (new ReadThroughputBenchmark(), new EvictionConfig(iterations, samples, maxThreads), cacheSize);
             }
 
             throw new InvalidOperationException();
         }
+
+        private static int GetIterationCount(int cacheSize) => cacheSize switch
+        {
+            < 500 => 400,
+            < 5_000 => 200,
+            < 10_000 => 100,
+            < 100_000 => 50,
+            < 1_000_000 => 25,
+            < 10_000_000 => 5,
+            _ => 1
+        };
+
+        private static int GetSampleCount(int cacheSize) => cacheSize switch
+        {
+            < 5_000 => cacheSize * 4,
+            < 5_000_000 => cacheSize * 2,
+            _ => cacheSize
+        };
     }
 }
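The reworked ConfigFactory derives the workload from the requested cache size instead of fixed constants. A rough worked example, following the switch expressions above (names and values come straight from the diff; maxThreads is whatever the caller supplies):

    // cacheSize = 1_000_000, Mode.Read:
    //   GetIterationCount(1_000_000) -> 5          (the "< 10_000_000" arm)
    //   GetSampleCount(1_000_000)    -> 2_000_000  (cacheSize * 2, the "< 5_000_000" arm)
    var (bench, config, capacity) = ConfigFactory.Create(Mode.Read, 1_000_000, maxThreads);
    // bench is a ReadThroughputBenchmark, config is ZipfConfig(5, 2_000_000, 0.86, 1_000_000),
    // and capacity is 1_000_000 (Mode.ReadWrite would shrink it to cacheSize / 10).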
diff --git a/BitFaster.Caching.ThroughputAnalysis/Exporter.cs b/BitFaster.Caching.ThroughputAnalysis/Exporter.cs
index 4ff9e597..37c2bdbd 100644
--- a/BitFaster.Caching.ThroughputAnalysis/Exporter.cs
+++ b/BitFaster.Caching.ThroughputAnalysis/Exporter.cs
@@ -46,9 +46,9 @@ public void CaptureRows(IEnumerable caches)
             }
         }
 
-        public void ExportCsv(Mode mode)
+        public void ExportCsv(Mode mode, int cacheSize)
         {
-            using (var textWriter = File.CreateText($"Results{mode}.csv"))
+            using (var textWriter = File.CreateText($"Results_{mode}_{cacheSize}.csv"))
             using (var csv = new CsvWriter(textWriter, CultureInfo.InvariantCulture))
             {
                 foreach (DataColumn column in resultTable.Columns)
diff --git a/BitFaster.Caching.ThroughputAnalysis/Program.cs b/BitFaster.Caching.ThroughputAnalysis/Program.cs
index 4a4889a2..ec7c1f04 100644
--- a/BitFaster.Caching.ThroughputAnalysis/Program.cs
+++ b/BitFaster.Caching.ThroughputAnalysis/Program.cs
@@ -3,15 +3,7 @@
 
 Host.PrintInfo();
 
-Mode mode = Mode.Read;
+var (mode, size) = CommandParser.Parse(args);
 
-var menu = new EasyConsole.Menu()
-    .Add("Read", () => mode = Mode.Read)
-    .Add("Read + Write", () => mode = Mode.ReadWrite)
-    .Add("Update", () => mode = Mode.Update)
-    .Add("Evict", () => mode = Mode.Evict)
-    .Add("All", () => mode = Mode.All);
-
-menu.Display();
-Runner.Run(mode);
+Runner.Run(mode, size);
 Console.WriteLine("Done.");
diff --git a/BitFaster.Caching.ThroughputAnalysis/Runner.cs b/BitFaster.Caching.ThroughputAnalysis/Runner.cs
index 8d6a2785..0b7d3729 100644
--- a/BitFaster.Caching.ThroughputAnalysis/Runner.cs
+++ b/BitFaster.Caching.ThroughputAnalysis/Runner.cs
@@ -8,9 +8,8 @@ namespace BitFaster.Caching.ThroughputAnalysis
     public class Runner
     {
         private static readonly int maxThreads = Host.GetAvailableCoreCount() * 2;
-        private const int repeatCount = 400;
 
-        public static void Run(Mode mode)
+        public static void Run(Mode mode, int cacheSize)
         {
             ThreadPool.SetMinThreads(maxThreads, maxThreads);
 
@@ -18,29 +17,29 @@ public static void Run(Mode mode)
             {
                 if (mode.HasFlag(value) && value != Mode.All)
                 {
-                    RunTest(value);
+                    RunTest(value, cacheSize);
                 }
             }
         }
 
-        private static void RunTest(Mode mode)
+        private static void RunTest(Mode mode, int cacheSize)
         {
             Console.WriteLine("Generating input distribution...");
 
-            var (bench, dataConfig, capacity) = ConfigFactory.Create(mode, repeatCount);
+            var (bench, dataConfig, capacity) = ConfigFactory.Create(mode, cacheSize, maxThreads);
 
             var cachesToTest = new List();
-            cachesToTest.Add(new ClassicLruFactory(capacity));
-            cachesToTest.Add(new MemoryCacheFactory(capacity));
-            cachesToTest.Add(new FastConcurrentLruFactory(capacity));
-            cachesToTest.Add(new ConcurrentLruFactory(capacity));
+            //cachesToTest.Add(new ClassicLruFactory(capacity));
+            //cachesToTest.Add(new MemoryCacheFactory(capacity));
+            //cachesToTest.Add(new FastConcurrentLruFactory(capacity));
+            //cachesToTest.Add(new ConcurrentLruFactory(capacity));
             cachesToTest.Add(new ConcurrentLfuFactory(capacity));
 
             var exporter = new Exporter(maxThreads);
             exporter.Initialize(cachesToTest);
 
             Console.WriteLine();
-            Console.WriteLine($"Running {mode}...");
+            Console.WriteLine($"Running {mode} with size {cacheSize} over {maxThreads} threads...");
             Console.WriteLine();
 
             foreach (int tc in Enumerable.Range(1, maxThreads).ToArray())
@@ -63,7 +62,7 @@ private static void RunTest(Mode mode)
 
             exporter.CaptureRows(cachesToTest);
 
-            exporter.ExportCsv(mode);
+            exporter.ExportCsv(mode, cacheSize);
 
             //ConsoleTable
             //    .From(resultTable)
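Together with SizeExec.bat below, these changes let the benchmark run either interactively or non-interactively. A minimal sketch of the command-line path (the numeric values of the Mode members are defined elsewhere in the project and are not shown in this patch):

    // e.g. BitFaster.Caching.ThroughputAnalysis.exe 4 1000000
    var (mode, size) = CommandParser.Parse(args); // two integers: the Mode value, then the cache size;
                                                  // anything else falls back to the EasyConsole menu and a size of 500
    Runner.Run(mode, size);                       // runs every flag set in mode, except Mode.All itself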
diff --git a/BitFaster.Caching.ThroughputAnalysis/SizeExec.bat b/BitFaster.Caching.ThroughputAnalysis/SizeExec.bat
new file mode 100644
index 00000000..df62656a
--- /dev/null
+++ b/BitFaster.Caching.ThroughputAnalysis/SizeExec.bat
@@ -0,0 +1,9 @@
+cls
+
+@echo off
+set DOTNET_Thread_UseAllCpuGroups=1
+
+call BitFaster.Caching.ThroughputAnalysis.exe 4 100
+call BitFaster.Caching.ThroughputAnalysis.exe 4 10000
+call BitFaster.Caching.ThroughputAnalysis.exe 4 1000000
+call BitFaster.Caching.ThroughputAnalysis.exe 4 10000000
\ No newline at end of file
diff --git a/BitFaster.Caching.ThroughputAnalysis/ThroughputBenchConfig.cs b/BitFaster.Caching.ThroughputAnalysis/ThroughputBenchConfig.cs
index c4a56e0e..7b6fcd2b 100644
--- a/BitFaster.Caching.ThroughputAnalysis/ThroughputBenchConfig.cs
+++ b/BitFaster.Caching.ThroughputAnalysis/ThroughputBenchConfig.cs
@@ -47,9 +47,11 @@ public class EvictionConfig : IThroughputBenchConfig
 
         private int[][] samples;
 
+        const int maxSamples = 10_000_000;
+
         public EvictionConfig(int iterations, int sampleCount, int threadCount)
         {
-            if (sampleCount > 100000)
+            if (sampleCount > maxSamples)
             {
                 throw new ArgumentOutOfRangeException("Sample count too large, will result in overlap");
             }
@@ -57,10 +59,10 @@ public EvictionConfig(int iterations, int sampleCount, int threadCount)
             this.iterations = iterations;
             samples = new int[threadCount][];
 
-            for (int i = 0; i < threadCount; i++)
+            Parallel.ForEach(Enumerable.Range(0, threadCount), i =>
             {
-                samples[i] = Enumerable.Range(i * 100000, sampleCount).ToArray();
-            }
+                samples[i] = Enumerable.Range(i * maxSamples, sampleCount).ToArray();
+            });
         }
 
         public int Iterations => iterations;
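A note on the EvictionConfig change: each thread's key range now starts at i * maxSamples, so thread ranges cannot overlap as long as sampleCount stays at or below maxSamples, which is exactly what the ArgumentOutOfRangeException guards. The Parallel.ForEach presumably just speeds up building the much larger per-thread sample arrays. Roughly:

    // thread 0 samples keys [0, sampleCount)
    // thread 1 samples keys [10_000_000, 10_000_000 + sampleCount)
    // thread 2 samples keys [20_000_000, 20_000_000 + sampleCount)
    // the previous stride of 100_000 capped sampleCount at 100_000 per thread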
From e83a13bfe8bc316042298c44443fa5354c3d3a4d Mon Sep 17 00:00:00 2001
From: Alex Peck
Date: Sun, 25 Sep 2022 16:35:58 -0700
Subject: [PATCH 2/3] runner

---
 BitFaster.Caching.ThroughputAnalysis/Runner.cs | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/BitFaster.Caching.ThroughputAnalysis/Runner.cs b/BitFaster.Caching.ThroughputAnalysis/Runner.cs
index 0b7d3729..f0e496fd 100644
--- a/BitFaster.Caching.ThroughputAnalysis/Runner.cs
+++ b/BitFaster.Caching.ThroughputAnalysis/Runner.cs
@@ -29,10 +29,10 @@ private static void RunTest(Mode mode, int cacheSize)
             var (bench, dataConfig, capacity) = ConfigFactory.Create(mode, cacheSize, maxThreads);
 
             var cachesToTest = new List();
-            //cachesToTest.Add(new ClassicLruFactory(capacity));
-            //cachesToTest.Add(new MemoryCacheFactory(capacity));
-            //cachesToTest.Add(new FastConcurrentLruFactory(capacity));
-            //cachesToTest.Add(new ConcurrentLruFactory(capacity));
+            cachesToTest.Add(new ClassicLruFactory(capacity));
+            cachesToTest.Add(new MemoryCacheFactory(capacity));
+            cachesToTest.Add(new FastConcurrentLruFactory(capacity));
+            cachesToTest.Add(new ConcurrentLruFactory(capacity));
             cachesToTest.Add(new ConcurrentLfuFactory(capacity));
 
             var exporter = new Exporter(maxThreads);

From abf693fb975e640fa8d1a62968868a3d9f644224 Mon Sep 17 00:00:00 2001
From: Alex Peck
Date: Sun, 25 Sep 2022 16:40:43 -0700
Subject: [PATCH 3/3] test constants

---
 .../Wikibench/Runner.cs               |  2 +-
 .../ThroughputBenchmark.cs            |  2 +-
 .../Counters/StripedLongAdderTests.cs |  4 ++--
 .../Lfu/ConcurrentLfuTests.cs         | 16 ++++++++--------
 .../SingletonCacheTests.cs            |  2 +-
 5 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/BitFaster.Caching.HitRateAnalysis/Wikibench/Runner.cs b/BitFaster.Caching.HitRateAnalysis/Wikibench/Runner.cs
index 3dbf2b09..d41a2b82 100644
--- a/BitFaster.Caching.HitRateAnalysis/Wikibench/Runner.cs
+++ b/BitFaster.Caching.HitRateAnalysis/Wikibench/Runner.cs
@@ -38,7 +38,7 @@ public static async Task Run()
                     a.TestKey(url);
                 }
 
-                if (count++ % 100000 == 0)
+                if (count++ % 100_000 == 0)
                 {
                     Console.WriteLine($"Processed {count} URIs...");
                 }
diff --git a/BitFaster.Caching.ThroughputAnalysis/ThroughputBenchmark.cs b/BitFaster.Caching.ThroughputAnalysis/ThroughputBenchmark.cs
index 6a5d5ca1..99574557 100644
--- a/BitFaster.Caching.ThroughputAnalysis/ThroughputBenchmark.cs
+++ b/BitFaster.Caching.ThroughputAnalysis/ThroughputBenchmark.cs
@@ -23,7 +23,7 @@ public double Run(int warmup, int runs, int threads, IThroughputBenchConfig conf
             }
 
             // return million ops/sec
-            const int oneMillion = 1000000;
+            const int oneMillion = 1_000_000;
             return AverageLast(results, runs) / oneMillion;
         }
 
diff --git a/BitFaster.Caching.UnitTests/Counters/StripedLongAdderTests.cs b/BitFaster.Caching.UnitTests/Counters/StripedLongAdderTests.cs
index 4d5e0fef..d48f2f12 100644
--- a/BitFaster.Caching.UnitTests/Counters/StripedLongAdderTests.cs
+++ b/BitFaster.Caching.UnitTests/Counters/StripedLongAdderTests.cs
@@ -30,13 +30,13 @@ public async Task WhenAddingConcurrentlySumIsCorrect()
 
             await Threaded.Run(4, () =>
             {
-                for (int i = 0; i < 100000; i++)
+                for (int i = 0; i < 100_000; i++)
                 {
                     adder.Increment();
                 }
             });
 
-            adder.Count().Should().Be(400000);
+            adder.Count().Should().Be(400_000);
         }
     }
 }
diff --git a/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs b/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs
index cbc737a9..5a3c0048 100644
--- a/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs
+++ b/BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs
@@ -687,7 +687,7 @@ public void TrimWhileItemsInWriteBufferRemovesNItems()
         public void VerifyHitsWithBackgroundScheduler()
         {
             // when running all tests in parallel, sample count drops significantly: set low bar for stability.
-            VerifyHits(iterations: 10000000, minSamples: 250000);
+            VerifyHits(iterations: 10_000_000, minSamples: 250_000);
         }
 
         //Elapsed 590.8154ms - 0.0005908154ns/op
@@ -698,7 +698,7 @@ public void VerifyHitsWithThreadPoolScheduler()
         {
             // when running all tests in parallel, sample count drops significantly: set low bar for stability.
             cache = new ConcurrentLfu<int, int>(1, 20, new ThreadPoolScheduler(), EqualityComparer<int>.Default);
-            VerifyHits(iterations: 10000000, minSamples: 500000);
+            VerifyHits(iterations: 10_000_000, minSamples: 500_000);
         }
 
         //Elapsed 273.0148ms - 0.0002730148ns/op
@@ -708,7 +708,7 @@ public void VerifyHitsWithNullScheduler()
         {
             cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default);
-            VerifyHits(iterations: 10000000, minSamples: -1);
+            VerifyHits(iterations: 10_000_000, minSamples: -1);
         }
 
         //Will drop 78125 reads.
@@ -722,7 +722,7 @@ public void VerifyHitsWithForegroundScheduler()
             // Note: TryAdd will drop 1 read per full read buffer, since TryAdd will return false
             // before TryScheduleDrain is called. This serves as sanity check.
-            int iterations = 10000000;
+            int iterations = 10_000_000;
             int dropped = iterations / ConcurrentLfu.DefaultBufferSize;
 
             this.output.WriteLine($"Will drop {dropped} reads.");
@@ -735,7 +735,7 @@ public void VerifyMisses()
         {
             cache = new ConcurrentLfu<int, int>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default);
 
-            int iterations = 100000;
+            int iterations = 100_000;
             Func<int, int> func = x => x;
 
             var start = Stopwatch.GetTimestamp();
@@ -751,7 +751,7 @@ public void VerifyMisses()
 
             var totalTicks = end - start;
             var timeMs = ((double)totalTicks / Stopwatch.Frequency) * 1000.0;
-            var timeNs = timeMs / 1000000;
+            var timeNs = timeMs / 1_000_000;
 
             var timePerOp = timeMs / (double)iterations;
             var samplePercent = this.cache.Metrics.Value.Misses / (double)iterations * 100;
@@ -770,7 +770,7 @@ public async Task ThreadedVerifyMisses()
             cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default);
 
             int threads = 4;
-            int iterations = 100000;
+            int iterations = 100_000;
 
             await Threaded.Run(threads, i =>
             {
@@ -808,7 +808,7 @@ private void VerifyHits(int iterations, int minSamples)
 
             var totalTicks = end - start;
             var timeMs = ((double)totalTicks / Stopwatch.Frequency) * 1000.0;
-            var timeNs = timeMs / 1000000;
+            var timeNs = timeMs / 1_000_000;
 
             var timePerOp = timeMs / (double)iterations;
             var samplePercent = this.cache.Metrics.Value.Hits / (double)iterations * 100;
diff --git a/BitFaster.Caching.UnitTests/SingletonCacheTests.cs b/BitFaster.Caching.UnitTests/SingletonCacheTests.cs
index 5377aab4..d2871f17 100644
--- a/BitFaster.Caching.UnitTests/SingletonCacheTests.cs
+++ b/BitFaster.Caching.UnitTests/SingletonCacheTests.cs
@@ -123,7 +123,7 @@ public async Task AcquireWithSameKeyOnManyDifferentThreadsReturnsSameValue()
             {
                 tasks[concurrency] = Task.Run(() =>
                 {
-                    for (int i = 0; i < 100000; i++)
+                    for (int i = 0; i < 100_000; i++)
                     {
                         using (var lifetime = cache.Acquire("Foo"))
                         {