diff --git a/BitFaster.Caching.ThroughputAnalysis/CacheFactory.cs b/BitFaster.Caching.ThroughputAnalysis/CacheFactory.cs
new file mode 100644
index 00000000..077fbedb
--- /dev/null
+++ b/BitFaster.Caching.ThroughputAnalysis/CacheFactory.cs
@@ -0,0 +1,131 @@
+using System;
+using System.Collections.Generic;
+using System.Data;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using BitFaster.Caching.Lfu;
+using BitFaster.Caching.Lru;
+using BitFaster.Caching.Scheduler;
+
+namespace BitFaster.Caching.ThroughputAnalysis
+{
+    public interface ICacheFactory
+    {
+        (IScheduler, ICache<int, int>) Create(int threadCount);
+
+        public string Name { get; }
+
+        DataRow DataRow { get; set; }
+    }
+
+    public class FastConcurrentLruFactory : ICacheFactory
+    {
+        private int capacity;
+
+        public FastConcurrentLruFactory(int capacity)
+        {
+            this.capacity = capacity;
+        }
+
+        public string Name => "FsTConcLRU";
+
+        public DataRow DataRow { get; set; }
+
+        public (IScheduler, ICache<int, int>) Create(int threadCount)
+        {
+            var cache = new FastConcurrentLru<int, int>(threadCount, capacity, EqualityComparer<int>.Default);
+
+            return (null, cache);
+        }
+    }
+
+    public class ConcurrentLruFactory : ICacheFactory
+    {
+        private int capacity;
+
+        public ConcurrentLruFactory(int capacity)
+        {
+            this.capacity = capacity;
+        }
+
+        public string Name => "ConcurrLRU";
+
+        public DataRow DataRow { get; set; }
+
+        public (IScheduler, ICache<int, int>) Create(int threadCount)
+        {
+            var cache = new ConcurrentLru<int, int>(threadCount, capacity, EqualityComparer<int>.Default);
+
+            return (null, cache);
+        }
+    }
+
+    public class MemoryCacheFactory : ICacheFactory
+    {
+        private int capacity;
+
+        public MemoryCacheFactory(int capacity)
+        {
+            this.capacity = capacity;
+        }
+
+        public string Name => "MemryCache";
+
+        public DataRow DataRow { get; set; }
+
+        public (IScheduler, ICache<int, int>) Create(int threadCount)
+        {
+            var cache = new MemoryCacheAdaptor<int, int>(capacity);
+
+            return (null, cache);
+        }
+    }
+
+    public class ConcurrentLfuFactory : ICacheFactory
+    {
+        private int capacity;
+
+        public ConcurrentLfuFactory(int capacity)
+        {
+            this.capacity = capacity;
+        }
+
+        public string Name => "ConcurrLFU";
+
+        public DataRow DataRow { get; set; }
+
+        public (IScheduler, ICache<int, int>) Create(int threadCount)
+        {
+            var scheduler = new BackgroundThreadScheduler();
+            var cache = new ConcurrentLfu<int, int>(
+                concurrencyLevel: threadCount,
+                capacity: capacity,
+                scheduler: scheduler,
+                EqualityComparer<int>.Default);
+
+            return (scheduler, cache);
+        }
+    }
+
+    public class ClassicLruFactory : ICacheFactory
+    {
+        private int capacity;
+
+        public ClassicLruFactory(int capacity)
+        {
+            this.capacity = capacity;
+        }
+
+        public string Name => "ClassicLru";
+
+        public DataRow DataRow { get; set; }
+
+        public (IScheduler, ICache<int, int>) Create(int threadCount)
+        {
+            var cache = new ClassicLru<int, int>(threadCount, capacity, EqualityComparer<int>.Default);
+
+            return (null, cache);
+        }
+    }
+}
diff --git a/BitFaster.Caching.ThroughputAnalysis/ConfigFactory.cs b/BitFaster.Caching.ThroughputAnalysis/ConfigFactory.cs
new file mode 100644
index 00000000..acb4d975
--- /dev/null
+++ b/BitFaster.Caching.ThroughputAnalysis/ConfigFactory.cs
@@ -0,0 +1,34 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace BitFaster.Caching.ThroughputAnalysis
+{
+    public class ConfigFactory
+    {
+        const double s = 0.86; // Zipf s parameter, controls distribution
+        const int n = 500;     // number of unique items for Zipf
+        const int maxThreads = 52;
+        const int sampleCount = 2000;
+
+        public static (ThroughputBenchmarkBase, IThroughputBenchConfig, int) Create(Mode mode, int repeatCount)
+        {
+            switch (mode)
+            {
+                case Mode.Read:
+                    return (new ReadThroughputBenchmark(), new ZipfConfig(repeatCount, sampleCount, s, n), n);
+                case Mode.ReadWrite:
+                    // cache holds 10% of all items
+                    return (new ReadThroughputBenchmark(), new ZipfConfig(repeatCount, sampleCount, s, n), n / 10);
+                case Mode.Update:
+                    return (new UpdateThroughputBenchmark(), new ZipfConfig(repeatCount, sampleCount, s, n), n);
+                case Mode.Evict:
+                    return (new ReadThroughputBenchmark(), new EvictionConfig(repeatCount, sampleCount, maxThreads), n);
+            }
+
+            throw new InvalidOperationException();
+        }
+    }
+}
diff --git a/BitFaster.Caching.ThroughputAnalysis/Exporter.cs b/BitFaster.Caching.ThroughputAnalysis/Exporter.cs
new file mode 100644
index 00000000..4ff9e597
--- /dev/null
+++ b/BitFaster.Caching.ThroughputAnalysis/Exporter.cs
@@ -0,0 +1,71 @@
+using System;
+using System.Collections.Generic;
+using System.Data;
+using System.Globalization;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using CsvHelper;
+
+namespace BitFaster.Caching.ThroughputAnalysis
+{
+    public class Exporter
+    {
+        DataTable resultTable = new DataTable();
+
+        public Exporter(int maxThreads)
+        {
+            // output:
+            // ThreadCount   1   2   3   4   5
+            // Classic       5   6   7   7   8
+            // Concurrent    5   6   7   7   8
+
+            resultTable.Clear();
+            resultTable.Columns.Add("ThreadCount");
+            foreach (var tc in Enumerable.Range(1, maxThreads).ToArray())
+            {
+                resultTable.Columns.Add(tc.ToString());
+            }
+        }
+
+        public void Initialize(IEnumerable<ICacheFactory> caches)
+        {
+            foreach (var c in caches)
+            {
+                c.DataRow = resultTable.NewRow();
+                c.DataRow["ThreadCount"] = c.Name;
+            }
+        }
+
+        public void CaptureRows(IEnumerable<ICacheFactory> caches)
+        {
+            foreach (var c in caches)
+            {
+                resultTable.Rows.Add(c.DataRow);
+            }
+        }
+
+        public void ExportCsv(Mode mode)
+        {
+            using (var textWriter = File.CreateText($"Results{mode}.csv"))
+            using (var csv = new CsvWriter(textWriter, CultureInfo.InvariantCulture))
+            {
+                foreach (DataColumn column in resultTable.Columns)
+                {
+                    csv.WriteField(column.ColumnName);
+                }
+                csv.NextRecord();
+
+                foreach (DataRow row in resultTable.Rows)
+                {
+                    for (var i = 0; i < resultTable.Columns.Count; i++)
+                    {
+                        csv.WriteField(row[i]);
+                    }
+                    csv.NextRecord();
+                }
+            }
+        }
+    }
+}
diff --git a/BitFaster.Caching.ThroughputAnalysis/MemoryCacheAdaptor.cs b/BitFaster.Caching.ThroughputAnalysis/MemoryCacheAdaptor.cs
index 0293b3d5..ca0802a9 100644
--- a/BitFaster.Caching.ThroughputAnalysis/MemoryCacheAdaptor.cs
+++ b/BitFaster.Caching.ThroughputAnalysis/MemoryCacheAdaptor.cs
@@ -31,9 +31,11 @@ public MemoryCacheAdaptor(int capacity)
 
         public ICollection<K> Keys => throw new NotImplementedException();
 
+        private static readonly MemoryCacheEntryOptions SizeOne = new MemoryCacheEntryOptions() { Size = 1 };
+
         public void AddOrUpdate(K key, V value)
         {
-            throw new NotImplementedException();
+            exMemoryCache.Set(key, value, SizeOne);
         }
 
         public void Clear()
@@ -48,7 +50,6 @@ public IEnumerator<KeyValuePair<K, V>> GetEnumerator()
 
         public V GetOrAdd(K key, Func<K, V> valueFactory)
         {
-
             if (!exMemoryCache.TryGetValue(key, out object result))
             {
                 using ICacheEntry entry = exMemoryCache.CreateEntry(key);
diff --git a/BitFaster.Caching.ThroughputAnalysis/Mode.cs b/BitFaster.Caching.ThroughputAnalysis/Mode.cs
new file mode 100644
index 00000000..50056795
--- /dev/null
+++ b/BitFaster.Caching.ThroughputAnalysis/Mode.cs
@@ -0,0 +1,16 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace BitFaster.Caching.ThroughputAnalysis
+{
+    public enum Mode
+    {
+        Read,
+        ReadWrite,
+        Evict,
+        Update,
+    }
+}
diff --git a/BitFaster.Caching.ThroughputAnalysis/ParallelBenchmark.cs b/BitFaster.Caching.ThroughputAnalysis/ParallelBenchmark.cs
new file mode 100644
index 00000000..ff50da2f
--- /dev/null
+++ b/BitFaster.Caching.ThroughputAnalysis/ParallelBenchmark.cs
@@ -0,0 +1,43 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+using System.Reflection;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace BitFaster.Caching.ThroughputAnalysis
+{
+    public class ParallelBenchmark
+    {
+        public static TimeSpan Run(Action<int> action, int threads)
+        {
+            Task[] tasks = new Task[threads];
+            ManualResetEventSlim mre = new ManualResetEventSlim();
+
+            Action<int> syncStart = taskId =>
+            {
+                mre.Wait();
+                action(taskId);
+            };
+
+            for (int i = 0; i < tasks.Length; i++)
+            {
+                int index = i;
+                tasks[i] = Task.Factory.StartNew(() => syncStart(index), TaskCreationOptions.LongRunning);
+            }
+
+            // try to mitigate spam from MemoryCache
+            for (int i = 0; i < 3; i++)
+            {
+                GC.Collect();
+            }
+
+            var sw = Stopwatch.StartNew();
+            mre.Set();
+            Task.WaitAll(tasks);
+            return sw.Elapsed;
+        }
+    }
+}
diff --git a/BitFaster.Caching.ThroughputAnalysis/Program.cs b/BitFaster.Caching.ThroughputAnalysis/Program.cs
index 04e2168d..e955eeec 100644
--- a/BitFaster.Caching.ThroughputAnalysis/Program.cs
+++ b/BitFaster.Caching.ThroughputAnalysis/Program.cs
@@ -1,120 +1,69 @@
 using System;
 using System.Collections.Generic;
-using System.Data;
-using System.Diagnostics;
-using System.Globalization;
-using System.IO;
 using System.Linq;
 using System.Threading;
-using System.Threading.Tasks;
 using BenchmarkDotNet.Environments;
-using BitFaster.Caching.Lfu;
-using BitFaster.Caching.Lru;
-using BitFaster.Caching.Scheduler;
-using ConsoleTables;
-using CsvHelper;
-using MathNet.Numerics.Distributions;
 
 namespace BitFaster.Caching.ThroughputAnalysis
 {
     class Program
     {
-        const double s = 0.86;
-        const int n = 500;
-        static int capacity = 500;
-        const int maxThreads = 52;
-        const int sampleCount = 2000;
-        const int repeatCount = 400;
-
-        private static int[] samples = new int[sampleCount];
+        private static readonly int maxThreads = Environment.ProcessorCount * 2;
+        private const int repeatCount = 400;
 
         static void Main(string[] args)
        {
-            ThreadPool.SetMaxThreads(maxThreads, maxThreads);
+            ThreadPool.SetMinThreads(maxThreads, maxThreads);
 
             PrintHostInfo();
 
+            Mode mode = Mode.Read;
+
             var menu = new EasyConsole.Menu()
-                .Add("Read", () => capacity = n)
-                .Add("Read + Write", () => capacity = n / 10);
+                .Add("Read", () => mode = Mode.Read)
+                .Add("Read + Write", () => mode = Mode.ReadWrite)
+                .Add("Update", () => mode = Mode.Update)
+                .Add("Evict", () => mode = Mode.Evict);
 
             menu.Display();
 
             Console.WriteLine("Generating input distribution...");
-            samples = new int[sampleCount];
-            Zipf.Samples(samples, s, n);
-
-            int[] threadCount = Enumerable.Range(1, maxThreads).ToArray();
-
-            // Desired output:
-            // Class        1  2  3  4  5
-            // Classic      5  6  7  7  8
-            // Concurrent   5  6  7  7  8
-            DataTable resultTable = new DataTable();
-            resultTable.Clear();
-            resultTable.Columns.Add("Class");
-            foreach (var tc in threadCount)
-            {
-                resultTable.Columns.Add(tc.ToString());
-            }
-            DataRow classicLru = resultTable.NewRow();
-            DataRow memoryCache = resultTable.NewRow();
-            DataRow concurrentLru = resultTable.NewRow();
-            DataRow concurrentLfu = resultTable.NewRow();
+            var (bench, dataConfig, capacity) = ConfigFactory.Create(mode, repeatCount);
+
+            var cachesToTest = new List<ICacheFactory>();
+            cachesToTest.Add(new ClassicLruFactory(capacity));
+            cachesToTest.Add(new MemoryCacheFactory(capacity));
+            cachesToTest.Add(new FastConcurrentLruFactory(capacity));
+            cachesToTest.Add(new ConcurrentLruFactory(capacity));
+            cachesToTest.Add(new ConcurrentLfuFactory(capacity));
+
+            var exporter = new Exporter(maxThreads);
+            exporter.Initialize(cachesToTest);
 
-            classicLru["Class"] = "classicLru";
-            memoryCache["Class"] = "memoryCach";
-            concurrentLru["Class"] = "concurrentLru";
-            concurrentLfu["Class"] = "concurrentLfu";
+            Console.WriteLine();
+            Console.WriteLine($"Running {mode}...");
+            Console.WriteLine();
 
-            foreach (int tc in threadCount)
+            foreach (int tc in Enumerable.Range(1, maxThreads).ToArray())
             {
                 const int warmup = 3;
                 const int runs = 6;
-                double[] results = new double[warmup + runs];
-
-                for (int i = 0; i < warmup + runs; i++)
-                {
-                    results[i] = MeasureThroughput(new ClassicLru<int, int>(tc, capacity, EqualityComparer<int>.Default), tc);
-                }
-                double avg = AverageLast(results, runs) / 1000000;
-                Console.WriteLine($"ClassicLru ({tc}) {avg} million ops/sec");
-                classicLru[tc.ToString()] = avg.ToString();
-
-                for (int i = 0; i < warmup + runs; i++)
-                {
-                    results[i] = MeasureThroughput(new MemoryCacheAdaptor<int, int>(capacity), tc);
-                }
-                avg = AverageLast(results, runs) / 1000000;
-                Console.WriteLine($"memoryCach ({tc}) {avg} million ops/sec");
-                memoryCache[tc.ToString()] = avg.ToString();
 
-                for (int i = 0; i < warmup + runs; i++)
+                foreach (var cacheConfig in cachesToTest)
                 {
-                    results[i] = MeasureThroughput(new FastConcurrentLru<int, int>(tc, capacity, EqualityComparer<int>.Default), tc);
-                }
-                avg = AverageLast(results, runs) / 1000000;
-                Console.WriteLine($"ConcurrLru ({tc}) {avg} million ops/sec");
-                concurrentLru[tc.ToString()] = avg.ToString();
+                    var (sched, cache) = cacheConfig.Create(tc);
+                    double thru = bench.Run(warmup, runs, tc, dataConfig, cache);
+                    (sched as IDisposable)?.Dispose();
 
-                for (int i = 0; i < warmup + runs; i++)
-                {
-                    var scheduler = new BackgroundThreadScheduler();
-                    results[i] = MeasureThroughput(new ConcurrentLfu<int, int>(concurrencyLevel: tc, capacity: capacity, scheduler: scheduler, EqualityComparer<int>.Default), tc);
-                    scheduler.Dispose();
+                    cacheConfig.DataRow[tc.ToString()] = thru.ToString();
+                    Console.WriteLine($"{cacheConfig.Name} ({tc.ToString("00")}) {FormatThroughput(thru)} million ops/sec");
                 }
-                avg = AverageLast(results, runs) / 1000000;
-                Console.WriteLine($"ConcurrLfu ({tc}) {avg} million ops/sec");
-                concurrentLfu[tc.ToString()] = avg.ToString();
             }
 
-            resultTable.Rows.Add(classicLru);
-            resultTable.Rows.Add(memoryCache);
-            resultTable.Rows.Add(concurrentLru);
-            resultTable.Rows.Add(concurrentLfu);
+            exporter.CaptureRows(cachesToTest);
 
-            ExportCsv(resultTable);
+            exporter.ExportCsv(mode);
 
             //ConsoleTable
             //    .From(resultTable)
@@ -144,76 +93,11 @@ private static void PrintHostInfo()
             Console.WriteLine();
         }
 
-        private static double AverageLast(double[] results, int count)
-        {
-            double result = 0;
-            for (int i = results.Length - count; i < results.Length; i++)
-            {
-                result += results[i];
-            }
-
-            return result / count;
-        }
-
-        private static double MeasureThroughput(ICache<int, int> cache, int threadCount)
-        {
-            var tasks = new Task[threadCount];
-            ManualResetEvent mre = new ManualResetEvent(false);
-
-            for (int i = 0; i < threadCount; i++)
-            {
-                tasks[i] = Task.Run(() => Test(mre, cache));
-            }
-
-            var sw = Stopwatch.StartNew();
-            mre.Set();
-
-            Task.WaitAll(tasks);
-
-            sw.Stop();
-
-            // throughput = ops/sec
-            return (threadCount * sampleCount * repeatCount) / sw.Elapsed.TotalSeconds;
-        }
-
-        private static void Test(ManualResetEvent mre, ICache<int, int> cache)
-        {
-            // cache has 50 capacity
-            // make zipf for 500 total items, 2000 samples
-            // each thread will lookup all samples 5 times in a row, for a total of 10k GetOrAdds per thread
-            Func<int, int> func = x => x;
-
-            mre.WaitOne();
-
-            for (int j = 0; j < repeatCount; j++)
-            {
-                for (int i = 0; i < sampleCount; i++)
-                {
-                    cache.GetOrAdd(samples[i], func);
-                }
-            }
-        }
-
-        public static void ExportCsv(DataTable results)
+        private static string FormatThroughput(double thru)
         {
-            using (var textWriter = File.CreateText(@"Results.csv"))
-            using (var csv = new CsvWriter(textWriter, CultureInfo.InvariantCulture))
-            {
-                foreach (DataColumn column in results.Columns)
-                {
-                    csv.WriteField(column.ColumnName);
-                }
-                csv.NextRecord();
-
-                foreach (DataRow row in results.Rows)
-                {
-                    for (var i = 0; i < results.Columns.Count; i++)
-                    {
-                        csv.WriteField(row[i]);
-                    }
-                    csv.NextRecord();
-                }
-            }
+            string dformat = "0.00;-0.00";
+            string raw = thru.ToString(dformat);
+            return raw.PadLeft(6, ' ');
         }
     }
 }
diff --git a/BitFaster.Caching.ThroughputAnalysis/ThroughputBenchConfig.cs b/BitFaster.Caching.ThroughputAnalysis/ThroughputBenchConfig.cs
new file mode 100644
index 00000000..c4a56e0e
--- /dev/null
+++ b/BitFaster.Caching.ThroughputAnalysis/ThroughputBenchConfig.cs
@@ -0,0 +1,75 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using MathNet.Numerics.Distributions;
+
+namespace BitFaster.Caching.ThroughputAnalysis
+{
+    public interface IThroughputBenchConfig
+    {
+        int Iterations { get; }
+
+        int Samples { get; }
+
+        int[] GetTestData(int threadId);
+    }
+
+    public class ZipfConfig : IThroughputBenchConfig
+    {
+        private int iterations;
+        private int[] samples;
+
+        public ZipfConfig(int iterations, int sampleCount, double s, int n)
+        {
+            this.iterations = iterations;
+
+            Random random = new Random(666);
+
+            samples = new int[sampleCount];
+            Zipf.Samples(random, samples, s, n);
+        }
+
+        public int Iterations => iterations;
+
+        public int Samples => samples.Length;
+
+        public int[] GetTestData(int threadId)
+        {
+            return samples;
+        }
+    }
+
+    public class EvictionConfig : IThroughputBenchConfig
+    {
+        private int iterations;
+
+        private int[][] samples;
+
+        public EvictionConfig(int iterations, int sampleCount, int threadCount)
+        {
+            if (sampleCount > 100000)
+            {
+                throw new ArgumentOutOfRangeException("Sample count too large, will result in overlap");
+            }
+
+            this.iterations = iterations;
+            samples = new int[threadCount][];
+
+            for (int i = 0; i < threadCount; i++)
+            {
+                samples[i] = Enumerable.Range(i * 100000, sampleCount).ToArray();
+            }
+        }
+
+        public int Iterations => iterations;
+
+        public int Samples => samples[0].Length;
+
+        public int[] GetTestData(int threadId)
+        {
+            return samples[threadId];
+        }
+    }
+}
diff --git a/BitFaster.Caching.ThroughputAnalysis/ThroughputBenchmark.cs b/BitFaster.Caching.ThroughputAnalysis/ThroughputBenchmark.cs
new file mode 100644
index 00000000..6a5d5ca1
--- /dev/null
+++ b/BitFaster.Caching.ThroughputAnalysis/ThroughputBenchmark.cs
@@ -0,0 +1,92 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace BitFaster.Caching.ThroughputAnalysis
+{
+    public interface IThroughputBenchmark
+    {
+        double Run(int warmup, int runs, int threads, IThroughputBenchConfig config, ICache<int, int> cache);
+    }
+
+    public abstract class ThroughputBenchmarkBase
+    {
+        public double Run(int warmup, int runs, int threads, IThroughputBenchConfig config, ICache<int, int> cache)
+        {
+            double[] results = new double[warmup + runs];
+
+            for (int i = 0; i < warmup + runs; i++)
+            {
+                results[i] = Run(threads, config, cache);
+            }
+
+            // return million ops/sec
+            const int oneMillion = 1000000;
+            return AverageLast(results, runs) / oneMillion;
+        }
+
+        protected abstract double Run(int threads, IThroughputBenchConfig config, ICache<int, int> cache);
+
+        private static double AverageLast(double[] results, int count)
+        {
+            double result = 0;
+            for (int i = results.Length - count; i < results.Length; i++)
+            {
+                result += results[i];
+            }
+
+            return result / count;
+        }
+    }
+
+    public class ReadThroughputBenchmark : ThroughputBenchmarkBase
+    {
+        protected override double Run(int threads, IThroughputBenchConfig config, ICache<int, int> cache)
+        {
+            Action<int> action = index =>
+            {
+                int[] samples = config.GetTestData(index);
+                Func<int, int> func = x => x;
+
+                for (int i = 0; i < config.Iterations; i++)
+                {
+                    for (int s = 0; s < samples.Length; s++)
+                    {
+                        cache.GetOrAdd(samples[s], func);
+                    }
+                }
+            };
+
+            var time = ParallelBenchmark.Run(action, threads);
+
+            // throughput = ops/sec
+            return (threads * config.Samples * config.Iterations) / time.TotalSeconds;
+        }
+    }
+
+    public class UpdateThroughputBenchmark : ThroughputBenchmarkBase
+    {
+        protected override double Run(int threads, IThroughputBenchConfig config, ICache<int, int> cache)
+        {
+            Action<int> action = index =>
+            {
+                int[] samples = config.GetTestData(index);
+
+                for (int i = 0; i < config.Iterations; i++)
+                {
+                    for (int s = 0; s < samples.Length; s++)
+                    {
+                        cache.AddOrUpdate(samples[s], samples[s]);
+                    }
+                }
+            };
+
+            var time = ParallelBenchmark.Run(action, threads);
+
+            // throughput = ops/sec
+            return (threads * config.Samples * config.Iterations) / time.TotalSeconds;
+        }
+    }
+}