4 changes: 2 additions & 2 deletions BitFaster.Caching.HitRateAnalysis/Arc/RunnerConfig.cs
@@ -25,8 +25,8 @@ public RunnerConfig(string name, int[] cacheSizes, Uri dataUri)

public ArcDataFile File => this.file;

public static RunnerConfig Database = new RunnerConfig("results.arc.database.csv", new[] { 1000000, 2000000, 3000000, 4000000, 5000000, 6000000, 7000000, 8000000 }, new Uri("https://github.com/bitfaster/cache-datasets/releases/download/v1.0/DS1.lis.gz"));
public static RunnerConfig Search = new RunnerConfig("results.arc.search.csv", new[] { 100000, 200000, 300000, 400000, 500000, 600000, 700000, 800000 }, new Uri("https://github.com/bitfaster/cache-datasets/releases/download/v1.0/S3.lis.gz"));
public static RunnerConfig Database = new RunnerConfig("results.arc.database.csv", new[] { 1_000_000, 2_000_000, 3_000_000, 4_000_000, 5_000_000, 6_000_000, 7_000_000, 8_000_000 }, new Uri("https://github.com/bitfaster/cache-datasets/releases/download/v1.0/DS1.lis.gz"));
public static RunnerConfig Search = new RunnerConfig("results.arc.search.csv", new[] { 100_000, 200_000, 300_000, 400_000, 500_000, 600_000, 700_000, 800_000 }, new Uri("https://github.com/bitfaster/cache-datasets/releases/download/v1.0/S3.lis.gz"));
public static RunnerConfig Oltp = new RunnerConfig("results.arc.oltp.csv", new[] { 250, 500, 750, 1000, 1250, 1500, 1750, 2000 }, new Uri("https://github.com/bitfaster/cache-datasets/releases/download/v1.0/OLTP.lis.gz"));
}
}
2 changes: 1 addition & 1 deletion BitFaster.Caching.HitRateAnalysis/Wikibench/Runner.cs
@@ -38,7 +38,7 @@ public static async Task Run()
a.TestKey(url);
}

if (count++ % 100000 == 0)
if (count++ % 100_000 == 0)
{
Console.WriteLine($"Processed {count} URIs...");
}
4 changes: 2 additions & 2 deletions BitFaster.Caching.HitRateAnalysis/Zipfian/Runner.cs
@@ -21,12 +21,12 @@ public class Runner
// distribution (about 45 / 20).

// Took 1 million samples
const int sampleCount = 1000000;
const int sampleCount = 1_000_000;

// Simulated a database of 50,000 pages and
// buffer sizes ranging from 2,500 (5%) items to 20,000
// (40%) items.
const int n = 50000;
const int n = 50_000;

public static void Run()
{
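The constants above describe the simulated workload: 1,000,000 key samples drawn over a universe of 50,000 items, with the probability of the k-th most popular item proportional to 1/k^s. The repository's actual Zipf generator is not part of this diff; the following is a minimal inverse-CDF sketch for intuition only, borrowing s = 0.86 from ConfigFactory further down.

// Illustration only: draws Zipf-distributed ranks 1..n with exponent s.
static int[] SampleZipf(Random rng, int sampleCount, int n, double s)
{
    var cdf = new double[n];
    double sum = 0;
    for (int k = 1; k <= n; k++)
    {
        sum += 1.0 / Math.Pow(k, s);
        cdf[k - 1] = sum;
    }

    var samples = new int[sampleCount];
    for (int i = 0; i < sampleCount; i++)
    {
        double u = rng.NextDouble() * sum;
        int idx = Array.BinarySearch(cdf, u);
        samples[i] = (idx >= 0 ? idx : ~idx) + 1; // rank of the sampled item
    }
    return samples;
}

// e.g. SampleZipf(new Random(42), sampleCount: 1_000_000, n: 50_000, s: 0.86)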
@@ -33,6 +33,9 @@
<None Update="NUMAExec.bat">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="SizeExec.bat">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>

</Project>
33 changes: 33 additions & 0 deletions BitFaster.Caching.ThroughputAnalysis/CommandParser.cs
@@ -0,0 +1,33 @@

namespace BitFaster.Caching.ThroughputAnalysis
{
public class CommandParser
{
public static (Mode, int) Parse(string[] args)
{
// arg[0] == mode, arg[1] == size
if (args.Length == 2)
{
if (int.TryParse(args[0], out int modeArg))
{
if (int.TryParse(args[1], out int size))
{
return ((Mode)modeArg, size);
}
}
}

Mode mode = Mode.Read;
var menu = new EasyConsole.Menu()
.Add("Read", () => mode = Mode.Read)
.Add("Read + Write", () => mode = Mode.ReadWrite)
.Add("Update", () => mode = Mode.Update)
.Add("Evict", () => mode = Mode.Evict)
.Add("All", () => mode = Mode.All);

menu.Display();

return (mode, 500);
}
}
}
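For illustration, the fallback path when the two numeric arguments are absent or malformed (hypothetical call; EasyConsole's Menu prompts on the console for a selection):

// No usable arguments: the menu asks for a mode, and the returned size
// falls back to 500, the value previously hard-coded as n in ConfigFactory.
var (mode, size) = CommandParser.Parse(Array.Empty<string>()); // (selected mode, 500)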
38 changes: 29 additions & 9 deletions BitFaster.Caching.ThroughputAnalysis/ConfigFactory.cs
@@ -8,27 +8,47 @@ namespace BitFaster.Caching.ThroughputAnalysis
{
public class ConfigFactory
{
const double s = 0.86; // Zipf s parameter, controls distribution
const int n = 500; // number of unique items for Zipf
const int maxThreads = 52;
const int sampleCount = 2000;
const double s = 0.86; // Zipf s parameter, controls distribution

public static (ThroughputBenchmarkBase, IThroughputBenchConfig, int) Create(Mode mode, int repeatCount)
public static (ThroughputBenchmarkBase, IThroughputBenchConfig, int) Create(Mode mode, int cacheSize, int maxThreads)
{
int iterations = GetIterationCount(cacheSize);
int samples = GetSampleCount(cacheSize);
int n = cacheSize; // number of unique items for Zipf

switch (mode)
{
case Mode.Read:
return (new ReadThroughputBenchmark(), new ZipfConfig(repeatCount, sampleCount, s, n), n);
return (new ReadThroughputBenchmark(), new ZipfConfig(iterations, samples, s, n), cacheSize);
case Mode.ReadWrite:
// cache holds 10% of all items
return (new ReadThroughputBenchmark(), new ZipfConfig(repeatCount, sampleCount, s, n), n / 10);
cacheSize = cacheSize / 10;
return (new ReadThroughputBenchmark(), new ZipfConfig(iterations, samples, s, n), cacheSize);
case Mode.Update:
return (new UpdateThroughputBenchmark(), new ZipfConfig(repeatCount, sampleCount, s, n), n);
return (new UpdateThroughputBenchmark(), new ZipfConfig(iterations, samples, s, n), cacheSize);
case Mode.Evict:
return (new ReadThroughputBenchmark(), new EvictionConfig(repeatCount, sampleCount, maxThreads), n);
return (new ReadThroughputBenchmark(), new EvictionConfig(iterations, samples, maxThreads), cacheSize);
}

throw new InvalidOperationException();
}

private static int GetIterationCount(int cacheSize) => cacheSize switch
{
< 500 => 400,
< 5_000 => 200,
< 10_000 => 100,
< 100_000 => 50,
< 1_000_000 => 25,
< 10_000_000 => 5,
_ => 1
};

private static int GetSampleCount(int cacheSize) => cacheSize switch
{
< 5_000 => cacheSize * 4,
< 5_000_000 => cacheSize * 2,
_ => cacheSize
};
}
}
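To make the scaling concrete, here is an illustrative sketch that mirrors GetIterationCount and GetSampleCount (renamed Iterations and Samples purely for this example) and prints the parameters resolved for the cache sizes exercised by SizeExec.bat.

using System;

static int Iterations(int cacheSize) => cacheSize switch
{
    < 500 => 400, < 5_000 => 200, < 10_000 => 100,
    < 100_000 => 50, < 1_000_000 => 25, < 10_000_000 => 5, _ => 1
};

static int Samples(int cacheSize) => cacheSize switch
{
    < 5_000 => cacheSize * 4, < 5_000_000 => cacheSize * 2, _ => cacheSize
};

foreach (int size in new[] { 100, 10_000, 1_000_000, 10_000_000 })
    Console.WriteLine($"{size,12:N0} -> {Iterations(size)} iterations, {Samples(size):N0} samples");

This prints 400 iterations / 400 samples for 100, 50 / 20,000 for 10,000, 5 / 2,000,000 for 1,000,000, and 1 / 10,000,000 for 10,000,000: larger caches get proportionally more samples but fewer repeat iterations, presumably to keep total run time manageable as size grows.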
4 changes: 2 additions & 2 deletions BitFaster.Caching.ThroughputAnalysis/Exporter.cs
@@ -46,9 +46,9 @@ public void CaptureRows(IEnumerable<ICacheFactory> caches)
}
}

public void ExportCsv(Mode mode)
public void ExportCsv(Mode mode, int cacheSize)
{
using (var textWriter = File.CreateText($"Results{mode}.csv"))
using (var textWriter = File.CreateText($"Results_{mode}_{cacheSize}.csv"))
using (var csv = new CsvWriter(textWriter, CultureInfo.InvariantCulture))
{
foreach (DataColumn column in resultTable.Columns)
12 changes: 2 additions & 10 deletions BitFaster.Caching.ThroughputAnalysis/Program.cs
@@ -3,15 +3,7 @@

Host.PrintInfo();

Mode mode = Mode.Read;
var (mode, size) = CommandParser.Parse(args);

var menu = new EasyConsole.Menu()
.Add("Read", () => mode = Mode.Read)
.Add("Read + Write", () => mode = Mode.ReadWrite)
.Add("Update", () => mode = Mode.Update)
.Add("Evict", () => mode = Mode.Evict)
.Add("All", () => mode = Mode.All);

menu.Display();
Runner.Run(mode);
Runner.Run(mode, size);
Console.WriteLine("Done.");
13 changes: 6 additions & 7 deletions BitFaster.Caching.ThroughputAnalysis/Runner.cs
@@ -8,26 +8,25 @@ namespace BitFaster.Caching.ThroughputAnalysis
public class Runner
{
private static readonly int maxThreads = Host.GetAvailableCoreCount() * 2;
private const int repeatCount = 400;

public static void Run(Mode mode)
public static void Run(Mode mode, int cacheSize)
{
ThreadPool.SetMinThreads(maxThreads, maxThreads);

foreach (Mode value in Enum.GetValues(mode.GetType()))
{
if (mode.HasFlag(value) && value != Mode.All)
{
RunTest(value);
RunTest(value, cacheSize);
}
}
}

private static void RunTest(Mode mode)
private static void RunTest(Mode mode, int cacheSize)
{
Console.WriteLine("Generating input distribution...");

var (bench, dataConfig, capacity) = ConfigFactory.Create(mode, repeatCount);
var (bench, dataConfig, capacity) = ConfigFactory.Create(mode, cacheSize, maxThreads);

var cachesToTest = new List<ICacheFactory>();
cachesToTest.Add(new ClassicLruFactory(capacity));
@@ -40,7 +39,7 @@ private static void RunTest(Mode mode)
exporter.Initialize(cachesToTest);

Console.WriteLine();
Console.WriteLine($"Running {mode}...");
Console.WriteLine($"Running {mode} with size {cacheSize} over {maxThreads} threads...");
Console.WriteLine();

foreach (int tc in Enumerable.Range(1, maxThreads).ToArray())
@@ -63,7 +62,7 @@ private static void RunTest(Mode mode)

exporter.CaptureRows(cachesToTest);

exporter.ExportCsv(mode);
exporter.ExportCsv(mode, cacheSize);

//ConsoleTable
// .From(resultTable)
9 changes: 9 additions & 0 deletions BitFaster.Caching.ThroughputAnalysis/SizeExec.bat
@@ -0,0 +1,9 @@
cls

@echo off
set DOTNET_Thread_UseAllCpuGroups=1

call BitFaster.Caching.ThroughputAnalysis.exe 4 100
call BitFaster.Caching.ThroughputAnalysis.exe 4 10000
call BitFaster.Caching.ThroughputAnalysis.exe 4 1000000
call BitFaster.Caching.ThroughputAnalysis.exe 4 10000000
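Each call above runs one full thread-count sweep at a fixed cache size: the first argument selects the mode, the second the cache size, and both are handled by CommandParser.Parse. Roughly what a single line amounts to in code (which Mode member the value 4 denotes depends on the enum's underlying values, which this diff does not show):

// Equivalent to "BitFaster.Caching.ThroughputAnalysis.exe 4 1000000"
var (mode, size) = CommandParser.Parse(new[] { "4", "1000000" }); // ((Mode)4, 1_000_000)
Runner.Run(mode, size);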
10 changes: 6 additions & 4 deletions BitFaster.Caching.ThroughputAnalysis/ThroughputBenchConfig.cs
@@ -47,20 +47,22 @@ public class EvictionConfig : IThroughputBenchConfig

private int[][] samples;

const int maxSamples = 10_000_000;

public EvictionConfig(int iterations, int sampleCount, int threadCount)
{
if (sampleCount > 100000)
if (sampleCount > maxSamples)
{
throw new ArgumentOutOfRangeException("Sample count too large, will result in overlap");
}

this.iterations = iterations;
samples = new int[threadCount][];

for (int i = 0; i < threadCount; i++)
Parallel.ForEach(Enumerable.Range(0, threadCount), i =>
{
samples[i] = Enumerable.Range(i * 100000, sampleCount).ToArray();
}
samples[i] = Enumerable.Range(i * maxSamples, sampleCount).ToArray();
});
}

public int Iterations => iterations;
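To spell out the reasoning behind the larger guard and the new i * maxSamples offset: each thread draws its keys from its own window of 10,000,000 integers, so per-thread key ranges cannot overlap as long as sampleCount <= maxSamples, which is what the constructor now enforces. A small sketch with assumed values (the sample and thread counts here are made up):

const int maxSamples = 10_000_000;
int sampleCount = 2_000_000; // any value <= maxSamples passes the guard
for (int i = 0; i < 4; i++)
{
    long start = (long)i * maxSamples;
    // thread i reads [start, start + sampleCount); thread i + 1 starts at start + maxSamples
    Console.WriteLine($"thread {i}: [{start:N0}, {start + sampleCount:N0})");
}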
@@ -23,7 +23,7 @@ public double Run(int warmup, int runs, int threads, IThroughputBenchConfig conf
}

// return million ops/sec
const int oneMillion = 1000000;
const int oneMillion = 1_000_000;
return AverageLast(results, runs) / oneMillion;
}

4 changes: 2 additions & 2 deletions BitFaster.Caching.UnitTests/Counters/StripedLongAdderTests.cs
@@ -30,13 +30,13 @@ public async Task WhenAddingConcurrentlySumIsCorrect()

await Threaded.Run(4, () =>
{
for (int i = 0; i < 100000; i++)
for (int i = 0; i < 100_000; i++)
{
adder.Increment();
}
});

adder.Count().Should().Be(400000);
adder.Count().Should().Be(400_000);
}
}
}
16 changes: 8 additions & 8 deletions BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs
@@ -687,7 +687,7 @@ public void TrimWhileItemsInWriteBufferRemovesNItems()
public void VerifyHitsWithBackgroundScheduler()
{
// when running all tests in parallel, sample count drops significantly: set low bar for stability.
VerifyHits(iterations: 10000000, minSamples: 250000);
VerifyHits(iterations: 10_000_000, minSamples: 250_000);
}

//Elapsed 590.8154ms - 0.0005908154ns/op
@@ -698,7 +698,7 @@ public void VerifyHitsWithThreadPoolScheduler()
{
// when running all tests in parallel, sample count drops significantly: set low bar for stability.
cache = new ConcurrentLfu<int, int>(1, 20, new ThreadPoolScheduler(), EqualityComparer<int>.Default);
VerifyHits(iterations: 10000000, minSamples: 500000);
VerifyHits(iterations: 10_000_000, minSamples: 500_000);
}

//Elapsed 273.0148ms - 0.0002730148ns/op
@@ -708,7 +708,7 @@ public void VerifyHitsWithThreadPoolScheduler()
public void VerifyHitsWithNullScheduler()
{
cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default);
VerifyHits(iterations: 10000000, minSamples: -1);
VerifyHits(iterations: 10_000_000, minSamples: -1);
}

//Will drop 78125 reads.
@@ -722,7 +722,7 @@ public void VerifyHitsWithForegroundScheduler()

// Note: TryAdd will drop 1 read per full read buffer, since TryAdd will return false
// before TryScheduleDrain is called. This serves as sanity check.
int iterations = 10000000;
int iterations = 10_000_000;
int dropped = iterations / ConcurrentLfu<int, int>.DefaultBufferSize;

this.output.WriteLine($"Will drop {dropped} reads.");
@@ -735,7 +735,7 @@ public void VerifyMisses()
{
cache = new ConcurrentLfu<int, int>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default);

int iterations = 100000;
int iterations = 100_000;
Func<int, int> func = x => x;

var start = Stopwatch.GetTimestamp();
@@ -751,7 +751,7 @@ public void VerifyMisses()

var totalTicks = end - start;
var timeMs = ((double)totalTicks / Stopwatch.Frequency) * 1000.0;
var timeNs = timeMs / 1000000;
var timeNs = timeMs / 1_000_000;

var timePerOp = timeMs / (double)iterations;
var samplePercent = this.cache.Metrics.Value.Misses / (double)iterations * 100;
@@ -770,7 +770,7 @@ public async Task ThreadedVerifyMisses()
cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default);

int threads = 4;
int iterations = 100000;
int iterations = 100_000;

await Threaded.Run(threads, i =>
{
@@ -808,7 +808,7 @@ private void VerifyHits(int iterations, int minSamples)

var totalTicks = end - start;
var timeMs = ((double)totalTicks / Stopwatch.Frequency) * 1000.0;
var timeNs = timeMs / 1000000;
var timeNs = timeMs / 1_000_000;

var timePerOp = timeMs / (double)iterations;
var samplePercent = this.cache.Metrics.Value.Hits / (double)iterations * 100;
2 changes: 1 addition & 1 deletion BitFaster.Caching.UnitTests/SingletonCacheTests.cs
@@ -123,7 +123,7 @@ public async Task AcquireWithSameKeyOnManyDifferentThreadsReturnsSameValue()
{
tasks[concurrency] = Task.Run(() =>
{
for (int i = 0; i < 100000; i++)
for (int i = 0; i < 100_000; i++)
{
using (var lifetime = cache.Acquire("Foo"))
{