Skip to content
Merged

debug #224

Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions BitFaster.Caching.UnitTests/Buffers/StripedMpscBufferTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,22 @@ public void CapacityReturnsCapacity()
buffer.Capacity.Should().Be(32);
}

[Fact]
public void CountReturnsCount()
{
    // A freshly constructed buffer reports zero items.
    buffer.Count.Should().Be(0);

    // Fill every slot across all stripes; each add is expected to succeed.
    var totalSlots = stripeCount * bufferSize;

    for (var slot = 0; slot < totalSlots; slot++)
    {
        buffer.TryAdd(1.ToString()).Should().Be(BufferStatus.Success);
    }

    // Once completely full, Count equals the buffer's total capacity.
    buffer.Count.Should().Be(buffer.Capacity);
}

[Fact]
public void WhenBufferIsFullTryAddReturnsFull()
{
Expand Down
4 changes: 2 additions & 2 deletions BitFaster.Caching/Atomic/AtomicFactoryAsyncCache.cs
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Diagnostics;
using System.Threading.Tasks;

namespace BitFaster.Caching.Atomic
{
[DebuggerDisplay("Count = {Count}")]
public sealed class AtomicFactoryAsyncCache<K, V> : IAsyncCache<K, V>
{
private readonly ICache<K, AsyncAtomicFactory<K, V>> cache;
Expand Down
5 changes: 2 additions & 3 deletions BitFaster.Caching/Atomic/AtomicFactoryCache.cs
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Diagnostics;

namespace BitFaster.Caching.Atomic
{
[DebuggerDisplay("Count = {Count}")]
public sealed class AtomicFactoryCache<K, V> : ICache<K, V>
{
private readonly ICache<K, AtomicFactory<K, V>> cache;
Expand Down
4 changes: 2 additions & 2 deletions BitFaster.Caching/Atomic/AtomicFactoryScopedAsyncCache.cs
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;

namespace BitFaster.Caching.Atomic
{
[DebuggerDisplay("Count = {Count}")]
public sealed class AtomicFactoryScopedAsyncCache<K, V> : IScopedAsyncCache<K, V> where V : IDisposable
{
private readonly ICache<K, ScopedAsyncAtomicFactory<K, V>> cache;
Expand Down
5 changes: 2 additions & 3 deletions BitFaster.Caching/Atomic/AtomicFactoryScopedCache.cs
Original file line number Diff line number Diff line change
@@ -1,13 +1,12 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;

namespace BitFaster.Caching.Atomic
{
[DebuggerDisplay("Count = {Count}")]
public sealed class AtomicFactoryScopedCache<K, V> : IScopedCache<K, V> where V : IDisposable
{
private readonly ICache<K, ScopedAtomicFactory<K, V>> cache;
Expand Down
5 changes: 2 additions & 3 deletions BitFaster.Caching/Atomic/ScopedAsyncAtomicFactory.cs
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;

namespace BitFaster.Caching.Atomic
{
[DebuggerDisplay("IsValueCreated={initializer == null}, Value={ScopeIfCreated}")]
public sealed class ScopedAsyncAtomicFactory<K, V> : IScoped<V>, IDisposable where V : IDisposable
{
private Scoped<V> scope;
Expand Down
5 changes: 1 addition & 4 deletions BitFaster.Caching/Atomic/ScopedAtomicFactory.cs
Original file line number Diff line number Diff line change
@@ -1,17 +1,14 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace BitFaster.Caching.Atomic
{
// Requirements:
// 1. Exactly once disposal.
// 2. Exactly once invocation of value factory (synchronized create).
// 3. Resolve race between create dispose init, if disposed is called before value is created, scoped value is disposed for life.
[DebuggerDisplay("IsValueCreated={initializer == null}, Value={ScopeIfCreated}")]
public sealed class ScopedAtomicFactory<K, V> : IScoped<V>, IDisposable where V : IDisposable
{
private Scoped<V> scope;
Expand Down
2 changes: 2 additions & 0 deletions BitFaster.Caching/Buffers/MpscBoundedBuffer.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
Expand All @@ -14,6 +15,7 @@ namespace BitFaster.Caching.Buffers
/// Based on BoundedBuffer by Ben Manes.
/// https://github.com/ben-manes/caffeine/blob/master/caffeine/src/main/java/com/github/benmanes/caffeine/cache/BoundedBuffer.java
/// </remarks>
[DebuggerDisplay("Count = {Count}/{Capacity}")]
public class MpscBoundedBuffer<T> where T : class
{
private T[] buffer;
Expand Down
5 changes: 5 additions & 0 deletions BitFaster.Caching/Buffers/StripedMpscBuffer.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading;

Expand All @@ -15,6 +17,7 @@ namespace BitFaster.Caching.Buffers
/// rehashed to select a different buffer to retry up to 3 times. Using this approach
/// writes scale linearly with number of concurrent threads.
/// </summary>
[DebuggerDisplay("Count = {Count}/{Capacity}")]
public class StripedMpscBuffer<T> where T : class
{
const int MaxAttempts = 3;
Expand All @@ -32,6 +35,8 @@ public StripedMpscBuffer(int stripeCount, int bufferSize)
}
}

/// <summary>
/// Gets the number of items currently buffered, summed across all stripes.
/// NOTE(review): the per-stripe counts are read without synchronization, so
/// the result is approximate while concurrent writers are active — confirm
/// this is acceptable for callers (it is fine for the debugger display).
/// </summary>
public int Count => buffers.Sum(b => b.Count);

/// <summary>
/// Gets the total capacity: the stripe count multiplied by the capacity of a
/// single stripe (all stripes are constructed with the same buffer size).
/// </summary>
public int Capacity => buffers.Length * buffers[0].Capacity;

public int DrainTo(T[] outputBuffer)
Expand Down
43 changes: 40 additions & 3 deletions BitFaster.Caching/Lfu/ConcurrentLfu.cs
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ namespace BitFaster.Caching.Lfu
/// Based on Caffeine written by Ben Manes.
/// https://www.apache.org/licenses/LICENSE-2.0
/// </remarks>
[DebuggerTypeProxy(typeof(ConcurrentLfu<,>.LfuDebugView))]
[DebuggerDisplay("Count = {Count}/{Capacity}")]
public sealed class ConcurrentLfu<K, V> : ICache<K, V>, IAsyncCache<K, V>, IBoundedPolicy
{
private const int MaxWriteBufferRetries = 100;
Expand Down Expand Up @@ -644,7 +646,7 @@ private void ReFitProtected()
}
}

[DebuggerDisplay("{Format()}")]
[DebuggerDisplay("{Format(),nq}")]
private class DrainStatus
{
public const int Idle = 0;
Expand Down Expand Up @@ -687,7 +689,7 @@ public int Status()
}

[ExcludeFromCodeCoverage]
private string Format()
internal string Format()
{
switch (this.drainStatus.Value)
{
Expand All @@ -705,7 +707,8 @@ private string Format()
}
}

private class CacheMetrics : ICacheMetrics
[DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Upd = {Updated}, Evict = {Evicted}")]
internal class CacheMetrics : ICacheMetrics
{
public long requestHitCount;
public long requestMissCount;
Expand Down Expand Up @@ -741,6 +744,40 @@ public string FormatLruString()
return sb.ToString();
}
#endif

[ExcludeFromCodeCoverage]
internal class LfuDebugView
{
    // The cache whose internals this proxy surfaces in the debugger.
    private readonly ConcurrentLfu<K, V> lfu;

    /// <summary>
    /// Initializes a debug view over the given cache.
    /// </summary>
    /// <param name="lfu">The cache to inspect.</param>
    /// <exception cref="ArgumentNullException">Thrown when lfu is null.</exception>
    public LfuDebugView(ConcurrentLfu<K, V> lfu)
    {
        // Guard for consistency with LruDebugView, which validates its argument.
        if (lfu is null)
        {
            throw new ArgumentNullException(nameof(lfu));
        }

        this.lfu = lfu;
    }

    /// <summary>
    /// Gets a human readable description of the drain (maintenance) status.
    /// </summary>
    public string Maintenance => lfu.drainStatus.Format();

    /// <summary>
    /// Gets the cache's telemetry counters.
    /// </summary>
    public ICacheMetrics Metrics => lfu.metrics;

    /// <summary>
    /// Gets the buffer of pending read notifications.
    /// </summary>
    public StripedMpscBuffer<LfuNode<K, V>> ReadBuffer => this.lfu.readBuffer;

    /// <summary>
    /// Gets the buffer of pending write notifications.
    /// </summary>
    public StripedMpscBuffer<LfuNode<K, V>> WriteBuffer => this.lfu.writeBuffer;

    /// <summary>
    /// Gets a point-in-time snapshot of the cached key/value pairs.
    /// </summary>
    public KeyValuePair<K, V>[] Items
    {
        get
        {
            // Collect into a list rather than pre-sizing an array from
            // lfu.Count: the cache may be mutated concurrently between the
            // Count read and enumeration, which would overrun a fixed-size
            // array and throw inside the debugger view.
            var items = new List<KeyValuePair<K, V>>(lfu.Count);

            foreach (var kvp in lfu)
            {
                items.Add(kvp);
            }
            return items.ToArray();
        }
    }
}
}

// Explicit layout cannot be a generic class member
Expand Down
2 changes: 2 additions & 0 deletions BitFaster.Caching/Lfu/LfuCapacityPartition.cs
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text;

namespace BitFaster.Caching.Lfu
{
[DebuggerDisplay("{Capacity} ({Window}/{Protected}/{Probation})")]
public class LfuCapacityPartition
{
private readonly int max;
Expand Down
9 changes: 4 additions & 5 deletions BitFaster.Caching/Lru/ConcurrentLru.cs
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Collections.Generic;
using System.Diagnostics;

namespace BitFaster.Caching.Lru
{
///<inheritdoc/>
[DebuggerTypeProxy(typeof(LruDebugView<,>))]
[DebuggerDisplay("Count = {Count}/{Capacity}")]
public sealed class ConcurrentLru<K, V> : ConcurrentLruCore<K, V, LruItem<K, V>, LruPolicy<K, V>, TelemetryPolicy<K, V>>
{
/// <summary>
Expand Down
2 changes: 2 additions & 0 deletions BitFaster.Caching/Lru/ConcurrentLruCore.cs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
Expand Down Expand Up @@ -660,6 +661,7 @@ private static CachePolicy CreatePolicy(ConcurrentLruCore<K, V, I, P, T> lru)
// it becomes immutable. However, this object is then somewhere else on the
// heap, which slows down the policies with hit counter logic in benchmarks. Likely
// this approach keeps the structs data members in the same CPU cache line as the LRU.
[DebuggerDisplay("Hit = {Hits}, Miss = {Misses}, Upd = {Updated}, Evict = {Evicted}")]
private class Proxy : ICacheMetrics, ICacheEvents<K, V>, IBoundedPolicy, ITimePolicy
{
private readonly ConcurrentLruCore<K, V, I, P, T> lru;
Expand Down
6 changes: 3 additions & 3 deletions BitFaster.Caching/Lru/ConcurrentTLru.cs
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Diagnostics;

namespace BitFaster.Caching.Lru
{
///<inheritdoc/>
[DebuggerTypeProxy(typeof(LruDebugView<,>))]
[DebuggerDisplay("Count = {Count}/{Capacity}")]
public sealed class ConcurrentTLru<K, V> : ConcurrentLruCore<K, V, LongTickCountLruItem<K, V>, TLruLongTicksPolicy<K, V>, TelemetryPolicy<K, V>>
{
/// <summary>
Expand Down
2 changes: 2 additions & 0 deletions BitFaster.Caching/Lru/EqualCapacityPartition.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
Expand All @@ -9,6 +10,7 @@ namespace BitFaster.Caching.Lru
/// <summary>
/// A simple partitioning scheme to put an approximately equal number of items in each queue.
/// </summary>
[DebuggerDisplay("{Hot}/{Warm}/{Cold}")]
public class EqualCapacityPartition : ICapacityPartition
{
private readonly int hotCapacity;
Expand Down
7 changes: 4 additions & 3 deletions BitFaster.Caching/Lru/FastConcurrentLru.cs
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Collections.Generic;
using System.Diagnostics;

namespace BitFaster.Caching.Lru
{
///<inheritdoc/>
[DebuggerTypeProxy(typeof(LruDebugView<,>))]
[DebuggerDisplay("Count = {Count}/{Capacity}")]
public sealed class FastConcurrentLru<K, V> : ConcurrentLruCore<K, V, LruItem<K, V>, LruPolicy<K, V>, NoTelemetryPolicy<K, V>>
{
/// <summary>
Expand Down
4 changes: 3 additions & 1 deletion BitFaster.Caching/Lru/FastConcurrentTLru.cs
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Diagnostics;

namespace BitFaster.Caching.Lru
{
///<inheritdoc/>
[DebuggerTypeProxy(typeof(LruDebugView<,>))]
[DebuggerDisplay("Count = {Count}/{Capacity}")]
public sealed class FastConcurrentTLru<K, V> : ConcurrentLruCore<K, V, LongTickCountLruItem<K, V>, TLruLongTicksPolicy<K, V>, NoTelemetryPolicy<K, V>>
{
/// <summary>
Expand Down
6 changes: 2 additions & 4 deletions BitFaster.Caching/Lru/FavorWarmPartition.cs
Original file line number Diff line number Diff line change
@@ -1,15 +1,13 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Diagnostics;

namespace BitFaster.Caching.Lru
{
/// <summary>
/// A capacity partitioning scheme that favors frequently accessed items by allocating 80%
/// capacity to the warm queue.
/// </summary>
[DebuggerDisplay("{Hot}/{Warm}/{Cold}")]
public class FavorWarmPartition : ICapacityPartition
{
private readonly int hotCapacity;
Expand Down
39 changes: 39 additions & 0 deletions BitFaster.Caching/Lru/LruDebugView.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;

namespace BitFaster.Caching.Lru
{
[ExcludeFromCodeCoverage]
internal class LruDebugView<K, V>
{
    // The cache whose contents and metrics this proxy exposes to the debugger.
    private readonly ICache<K, V> cache;

    /// <summary>
    /// Initializes a debug view over the given cache.
    /// </summary>
    /// <param name="cache">The cache to inspect.</param>
    /// <exception cref="ArgumentNullException">Thrown when cache is null.</exception>
    public LruDebugView(ICache<K, V> cache)
    {
        if (cache is null)
        {
            throw new ArgumentNullException(nameof(cache));
        }

        this.cache = cache;
    }

    /// <summary>
    /// Gets a point-in-time snapshot of the cached key/value pairs.
    /// </summary>
    public KeyValuePair<K, V>[] Items
    {
        get
        {
            // Collect into a list rather than pre-sizing an array from
            // cache.Count: the cache may be mutated concurrently between the
            // Count read and enumeration, which would overrun a fixed-size
            // array and throw inside the debugger view.
            var items = new List<KeyValuePair<K, V>>(cache.Count);

            foreach (var kvp in cache)
            {
                items.Add(kvp);
            }
            return items.ToArray();
        }
    }

    /// <summary>
    /// Gets the cache's telemetry counters.
    /// </summary>
    public ICacheMetrics Metrics => cache.Metrics.Value;
}
}
Loading