From 8c3b9ca3e15a03e9001bc4a5e24cade325466b64 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Fri, 30 Aug 2024 16:02:27 +0100 Subject: [PATCH 1/6] investigate and resolve https://github.com/dotnet/aspnetcore/discussions/57582 --- .../Internal/DefaultHybridCache.CacheItem.cs | 2 + .../DefaultHybridCache.ImmutableCacheItem.cs | 14 ++- .../Internal/DefaultHybridCache.L2.cs | 17 ++- .../DefaultHybridCache.MutableCacheItem.cs | 42 +++---- .../DefaultHybridCache.StampedeStateT.cs | 106 +++++++++++++----- .../SizeTests.cs | 95 ++++++++++++++++ 6 files changed, 229 insertions(+), 47 deletions(-) create mode 100644 test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.CacheItem.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.CacheItem.cs index 1f8585d95d5..5585b9b2a29 100644 --- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.CacheItem.cs +++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.CacheItem.cs @@ -87,6 +87,8 @@ protected virtual void OnFinalRelease() // any required release semantics internal abstract class CacheItem : CacheItem { + public abstract bool TryGetSize(out long size); + // attempt to get a value that was *not* previously reserved public abstract bool TryGetValue(out T value); diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.ImmutableCacheItem.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.ImmutableCacheItem.cs index 2118fc39247..9ae8468ba29 100644 --- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.ImmutableCacheItem.cs +++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.ImmutableCacheItem.cs @@ -13,6 +13,8 @@ internal partial class DefaultHybridCache private T _value = default!; // deferred until SetValue + public long Size { get; private set; } = -1; + public override bool DebugIsImmutable => true; // get a shared instance that passes as "reserved"; doesn't need to be 100% singleton, @@ -30,7 +32,11 @@ public static ImmutableCacheItem GetReservedShared() return obj; } - public void SetValue(T value) => _value = value; + public void SetValue(T value, long size) + { + _value = value; + Size = size; + } public override bool TryGetValue(out T value) { @@ -38,6 +44,12 @@ public override bool TryGetValue(out T value) return true; // always available } + public override bool TryGetSize(out long size) + { + size = Size; + return size >= 0; + } + public override bool TryReserveBuffer(out BufferChunk buffer) { buffer = default; diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.L2.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.L2.cs index 5c08aecb9ef..2bcbe901315 100644 --- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.L2.cs +++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.L2.cs @@ -95,13 +95,26 @@ internal void SetL1(string key, CacheItem value, HybridCacheEntryOptions? 
if (value.TryReserve()) { // based on CacheExtensions.Set, but with post-eviction recycling - using var cacheEntry = _localCache.CreateEntry(key); + + // intentionally use manual Dispose rather than "using"; confusingly, it is Dispose() + // that actually commits the add - so: if we fault, we don't want to try + // committing a partially configured cache entry + var cacheEntry = _localCache.CreateEntry(key); cacheEntry.AbsoluteExpirationRelativeToNow = options?.LocalCacheExpiration ?? _defaultLocalCacheExpiration; cacheEntry.Value = value; + + if (value.TryGetSize(out var size)) + { + cacheEntry = cacheEntry.SetSize(size); + } + if (value.NeedsEvictionCallback) { - _ = cacheEntry.RegisterPostEvictionCallback(CacheItem.SharedOnEviction); + cacheEntry = cacheEntry.RegisterPostEvictionCallback(CacheItem.SharedOnEviction); } + + // commit + cacheEntry.Dispose(); } } diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.MutableCacheItem.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.MutableCacheItem.cs index 8ce93b79c4a..2d02c23b6d8 100644 --- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.MutableCacheItem.cs +++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.MutableCacheItem.cs @@ -21,34 +21,38 @@ public void SetValue(ref BufferChunk buffer, IHybridCacheSerializer serialize buffer = default; // we're taking over the lifetime; the caller no longer has it! } - public void SetValue(T value, IHybridCacheSerializer serializer, int maxLength) - { - _serializer = serializer; - var writer = RecyclableArrayBufferWriter.Create(maxLength); - serializer.Serialize(value, writer); - - _buffer = new(writer.DetachCommitted(out var length), length, returnToPool: true); - writer.Dispose(); // no buffers left (we just detached them), but just in case of other logic - } - public override bool TryGetValue(out T value) { // only if we haven't already burned - if (!TryReserve()) + if (TryReserve()) { - value = default!; - return false; + try + { + value = _serializer.Deserialize(_buffer.AsSequence()); + return true; + } + finally + { + _ = Release(); + } } - try - { - value = _serializer.Deserialize(_buffer.AsSequence()); - return true; - } - finally + value = default!; + return false; + } + + public override bool TryGetSize(out long size) + { + // only if we haven't already burned + if (TryReserve()) { + size = _buffer.Length; _ = Release(); + return true; } + + size = 0; + return false; } public override bool TryReserveBuffer(out BufferChunk buffer) diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs index 842444c8666..403b5c894e1 100644 --- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs +++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs @@ -6,6 +6,7 @@ using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; +using static Microsoft.Extensions.Caching.Hybrid.Internal.DefaultHybridCache; namespace Microsoft.Extensions.Caching.Hybrid.Internal; @@ -175,29 +176,64 @@ private async Task BackgroundFetchAsync() // nothing from L2; invoke the underlying data store if ((Key.Flags & HybridCacheEntryFlags.DisableUnderlyingData) == 0) { - var cacheItem = SetResult(await _underlying!(_state!, 
SharedToken).ConfigureAwait(false)); - - // note that at this point we've already released most or all of the waiting callers; everything - // else here is background - - // write to L2 if appropriate - if ((Key.Flags & HybridCacheEntryFlags.DisableDistributedCacheWrite) == 0) + // invoke the callback supplied by the caller + var newValue = await _underlying!(_state!, SharedToken).ConfigureAwait(false); + + // If we're writing this value *anywhere*, we're going to need to serialize; this is obvious + // in the case of L2, but we also need it for L1, because MemoryCache might be enforcing + // SizeLimit (we can't know - it is an abstraction), and for *that* we need to know the item size. + // Likewise, if we're writing to a MutableCacheItem, we'll be serializing *anyway* for the payload. + // + // Rephrasing that: the only scenario in which we *do not* need to serialize is if: + // - it is an ImmutableCacheItem + // - we're writing neither to L1 nor L2 + + const HybridCacheEntryFlags DisableL1AndL2 = HybridCacheEntryFlags.DisableLocalCacheWrite | HybridCacheEntryFlags.DisableDistributedCacheWrite; + var cacheItem = CacheItem; + bool skipSerialize = cacheItem is ImmutableCacheItem && (Key.Flags & DisableL1AndL2) == DisableL1AndL2; + + if (skipSerialize) { - if (cacheItem.TryReserveBuffer(out var buffer)) - { - // mutable: we've already serialized it for the shared cache item - await Cache.SetL2Async(Key.Key, in buffer, _options, SharedToken).ConfigureAwait(false); - _ = cacheItem.Release(); // because we reserved - } - else if (cacheItem.TryGetValue(out var value)) + SetImmutableResultWithoutSerialize(newValue); + } + else if (cacheItem.TryReserve()) + { + // ^^^ The first thing we need to do is make sure we're not getting into a thread race over buffer disposal. + // In particular, if this cache item is somehow so short-lived that the buffers would be released *before* we're + // done writing them to L2, which happens *after* we've provided the value to consumers. + var writer = RecyclableArrayBufferWriter.Create(MaximumPayloadBytes); // note this lifetime spans the SetL2Async + var serializer = Cache.GetSerializer(); + serializer.Serialize(newValue, writer); + BufferChunk buffer = new(writer.DetachCommitted(out var length), length, returnToPool: true); // remove buffer ownership from the writer + writer.Dispose(); // we're done with the writer + + // protect "buffer" (this is why we "reserved"); we don't want SetResult to nuke our local + var snapshot = buffer; + SetResultPreSerialized(newValue, ref snapshot, serializer); + + // Note that at this point we've already released most or all of the waiting callers. Everything + // from this point onwards happens in the background, from the perspective of the calling code. + + // Write to L2 if appropriate. + if ((Key.Flags & HybridCacheEntryFlags.DisableDistributedCacheWrite) == 0) { - // immutable: we'll need to do the serialize ourselves - var writer = RecyclableArrayBufferWriter.Create(MaximumPayloadBytes); // note this lifetime spans the SetL2Async - Cache.GetSerializer().Serialize(value, writer); - buffer = new(writer.GetBuffer(out var length), length, returnToPool: false); // writer still owns the buffer + // We already have the payload serialized, so this is trivial to do. await Cache.SetL2Async(Key.Key, in buffer, _options, SharedToken).ConfigureAwait(false); - writer.Dispose(); // recycle on success } + + // Release our hook on the CacheItem (only really important for "mutable"). 
+ _ = cacheItem.Release(); + + // Finally, recycle whatever was left over from SetResultPreSerialized; using "snapshot" + // here is NOT a typo; if SetResultPreSerialized left this value alone (immutable), then + // this is our recycle step; if SetResultPreSerialized transferred ownership to the (mutable) + // CacheItem, then this becomes a no-op, and the buffer only gets fully recycled when the + // CacheItem itself is fully clear. + snapshot.RecycleIfAppropriate(); + } + else + { + throw new InvalidOperationException("Internal HybridCache failure: unable to reserve cache item to assign result"); } } else @@ -243,7 +279,7 @@ private void SetResultAndRecycleIfAppropriate(ref BufferChunk value) { case ImmutableCacheItem immutable: // deserialize; and store object; buffer can be recycled now - immutable.SetValue(serializer.Deserialize(new(value.Array!, 0, value.Length))); + immutable.SetValue(serializer.Deserialize(new(value.Array!, 0, value.Length)), value.Length); value.RecycleIfAppropriate(); cacheItem = immutable; break; @@ -261,7 +297,7 @@ private void SetResultAndRecycleIfAppropriate(ref BufferChunk value) SetResult(cacheItem); } - private CacheItem SetResult(T value) + private void SetImmutableResultWithoutSerialize(T value) { // set a result from a value we calculated directly CacheItem cacheItem; @@ -269,12 +305,33 @@ private CacheItem SetResult(T value) { case ImmutableCacheItem immutable: // no serialize needed - immutable.SetValue(value); + immutable.SetValue(value, size: -1); + cacheItem = immutable; + break; + default: + cacheItem = ThrowUnexpectedCacheItem(); + break; + } + + SetResult(cacheItem); + } + + private void SetResultPreSerialized(T value, ref BufferChunk buffer, IHybridCacheSerializer serializer) + { + // set a result from a value we calculated directly that + // has ALREADY BEEN SERIALIZED (we can optionally consume this buffer) + CacheItem cacheItem; + switch (CacheItem) + { + case ImmutableCacheItem immutable: + // no serialize needed + immutable.SetValue(value, size: buffer.Length); cacheItem = immutable; + + // (but leave the buffer alone) break; case MutableCacheItem mutable: - // serialization happens here - mutable.SetValue(value, Cache.GetSerializer(), MaximumPayloadBytes); + mutable.SetValue(ref buffer, serializer); mutable.DebugOnlyTrackBuffer(Cache); cacheItem = mutable; break; @@ -284,7 +341,6 @@ private CacheItem SetResult(T value) } SetResult(cacheItem); - return cacheItem; } private void SetResult(CacheItem value) diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs new file mode 100644 index 00000000000..5f03e29f7a6 --- /dev/null +++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs @@ -0,0 +1,95 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using Microsoft.Extensions.Caching.Hybrid.Internal; +using Microsoft.Extensions.DependencyInjection; + +namespace Microsoft.Extensions.Caching.Hybrid.Tests; + +public class SizeTests +{ + [Theory] + [InlineData(null, true)] // does not enforce size limits + [InlineData(8L, false)] // unreasonably small limt; chosen because our test string has length 12 - hence no expectation to find the second time + [InlineData(1024L, true)] // reasonable size limit + public async Task ValidateSizeLimit_Immutable(long? 
sizeLimit, bool expectFromL1) + { + var services = new ServiceCollection(); + services.AddMemoryCache(options => options.SizeLimit = sizeLimit); + services.AddHybridCache(); + using var provider = services.BuildServiceProvider(); + var cache = Assert.IsType(provider.GetRequiredService()); + + const string Key = "abc"; + + // this looks weird; it is intentionally not a const - we want to check + // same instance without worrying about interning from raw literals + string expected = new("simple value".ToArray()); + var actual = await cache.GetOrCreateAsync(Key, ct => new(expected)); + + // expect same contents + Assert.Equal(expected, actual); + + // expect same instance, because string is special-cased as a type + // that doesn't need defensive copies + Assert.Same(expected, actual); + + // rinse and repeat, to check we get the value from L1 + actual = await cache.GetOrCreateAsync(Key, ct => new(Guid.NewGuid().ToString())); + + if (expectFromL1) + { + // expect same contents from L1 + Assert.Equal(expected, actual); + + // expect same instance, because string is special-cased as a type + // that doesn't need defensive copies + Assert.Same(expected, actual); + } + else + { + // L1 cache not used + Assert.NotEqual(expected, actual); + } + } + + [Theory] + [InlineData(null, true)] // does not enforce size limits + [InlineData(8L, false)] // unreasonably small limt; chosen because our test string has length 12 - hence no expectation to find the second time + [InlineData(1024L, true)] // reasonable size limit + public async Task ValidateSizeLimit_Mutable(long? sizeLimit, bool expectFromL1) + { + var services = new ServiceCollection(); + services.AddMemoryCache(options => options.SizeLimit = sizeLimit); + services.AddHybridCache(); + using var provider = services.BuildServiceProvider(); + var cache = Assert.IsType(provider.GetRequiredService()); + + const string Key = "abc"; + + string expected = "simple value"; + var actual = await cache.GetOrCreateAsync(Key, ct => new(new MutablePoco { Value = expected })); + + // expect same contents + Assert.Equal(expected, actual.Value); + + // rinse and repeat, to check we get the value from L1 + actual = await cache.GetOrCreateAsync(Key, ct => new(new MutablePoco { Value = Guid.NewGuid().ToString() })); + + if (expectFromL1) + { + // expect same contents from L1 + Assert.Equal(expected, actual.Value); + } + else + { + // L1 cache not used + Assert.NotEqual(expected, actual.Value); + } + } + + public class MutablePoco + { + public string Value { get; set; } = ""; + } +} From c87b90516ef6845b99569ea83dc8573211e7f435 Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Thu, 12 Sep 2024 18:36:41 +0100 Subject: [PATCH 2/6] typos --- .../Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs index 5f03e29f7a6..31010e1a235 100644 --- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs +++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs @@ -10,7 +10,7 @@ public class SizeTests { [Theory] [InlineData(null, true)] // does not enforce size limits - [InlineData(8L, false)] // unreasonably small limt; chosen because our test string has length 12 - hence no expectation to find the second time + [InlineData(8L, false)] // unreasonably small limit; chosen because our test string has length 12 - hence no expectation to find 
the second time [InlineData(1024L, true)] // reasonable size limit public async Task ValidateSizeLimit_Immutable(long? sizeLimit, bool expectFromL1) { @@ -55,7 +55,7 @@ public async Task ValidateSizeLimit_Immutable(long? sizeLimit, bool expectFromL1 [Theory] [InlineData(null, true)] // does not enforce size limits - [InlineData(8L, false)] // unreasonably small limt; chosen because our test string has length 12 - hence no expectation to find the second time + [InlineData(8L, false)] // unreasonably small limit; chosen because our test string has length 12 - hence no expectation to find the second time [InlineData(1024L, true)] // reasonable size limit public async Task ValidateSizeLimit_Mutable(long? sizeLimit, bool expectFromL1) { From 30f96c60e8ce49f07b37e8ae89505f25865ef15d Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Fri, 13 Sep 2024 09:11:57 +0100 Subject: [PATCH 3/6] clarify that _underlying returns T --- .../Internal/DefaultHybridCache.StampedeStateT.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs index 403b5c894e1..5304e353152 100644 --- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs +++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs @@ -177,7 +177,7 @@ private async Task BackgroundFetchAsync() if ((Key.Flags & HybridCacheEntryFlags.DisableUnderlyingData) == 0) { // invoke the callback supplied by the caller - var newValue = await _underlying!(_state!, SharedToken).ConfigureAwait(false); + T newValue = await _underlying!(_state!, SharedToken).ConfigureAwait(false); // If we're writing this value *anywhere*, we're going to need to serialize; this is obvious // in the case of L2, but we also need it for L1, because MemoryCache might be enforcing From 5e3bdbafb8f2b59c811c7d66fb225a11ef379e2a Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 18 Sep 2024 11:51:22 +0100 Subject: [PATCH 4/6] fewer vars; more explicit types (PR feedback) --- .../Internal/DefaultHybridCache.L2.cs | 4 ++-- .../DefaultHybridCache.StampedeStateT.cs | 10 +++++----- .../BufferReleaseTests.cs | 10 +++++----- .../DistributedCacheTests.cs | 4 ++-- .../FunctionalTests.cs | 2 +- .../L2Tests.cs | 2 +- .../RedisTests.cs | 2 +- .../SampleUsage.cs | 8 ++++---- .../ServiceConstructionTests.cs | 20 +++++++++---------- .../SizeTests.cs | 4 ++-- .../StampedeTests.cs | 2 +- 11 files changed, 34 insertions(+), 34 deletions(-) diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.L2.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.L2.cs index 2bcbe901315..1e694448737 100644 --- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.L2.cs +++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.L2.cs @@ -34,7 +34,7 @@ internal ValueTask GetFromL2Async(string key, CancellationToken tok return new(GetValidPayloadSegment(pendingLegacy.Result)); // already complete case CacheFeatures.BackendCache | CacheFeatures.BackendBuffers: // IBufferWriter-based - var writer = RecyclableArrayBufferWriter.Create(MaximumPayloadBytes); + RecyclableArrayBufferWriter writer = RecyclableArrayBufferWriter.Create(MaximumPayloadBytes); var cache = Unsafe.As(_backendCache!); // type-checked already var 
pendingBuffers = cache.TryGetAsync(key, writer, token); if (!pendingBuffers.IsCompletedSuccessfully) @@ -99,7 +99,7 @@ internal void SetL1(string key, CacheItem value, HybridCacheEntryOptions? // intentionally use manual Dispose rather than "using"; confusingly, it is Dispose() // that actually commits the add - so: if we fault, we don't want to try // committing a partially configured cache entry - var cacheEntry = _localCache.CreateEntry(key); + ICacheEntry cacheEntry = _localCache.CreateEntry(key); cacheEntry.AbsoluteExpirationRelativeToNow = options?.LocalCacheExpiration ?? _defaultLocalCacheExpiration; cacheEntry.Value = value; diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs index 5304e353152..a24584b8275 100644 --- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs +++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs @@ -189,7 +189,7 @@ private async Task BackgroundFetchAsync() // - we're writing neither to L1 nor L2 const HybridCacheEntryFlags DisableL1AndL2 = HybridCacheEntryFlags.DisableLocalCacheWrite | HybridCacheEntryFlags.DisableDistributedCacheWrite; - var cacheItem = CacheItem; + CacheItem cacheItem = CacheItem; bool skipSerialize = cacheItem is ImmutableCacheItem && (Key.Flags & DisableL1AndL2) == DisableL1AndL2; if (skipSerialize) @@ -201,14 +201,14 @@ private async Task BackgroundFetchAsync() // ^^^ The first thing we need to do is make sure we're not getting into a thread race over buffer disposal. // In particular, if this cache item is somehow so short-lived that the buffers would be released *before* we're // done writing them to L2, which happens *after* we've provided the value to consumers. - var writer = RecyclableArrayBufferWriter.Create(MaximumPayloadBytes); // note this lifetime spans the SetL2Async - var serializer = Cache.GetSerializer(); + RecyclableArrayBufferWriter writer = RecyclableArrayBufferWriter.Create(MaximumPayloadBytes); // note this lifetime spans the SetL2Async + IHybridCacheSerializer serializer = Cache.GetSerializer(); serializer.Serialize(newValue, writer); BufferChunk buffer = new(writer.DetachCommitted(out var length), length, returnToPool: true); // remove buffer ownership from the writer writer.Dispose(); // we're done with the writer // protect "buffer" (this is why we "reserved"); we don't want SetResult to nuke our local - var snapshot = buffer; + BufferChunk snapshot = buffer; SetResultPreSerialized(newValue, ref snapshot, serializer); // Note that at this point we've already released most or all of the waiting callers. 
Everything @@ -273,7 +273,7 @@ private void SetResultAndRecycleIfAppropriate(ref BufferChunk value) // set a result from L2 cache Debug.Assert(value.Array is not null, "expected buffer"); - var serializer = Cache.GetSerializer(); + IHybridCacheSerializer serializer = Cache.GetSerializer(); CacheItem cacheItem; switch (CacheItem) { diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/BufferReleaseTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/BufferReleaseTests.cs index 3318a86fd70..4996406c09a 100644 --- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/BufferReleaseTests.cs +++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/BufferReleaseTests.cs @@ -19,7 +19,7 @@ private static ServiceProvider GetDefaultCache(out DefaultHybridCache cache, Act var services = new ServiceCollection(); config?.Invoke(services); services.AddHybridCache(); - var provider = services.BuildServiceProvider(); + ServiceProvider provider = services.BuildServiceProvider(); cache = Assert.IsType(provider.GetRequiredService()); return provider; } @@ -117,8 +117,8 @@ private static bool Write(IBufferWriter destination, byte[]? buffer) // prep the backend with our data var key = Me(); Assert.NotNull(cache.BackendCache); - var serializer = cache.GetSerializer(); - using (var writer = RecyclableArrayBufferWriter.Create(int.MaxValue)) + IHybridCacheSerializer serializer = cache.GetSerializer(); + using (RecyclableArrayBufferWriter writer = RecyclableArrayBufferWriter.Create(int.MaxValue)) { serializer.Serialize(await GetAsync(), writer); cache.BackendCache.Set(key, writer.ToArray()); @@ -176,8 +176,8 @@ private static bool Write(IBufferWriter destination, byte[]? buffer) // prep the backend with our data var key = Me(); Assert.NotNull(cache.BackendCache); - var serializer = cache.GetSerializer(); - using (var writer = RecyclableArrayBufferWriter.Create(int.MaxValue)) + IHybridCacheSerializer serializer = cache.GetSerializer(); + using (RecyclableArrayBufferWriter writer = RecyclableArrayBufferWriter.Create(int.MaxValue)) { serializer.Serialize(await GetAsync(), writer); cache.BackendCache.Set(key, writer.ToArray()); diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/DistributedCacheTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/DistributedCacheTests.cs index 4f3766990cc..5a565866f63 100644 --- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/DistributedCacheTests.cs +++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/DistributedCacheTests.cs @@ -185,7 +185,7 @@ public async Task ReadOnlySequenceBufferRoundtrip(int size, SequenceKind kind) Assert.Equal(size, expected.Length); cache.Set(key, payload, _fiveMinutes); - var writer = RecyclableArrayBufferWriter.Create(int.MaxValue); + RecyclableArrayBufferWriter writer = RecyclableArrayBufferWriter.Create(int.MaxValue); Assert.True(cache.TryGet(key, writer)); Assert.True(expected.Span.SequenceEqual(writer.GetCommittedMemory().Span)); writer.ResetInPlace(); @@ -247,7 +247,7 @@ public async Task ReadOnlySequenceBufferRoundtripAsync(int size, SequenceKind ki Assert.Equal(size, expected.Length); await cache.SetAsync(key, payload, _fiveMinutes); - var writer = RecyclableArrayBufferWriter.Create(int.MaxValue); + RecyclableArrayBufferWriter writer = RecyclableArrayBufferWriter.Create(int.MaxValue); Assert.True(await cache.TryGetAsync(key, writer)); Assert.True(expected.Span.SequenceEqual(writer.GetCommittedMemory().Span)); writer.ResetInPlace(); diff --git 
a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/FunctionalTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/FunctionalTests.cs index 4cacdd59f6f..5edd99722ac 100644 --- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/FunctionalTests.cs +++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/FunctionalTests.cs @@ -13,7 +13,7 @@ private static ServiceProvider GetDefaultCache(out DefaultHybridCache cache, Act var services = new ServiceCollection(); config?.Invoke(services); services.AddHybridCache(); - var provider = services.BuildServiceProvider(); + ServiceProvider provider = services.BuildServiceProvider(); cache = Assert.IsType(provider.GetRequiredService()); return provider; } diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/L2Tests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/L2Tests.cs index bf1f7a35fee..850c6a054b9 100644 --- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/L2Tests.cs +++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/L2Tests.cs @@ -38,7 +38,7 @@ private ServiceProvider GetDefaultCache(bool buffers, out DefaultHybridCache cac var localCache = new MemoryDistributedCache(localCacheOptions); services.AddSingleton(buffers ? new BufferLoggingCache(Log, localCache) : new LoggingCache(Log, localCache)); services.AddHybridCache(); - var provider = services.BuildServiceProvider(); + ServiceProvider provider = services.BuildServiceProvider(); cache = Assert.IsType(provider.GetRequiredService()); return provider; } diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/RedisTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/RedisTests.cs index d482f566a16..86303044c48 100644 --- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/RedisTests.cs +++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/RedisTests.cs @@ -45,7 +45,7 @@ public async Task BasicUsage(bool useBuffers) var services = new ServiceCollection(); await ConfigureAsync(services); services.AddHybridCache(); - var provider = services.BuildServiceProvider(); // not "using" - that will tear down our redis; use the fixture for that + ServiceProvider provider = services.BuildServiceProvider(); // not "using" - that will tear down our redis; use the fixture for that var cache = Assert.IsType(provider.GetRequiredService()); if (cache.BackendCache is null) diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SampleUsage.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SampleUsage.cs index 300fd6e4188..0172525b128 100644 --- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SampleUsage.cs +++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SampleUsage.cs @@ -16,7 +16,7 @@ public async Task DistributedCacheWorks() var services = new ServiceCollection(); services.AddDistributedMemoryCache(); services.AddTransient(); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); var obj = provider.GetRequiredService(); string name = "abc"; @@ -36,7 +36,7 @@ public async Task HybridCacheWorks() var services = new ServiceCollection(); services.AddHybridCache(); services.AddTransient(); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); var obj = provider.GetRequiredService(); string name = "abc"; @@ -56,7 +56,7 @@ public async Task HybridCacheNoCaptureWorks() var services = new 
ServiceCollection(); services.AddHybridCache(); services.AddTransient(); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); var obj = provider.GetRequiredService(); string name = "abc"; @@ -76,7 +76,7 @@ public async Task HybridCacheNoCaptureObjReuseWorks() var services = new ServiceCollection(); services.AddHybridCache(); services.AddTransient(); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); var obj = provider.GetRequiredService(); string name = "abc"; diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/ServiceConstructionTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/ServiceConstructionTests.cs index 14f18e72c1e..decc47d3964 100644 --- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/ServiceConstructionTests.cs +++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/ServiceConstructionTests.cs @@ -27,7 +27,7 @@ public void CanCreateDefaultService() { var services = new ServiceCollection(); services.AddHybridCache(); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); Assert.IsType(provider.GetService()); } @@ -40,7 +40,7 @@ public void CanCreateServiceWithManualOptions() options.MaximumKeyLength = 937; options.DefaultEntryOptions = new() { Expiration = TimeSpan.FromSeconds(120), Flags = HybridCacheEntryFlags.DisableLocalCacheRead }; }); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); var obj = Assert.IsType(provider.GetService()); var options = obj.Options; Assert.Equal(937, options.MaximumKeyLength); @@ -88,7 +88,7 @@ public async Task BasicStatelessUsage() { var services = new ServiceCollection(); services.AddHybridCache(); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); var cache = provider.GetRequiredService(); var expected = Guid.NewGuid().ToString(); @@ -101,7 +101,7 @@ public async Task BasicStatefulUsage() { var services = new ServiceCollection(); services.AddHybridCache(); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); var cache = provider.GetRequiredService(); var expected = Guid.NewGuid().ToString(); @@ -114,7 +114,7 @@ public void DefaultSerializerConfiguration() { var services = new ServiceCollection(); services.AddHybridCache(); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); var cache = Assert.IsType(provider.GetRequiredService()); Assert.IsType(cache.GetSerializer()); @@ -128,7 +128,7 @@ public void CustomSerializerConfiguration() { var services = new ServiceCollection(); services.AddHybridCache().AddSerializer(); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); var cache = Assert.IsType(provider.GetRequiredService()); Assert.IsType(cache.GetSerializer()); @@ -140,7 +140,7 @@ public void CustomSerializerFactoryConfiguration() { var services = new ServiceCollection(); services.AddHybridCache().AddSerializerFactory(); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); var cache = Assert.IsType(provider.GetRequiredService()); 
Assert.IsType(cache.GetSerializer()); @@ -163,7 +163,7 @@ public void DefaultMemoryDistributedCacheIsIgnored(bool manual) } services.AddHybridCache(); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); var cache = Assert.IsType(provider.GetRequiredService()); Assert.Null(cache.BackendCache); @@ -175,7 +175,7 @@ public void SubclassMemoryDistributedCacheIsNotIgnored() var services = new ServiceCollection(); services.AddSingleton(); services.AddHybridCache(); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); var cache = Assert.IsType(provider.GetRequiredService()); Assert.NotNull(cache.BackendCache); @@ -198,7 +198,7 @@ public void SubclassMemoryCacheIsNotIgnored(bool manual) services.AddSingleton(); services.AddHybridCache(); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); var cache = Assert.IsType(provider.GetRequiredService()); Assert.NotNull(cache.BackendCache); diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs index 31010e1a235..119c2297882 100644 --- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs +++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs @@ -17,7 +17,7 @@ public async Task ValidateSizeLimit_Immutable(long? sizeLimit, bool expectFromL1 var services = new ServiceCollection(); services.AddMemoryCache(options => options.SizeLimit = sizeLimit); services.AddHybridCache(); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); var cache = Assert.IsType(provider.GetRequiredService()); const string Key = "abc"; @@ -62,7 +62,7 @@ public async Task ValidateSizeLimit_Mutable(long? 
sizeLimit, bool expectFromL1) var services = new ServiceCollection(); services.AddMemoryCache(options => options.SizeLimit = sizeLimit); services.AddHybridCache(); - using var provider = services.BuildServiceProvider(); + using ServiceProvider provider = services.BuildServiceProvider(); var cache = Assert.IsType(provider.GetRequiredService()); const string Key = "abc"; diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/StampedeTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/StampedeTests.cs index b804fad503f..4680f589f98 100644 --- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/StampedeTests.cs +++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/StampedeTests.cs @@ -24,7 +24,7 @@ private static ServiceProvider GetDefaultCache(out DefaultHybridCache cache) Flags = HybridCacheEntryFlags.DisableDistributedCache | HybridCacheEntryFlags.DisableLocalCache }; }); - var provider = services.BuildServiceProvider(); + ServiceProvider provider = services.BuildServiceProvider(); cache = Assert.IsType(provider.GetRequiredService()); return provider; } From 2f35529b3d3b6c5e4bfdf101a7981abfd14fb94e Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 18 Sep 2024 15:35:15 +0100 Subject: [PATCH 5/6] PR feedback (mostly comments, extra Debug.Assert) --- .../DefaultHybridCache.StampedeStateT.cs | 30 +++++++++++++------ 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs index a24584b8275..82210d43097 100644 --- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs +++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs @@ -14,8 +14,7 @@ internal partial class DefaultHybridCache { internal sealed class StampedeState : StampedeState { - [DoesNotReturn] - private static CacheItem ThrowUnexpectedCacheItem() => throw new InvalidOperationException("Unexpected cache item"); + const HybridCacheEntryFlags FlagsDisableL1AndL2 = HybridCacheEntryFlags.DisableLocalCacheWrite | HybridCacheEntryFlags.DisableDistributedCacheWrite; private readonly TaskCompletionSource>? _result; private TState? _state; @@ -41,6 +40,9 @@ public StampedeState(DefaultHybridCache cache, in StampedeKey key, CancellationT public override Type Type => typeof(T); + [DoesNotReturn] + private static CacheItem ThrowUnexpectedCacheItem() => throw new InvalidOperationException("Unexpected cache item"); + public void QueueUserWorkItem(in TState state, Func> underlying, HybridCacheEntryOptions? 
options) { Debug.Assert(_underlying is null, "should not already have factory field"); @@ -188,9 +190,8 @@ private async Task BackgroundFetchAsync() // - it is an ImmutableCacheItem // - we're writing neither to L1 nor L2 - const HybridCacheEntryFlags DisableL1AndL2 = HybridCacheEntryFlags.DisableLocalCacheWrite | HybridCacheEntryFlags.DisableDistributedCacheWrite; CacheItem cacheItem = CacheItem; - bool skipSerialize = cacheItem is ImmutableCacheItem && (Key.Flags & DisableL1AndL2) == DisableL1AndL2; + bool skipSerialize = cacheItem is ImmutableCacheItem && (Key.Flags & FlagsDisableL1AndL2) == FlagsDisableL1AndL2; if (skipSerialize) { @@ -207,9 +208,18 @@ private async Task BackgroundFetchAsync() BufferChunk buffer = new(writer.DetachCommitted(out var length), length, returnToPool: true); // remove buffer ownership from the writer writer.Dispose(); // we're done with the writer - // protect "buffer" (this is why we "reserved"); we don't want SetResult to nuke our local - BufferChunk snapshot = buffer; - SetResultPreSerialized(newValue, ref snapshot, serializer); + // protect "buffer" (this is why we "reserved") for writing to L2 if needed; SetResultPreSerialized + // *may* (depending on context) claim this buffer, in which case "bufferToRelease" gets reset, and + // the final RecycleIfAppropriate() is a no-op; however, the buffer is valid in either event, + // (with TryReserve above guaranteeing that we aren't in a race condition). + BufferChunk bufferToRelease = buffer; + + // and since "bufferToRelease" is the thing that will be returned at some point, we can make it explicit + // that we do not need or want "buffer" to do any recycling (they're the same memory) + buffer = buffer.DoNotReturnToPool(); + + // set the underlying result for this operation (includes L1 write if appropriate) + SetResultPreSerialized(newValue, ref bufferToRelease, serializer); // Note that at this point we've already released most or all of the waiting callers. Everything // from this point onwards happens in the background, from the perspective of the calling code. @@ -224,12 +234,12 @@ private async Task BackgroundFetchAsync() // Release our hook on the CacheItem (only really important for "mutable"). _ = cacheItem.Release(); - // Finally, recycle whatever was left over from SetResultPreSerialized; using "snapshot" + // Finally, recycle whatever was left over from SetResultPreSerialized; using "bufferToRelease" // here is NOT a typo; if SetResultPreSerialized left this value alone (immutable), then // this is our recycle step; if SetResultPreSerialized transferred ownership to the (mutable) // CacheItem, then this becomes a no-op, and the buffer only gets fully recycled when the // CacheItem itself is fully clear. 
- snapshot.RecycleIfAppropriate(); + bufferToRelease.RecycleIfAppropriate(); } else { @@ -299,6 +309,8 @@ private void SetResultAndRecycleIfAppropriate(ref BufferChunk value) private void SetImmutableResultWithoutSerialize(T value) { + Debug.Assert((Key.Flags & FlagsDisableL1AndL2) == FlagsDisableL1AndL2, "Only expected if L1+L2 disabled"); + // set a result from a value we calculated directly CacheItem cacheItem; switch (CacheItem) From f0da5afb19d4d2aea49241d8380cbe588225321d Mon Sep 17 00:00:00 2001 From: Marc Gravell Date: Wed, 18 Sep 2024 16:00:30 +0100 Subject: [PATCH 6/6] build warn should be err ;p --- .../Internal/DefaultHybridCache.StampedeStateT.cs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs index 82210d43097..4e45acae930 100644 --- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs +++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs @@ -14,7 +14,7 @@ internal partial class DefaultHybridCache { internal sealed class StampedeState : StampedeState { - const HybridCacheEntryFlags FlagsDisableL1AndL2 = HybridCacheEntryFlags.DisableLocalCacheWrite | HybridCacheEntryFlags.DisableDistributedCacheWrite; + private const HybridCacheEntryFlags FlagsDisableL1AndL2 = HybridCacheEntryFlags.DisableLocalCacheWrite | HybridCacheEntryFlags.DisableDistributedCacheWrite; private readonly TaskCompletionSource>? _result; private TState? _state; @@ -40,9 +40,6 @@ public StampedeState(DefaultHybridCache cache, in StampedeKey key, CancellationT public override Type Type => typeof(T); - [DoesNotReturn] - private static CacheItem ThrowUnexpectedCacheItem() => throw new InvalidOperationException("Unexpected cache item"); - public void QueueUserWorkItem(in TState state, Func> underlying, HybridCacheEntryOptions? options) { Debug.Assert(_underlying is null, "should not already have factory field"); @@ -157,6 +154,9 @@ static async Task AwaitedAsync(Task> task) => (await task.ConfigureAwait(false)).GetReservedValue(); } + [DoesNotReturn] + private static CacheItem ThrowUnexpectedCacheItem() => throw new InvalidOperationException("Unexpected cache item"); + [SuppressMessage("Resilience", "EA0014:The async method doesn't support cancellation", Justification = "In this case the cancellation token is provided internally via SharedToken")] [SuppressMessage("Design", "CA1031:Do not catch general exception types", Justification = "Exception is passed through to faulted task result")] private async Task BackgroundFetchAsync()
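
Note on the behaviour being accommodated (not part of the patch series above): the reason patch 1 threads a size through `CacheItem<T>.TryGetSize` and into `SetL1` is that `MemoryCache` only enforces `MemoryCacheOptions.SizeLimit` when each entry declares a `Size`, and an entry whose declared size does not fit within the limit is simply not retained. Below is a minimal, standalone sketch of that `MemoryCache` behaviour; the key, value, and size are illustrative only.

```csharp
// Sketch only: shows why SetL1 must know the serialized payload size when a SizeLimit is in play.
using Microsoft.Extensions.Caching.Memory;

var cache = new MemoryCache(new MemoryCacheOptions { SizeLimit = 8 });

// Mirrors the CreateEntry/configure/Dispose pattern used in SetL1; Dispose() is what commits the add.
using (ICacheEntry entry = cache.CreateEntry("key"))
{
    entry.Value = "simple value"; // 12 characters
    entry.SetSize(12);            // declared size exceeds the 8-unit SizeLimit
}

// The entry was rejected by the size limit, so the lookup misses - no exception is thrown.
System.Console.WriteLine(cache.TryGetValue("key", out _)); // False
```

This matches the expectations encoded in SizeTests: with `SizeLimit = 8` the second `GetOrCreateAsync` call re-runs the factory because nothing was retained in L1, while with no limit (or a 1024 limit) the value is served from L1.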