From ff010f11dcb503ac775e19ba853393e3908c3513 Mon Sep 17 00:00:00 2001 From: John Luo Date: Wed, 5 Jul 2017 13:32:24 -0700 Subject: [PATCH] Separate out distributed memory cache to different PR Remove perf loop and just enqueue on the thread pool instead --- Caching.sln | 5 ++- .../MemoryCache.cs | 42 +++++-------------- .../MemoryCacheServiceCollectionExtensions.cs | 38 +---------------- 3 files changed, 16 insertions(+), 69 deletions(-) diff --git a/Caching.sln b/Caching.sln index 992db086..5a410e2e 100644 --- a/Caching.sln +++ b/Caching.sln @@ -1,6 +1,6 @@ Microsoft Visual Studio Solution File, Format Version 12.00 # Visual Studio 15 -VisualStudioVersion = 15.0.26228.9 +VisualStudioVersion = 15.0.26621.2 MinimumVisualStudioVersion = 10.0.40219.1 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Extensions.Caching.Memory", "src\Microsoft.Extensions.Caching.Memory\Microsoft.Extensions.Caching.Memory.csproj", "{966D16D8-5D4E-4433-9DA7-F53EE44B7EE7}" EndProject @@ -243,4 +243,7 @@ Global {17E332EB-D18D-4BF5-BCA5-989E36C78B79} = {459E1593-2C11-42CB-AD17-F7597E69E5D2} {ADF83AC7-3776-4E62-A222-C6979C32DD2D} = {9E78AA8E-7870-46DE-A49F-856F5A0A9166} EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {30FC4CB3-9789-4A49-939D-03398DBE03E0} + EndGlobalSection EndGlobal diff --git a/src/Microsoft.Extensions.Caching.Memory/MemoryCache.cs b/src/Microsoft.Extensions.Caching.Memory/MemoryCache.cs index 7c7e22dc..74e3b895 100644 --- a/src/Microsoft.Extensions.Caching.Memory/MemoryCache.cs +++ b/src/Microsoft.Extensions.Caching.Memory/MemoryCache.cs @@ -19,7 +19,6 @@ namespace Microsoft.Extensions.Caching.Memory public class MemoryCache : IMemoryCache { private readonly ConcurrentDictionary _entries; - private readonly SemaphoreSlim _compactionSemaphore = new SemaphoreSlim(initialCount: 1, maxCount: 1); private long _cacheSize = 0; private bool _disposed; @@ -186,11 +185,19 @@ private void SetEntry(CacheEntry entry) { if
(_options.EnforceSizeLimit) { - Interlocked.Add(ref _cacheSize, entry.Size.Value); + var updatedCacheSize = Interlocked.Add(ref _cacheSize, entry.Size.Value); - if (Interlocked.Read(ref _cacheSize) > _options.HighWatermark) + if (updatedCacheSize > _options.HighWatermark) { - TriggerOvercapacityCompaction(); + // Spawn background thread for compaction + ThreadPool.QueueUserWorkItem(new WaitCallback(_ => + { + var currentSize = Interlocked.Read(ref _cacheSize); + if (currentSize > _options.HighWatermark) + { + Compact(currentSize - _options.LowWatermark); + } + })); } } @@ -328,33 +335,6 @@ private static void ScanForExpiredItems(MemoryCache cache) } } - private void TriggerOvercapacityCompaction() - { - if (!_compactionSemaphore.Wait(0)) - { - // Another compaction is running, exit immediately. - // Avoid overpurging when multiple overcapacity compactions are triggered concurrently. - return; - } - - ThreadPool.QueueUserWorkItem(new WaitCallback(_ => - { - try - { - var currentSize = Interlocked.Read(ref _cacheSize); - while (currentSize > _options.HighWatermark) - { - Compact(currentSize - _options.LowWatermark); - currentSize = Interlocked.Read(ref _cacheSize); - } - } - finally - { - _compactionSemaphore.Release(); - } - })); - } - /// Remove at least the given percentage (0.10 for 10%) of the total entries (or estimated memory?), according to the following policy: /// 1. Remove all expired items. /// 2. Bucket by CacheItemPriority. 
diff --git a/src/Microsoft.Extensions.Caching.Memory/MemoryCacheServiceCollectionExtensions.cs b/src/Microsoft.Extensions.Caching.Memory/MemoryCacheServiceCollectionExtensions.cs index 76c1d175..6145cda1 100644 --- a/src/Microsoft.Extensions.Caching.Memory/MemoryCacheServiceCollectionExtensions.cs +++ b/src/Microsoft.Extensions.Caching.Memory/MemoryCacheServiceCollectionExtensions.cs @@ -80,43 +80,7 @@ public static IServiceCollection AddDistributedMemoryCache(this IServiceCollecti throw new ArgumentNullException(nameof(services)); } - return services.AddDistributedMemoryCache(_ => { }); - } - - /// - /// Adds a default implementation of that stores items in memory - /// to the . Frameworks that require a distributed cache to work - /// can safely add this dependency as part of their dependency list to ensure that there is at least - /// one implementation available. - /// - /// - /// should only be used in single - /// server scenarios as this cache stores items in memory and doesn't expand across multiple machines. - /// For those scenarios it is recommended to use a proper distributed cache that can expand across - /// multiple machines. - /// - /// The to add services to. - /// - /// The to configure the that is used by the . - /// - /// The so that additional calls can be chained. - public static IServiceCollection AddDistributedMemoryCache(this IServiceCollection services, Action setupAction) - { - if (services == null) - { - throw new ArgumentNullException(nameof(services)); - } - - if (setupAction == null) - { - throw new ArgumentNullException(nameof(services)); - } - - var memoryCacheOptions = new MemoryCacheOptions(); - setupAction(memoryCacheOptions); - - services.TryAddSingleton(new MemoryDistributedCache(new MemoryCache(memoryCacheOptions))); - + services.TryAddSingleton(new MemoryDistributedCache(new MemoryCache(new MemoryCacheOptions()))); return services; } }