Skip to content
This repository has been archived by the owner on Dec 14, 2018. It is now read-only.

Commit

Permalink
Separate out distributed memory cache to different PR
Browse files Browse the repository at this point in the history
Remove perf loop and just enqueue on the thread pool instead
  • Loading branch information
JunTaoLuo committed Jul 5, 2017
1 parent a6b33f7 commit ff010f1
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 69 deletions.
5 changes: 4 additions & 1 deletion Caching.sln
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.26228.9
VisualStudioVersion = 15.0.26621.2
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Extensions.Caching.Memory", "src\Microsoft.Extensions.Caching.Memory\Microsoft.Extensions.Caching.Memory.csproj", "{966D16D8-5D4E-4433-9DA7-F53EE44B7EE7}"
EndProject
Expand Down Expand Up @@ -243,4 +243,7 @@ Global
{17E332EB-D18D-4BF5-BCA5-989E36C78B79} = {459E1593-2C11-42CB-AD17-F7597E69E5D2}
{ADF83AC7-3776-4E62-A222-C6979C32DD2D} = {9E78AA8E-7870-46DE-A49F-856F5A0A9166}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {30FC4CB3-9789-4A49-939D-03398DBE03E0}
EndGlobalSection
EndGlobal
42 changes: 11 additions & 31 deletions src/Microsoft.Extensions.Caching.Memory/MemoryCache.cs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ namespace Microsoft.Extensions.Caching.Memory
public class MemoryCache : IMemoryCache
{
private readonly ConcurrentDictionary<object, CacheEntry> _entries;
private readonly SemaphoreSlim _compactionSemaphore = new SemaphoreSlim(initialCount: 1, maxCount: 1);
private long _cacheSize = 0;
private bool _disposed;

Expand Down Expand Up @@ -186,11 +185,19 @@ private void SetEntry(CacheEntry entry)
{
if (_options.EnforceSizeLimit)
{
Interlocked.Add(ref _cacheSize, entry.Size.Value);
var updatedCacheSize = Interlocked.Add(ref _cacheSize, entry.Size.Value);

if (Interlocked.Read(ref _cacheSize) > _options.HighWatermark)
if (updatedCacheSize > _options.HighWatermark)
{
TriggerOvercapacityCompaction();
// Spawn background thread for compaction
ThreadPool.QueueUserWorkItem(new WaitCallback(_ =>
{
var currentSize = Interlocked.Read(ref _cacheSize);
if (currentSize > _options.HighWatermark)
{
Compact(currentSize - _options.LowWatermark);
}
}));
}
}

Expand Down Expand Up @@ -328,33 +335,6 @@ private static void ScanForExpiredItems(MemoryCache cache)
}
}

private void TriggerOvercapacityCompaction()
{
if (!_compactionSemaphore.Wait(0))
{
// Another compaction is running, exit immediately.
// Avoid overpurging when multiple overcapacity compactions are triggered concurrently.
return;
}

ThreadPool.QueueUserWorkItem(new WaitCallback(_ =>
{
try
{
var currentSize = Interlocked.Read(ref _cacheSize);
while (currentSize > _options.HighWatermark)
{
Compact(currentSize - _options.LowWatermark);
currentSize = Interlocked.Read(ref _cacheSize);
}
}
finally
{
_compactionSemaphore.Release();
}
}));
}

/// Remove at least the given percentage (0.10 for 10%) of the total entries (or estimated memory?), according to the following policy:
/// 1. Remove all expired items.
/// 2. Bucket by CacheItemPriority.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,43 +80,7 @@ public static IServiceCollection AddDistributedMemoryCache(this IServiceCollecti
throw new ArgumentNullException(nameof(services));
}

return services.AddDistributedMemoryCache(_ => { });
}

/// <summary>
/// Adds a default implementation of <see cref="IDistributedCache"/> that stores items in memory
/// to the <see cref="IServiceCollection" />. Frameworks that require a distributed cache to work
/// can safely add this dependency as part of their dependency list to ensure that there is at least
/// one implementation available.
/// </summary>
/// <remarks>
/// <see cref="AddDistributedMemoryCache(IServiceCollection)"/> should only be used in single
/// server scenarios as this cache stores items in memory and doesn't expand across multiple machines.
/// For those scenarios it is recommended to use a proper distributed cache that can expand across
/// multiple machines.
/// </remarks>
/// <param name="services">The <see cref="IServiceCollection" /> to add services to.</param>
/// <param name="setupAction">
/// The <see cref="Action{MemoryCacheOptions}"/> to configure the <see cref="MemoryCacheOptions"/> that is used by the <see cref="MemoryDistributedCache"/>.
/// </param>
/// <returns>The <see cref="IServiceCollection"/> so that additional calls can be chained.</returns>
public static IServiceCollection AddDistributedMemoryCache(this IServiceCollection services, Action<MemoryCacheOptions> setupAction)
{
if (services == null)
{
throw new ArgumentNullException(nameof(services));
}

if (setupAction == null)
{
throw new ArgumentNullException(nameof(services));
}

var memoryCacheOptions = new MemoryCacheOptions();
setupAction(memoryCacheOptions);

services.TryAddSingleton<IDistributedCache>(new MemoryDistributedCache(new MemoryCache(memoryCacheOptions)));

services.TryAddSingleton<IDistributedCache>(new MemoryDistributedCache(new MemoryCache(new MemoryCacheOptions())));
return services;
}
}
Expand Down

0 comments on commit ff010f1

Please sign in to comment.