Optimize cache to lock per key
jjxtra committed Sep 20, 2020
1 parent b53ccd9 commit 04abe50
Showing 1 changed file with 22 additions and 13 deletions.
35 changes: 22 additions & 13 deletions LazyCache/CachingService.cs
@@ -13,7 +13,7 @@ public class CachingService : IAppCache
{
private readonly Lazy<ICacheProvider> cacheProvider;

- private readonly SemaphoreSlim locker = new SemaphoreSlim(1, 1);
+ private readonly int[] keyLocks = new int[8192];

public CachingService() : this(DefaultCacheProvider)
{
@@ -104,14 +104,17 @@ object CacheFactory(ICacheEntry entry) =>
return result;
});

- locker.Wait(); //TODO: do we really need this? Could we just lock on the key? like this? https://github.com/zkSNACKs/WalletWasabi/blob/7780db075685d2dc13620e0bcf6cc07578b627c2/WalletWasabi/Extensions/MemoryExtensions.cs
+ // acquire lock per key
+ uint hash = (uint)key.GetHashCode() % (uint)keyLocks.Length;
+ while (Interlocked.CompareExchange(ref keyLocks[hash], 1, 0) == 1) { Thread.Yield(); }

try
{
cacheItem = CacheProvider.GetOrCreate<object>(key, policy, CacheFactory);
}
finally
{
- locker.Release();
+ keyLocks[hash] = 0;
}

try
@@ -122,14 +125,18 @@ object CacheFactory(ICacheEntry entry) =>
if (valueHasChangedType)
{
CacheProvider.Remove(key);
- locker.Wait(); //TODO: do we really need this? Could we just lock on the key?

+ // acquire lock again
+ hash = (uint)key.GetHashCode() % (uint)keyLocks.Length;
+ while (Interlocked.CompareExchange(ref keyLocks[hash], 1, 0) == 1) { Thread.Yield(); }

try
{
cacheItem = CacheProvider.GetOrCreate<object>(key, CacheFactory);
}
finally
{
- locker.Release();
+ keyLocks[hash] = 0;
}
result = GetValueFromLazy<T>(cacheItem, out _ /* we just evicted so type change cannot happen this time */);
}
@@ -176,9 +183,9 @@ public virtual async Task<T> GetOrAddAsync<T>(string key, Func<ICacheEntry, Task
// below, and guarded using the async lazy. Here we just ensure only one thread can place
// the AsyncLazy into the cache at one time

- await locker.WaitAsync()
-     .ConfigureAwait(
-         false); //TODO: do we really need to lock everything here - faster if we could lock on just the key?
+ // acquire lock
+ uint hash = (uint)key.GetHashCode() % (uint)keyLocks.Length;
+ while (Interlocked.CompareExchange(ref keyLocks[hash], 1, 0) == 1) { Thread.Yield(); }

object CacheFactory(ICacheEntry entry) =>
new AsyncLazy<T>(() =>
@@ -195,7 +202,7 @@ object CacheFactory(ICacheEntry entry) =>
}
finally
{
- locker.Release();
+ keyLocks[hash] = 0;
}

try
@@ -206,16 +213,18 @@ object CacheFactory(ICacheEntry entry) =>
if (valueHasChangedType)
{
CacheProvider.Remove(key);
- await locker.WaitAsync()
-     .ConfigureAwait(
-         false); //TODO: do we really need to lock everything here - faster if we could lock on just the key?

+ // acquire lock
+ hash = (uint)key.GetHashCode() % (uint)keyLocks.Length;
+ while (Interlocked.CompareExchange(ref keyLocks[hash], 1, 0) == 1) { Thread.Yield(); }

try
{
cacheItem = CacheProvider.GetOrCreate<object>(key, CacheFactory);
}
finally
{
- locker.Release();
+ keyLocks[hash] = 0;
}
result = GetValueFromAsyncLazy<T>(cacheItem, out _ /* we just evicted so type change cannot happen this time */);
}
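A minimal, self-contained sketch of the per-key locking pattern the commit introduces: hash the key into a fixed array of int flags, spin on Interlocked.CompareExchange to take the slot, and write 0 to give it back. The StripedKeyLock type, its members, and the demo below are hypothetical names used for illustration, not LazyCache's API.

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

// Hypothetical, self-contained illustration of the striped per-key lock
// pattern used in this commit. The names here are invented for the example.
public sealed class StripedKeyLock
{
    // One int flag per stripe: 0 = free, 1 = held. Keys that hash to the same
    // slot share a lock, which is the trade-off striping makes.
    private readonly int[] keyLocks = new int[8192];

    public uint Acquire(string key)
    {
        uint slot = (uint)key.GetHashCode() % (uint)keyLocks.Length;
        // Spin until the slot flips from 0 (free) to 1 (held), yielding the
        // time slice on each failed attempt.
        while (Interlocked.CompareExchange(ref keyLocks[slot], 1, 0) == 1)
        {
            Thread.Yield();
        }
        return slot;
    }

    public void Release(uint slot)
    {
        // The commit releases with a plain write (keyLocks[hash] = 0);
        // Volatile.Write makes the release ordering explicit.
        Volatile.Write(ref keyLocks[slot], 0);
    }
}

public static class StripedKeyLockDemo
{
    public static void Main()
    {
        var locks = new StripedKeyLock();

        Parallel.For(0, 100, i =>
        {
            string key = "item-" + (i % 8);
            uint slot = locks.Acquire(key);
            try
            {
                // Critical section: at most one thread per stripe runs here,
                // mirroring how CachingService guards GetOrCreate.
                Console.WriteLine($"thread {Environment.CurrentManagedThreadId} holds {key}");
            }
            finally
            {
                locks.Release(slot);
            }
        });
    }
}
```

Keys that collide in the 8192-slot array share a lock, so the design trades occasional unrelated contention for removing the single global SemaphoreSlim. As the comment in the async path notes, the lock only guards placing the Lazy/AsyncLazy wrapper into the cache, so the Thread.Yield spin stays short.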
