-
Notifications
You must be signed in to change notification settings - Fork 2
/
funcache.go
133 lines (109 loc) · 3.74 KB
/
funcache.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
// Package funcache provides simple, fine-grained caching of function values.
package funcache
import (
"sync"
"sync/atomic"
)
// Store is any backing store used by the cache. Note that the cache doesn't do
// any eviction of keys. That's up to your particular store to manage, however
// it sees fit.
type Store interface {
	// Add saves value under key, overwriting any existing entry.
	Add(key, value interface{})
	// Get returns the value saved under key and whether it was present.
	Get(key interface{}) (value interface{}, ok bool)
	// Candidate methods for a future, richer Store contract (kept for reference):
	// Contains(key interface{}) bool
	// Peek(key interface{}) (interface{}, bool)
	// Purge()
	// Remove(key interface{})
}
// -----------------------------------------------------------------------------

// nilStore is a no-op Store: writes are discarded and reads never hit.
// It exists for testing and init().
type nilStore struct{}

// Add discards the key/value pair.
func (*nilStore) Add(key, value interface{}) {}

// Get always reports a miss.
func (*nilStore) Get(key interface{}) (value interface{}, ok bool) { return nil, false }
func nilCache() *Cache { return New(&nilStore{}) }
// -----------------------------------------------------------------------------

// syncMap is a minimal in-memory Store: a plain map guarded by a read/write
// lock, safe for concurrent access.
type syncMap struct {
	sync.RWMutex
	m map[interface{}]interface{}
}

// newSyncMap returns an empty, ready-to-use syncMap.
func newSyncMap() *syncMap {
	sm := &syncMap{}
	sm.m = make(map[interface{}]interface{})
	return sm
}

// Add saves value under key, replacing any previous entry.
func (sm *syncMap) Add(key, value interface{}) {
	sm.Lock()
	sm.m[key] = value
	sm.Unlock()
}

// Get returns the value saved under key and whether it was present.
func (sm *syncMap) Get(key interface{}) (value interface{}, ok bool) {
	sm.RLock()
	defer sm.RUnlock()
	value, ok = sm.m[key]
	return value, ok
}
// -----------------------------------------------------------------------------

// cowMap is a copy-on-write in-memory Store, safe for concurrent access.
// Readers load an immutable snapshot through atomic.Value and take no lock;
// writers serialize on the mutex and publish a fresh copy of the whole map.
type cowMap struct {
	sync.Mutex // Used only when writing
	m atomic.Value
}

// newCopyOnWriteMap returns an empty, ready-to-use cowMap.
func newCopyOnWriteMap() *cowMap {
	cm := new(cowMap)
	cm.m.Store(make(map[interface{}]interface{}))
	return cm
}

// Add publishes a new snapshot containing every existing entry plus key/value.
func (cm *cowMap) Add(key, value interface{}) {
	cm.Lock()
	defer cm.Unlock()
	old := cm.m.Load().(map[interface{}]interface{})
	next := make(map[interface{}]interface{}, len(old)+1)
	for k, v := range old {
		next[k] = v
	}
	next[key] = value
	cm.m.Store(next)
}

// Get looks key up in the current snapshot without taking any lock.
func (cm *cowMap) Get(key interface{}) (value interface{}, ok bool) {
	snapshot := cm.m.Load().(map[interface{}]interface{})
	value, ok = snapshot[key]
	return value, ok
}
// -----------------------------------------------------------------------------

// Cache memoizes function return values in a pluggable backing Store.
type Cache struct {
	store Store
	// Small optimization: maintain a counter of actively cache busting callers.
	// If no one is cache busting, then don't go through the extra effort of
	// checking the caller stack.
	// Accessed only via sync/atomic (see Bust and Cache methods).
	busting uint32
}
// New returns a Cache backed by the store you provide.
func New(store Store) *Cache {
	return &Cache{store: store}
}
// NewInMemCache returns a Cache backed by a simple in-memory map, safe for
// concurrent access.
func NewInMemCache() *Cache {
	store := newSyncMap()
	return New(store)
}
// Bust calls the given function, invalidating any cached values in nested
// function calls.
func (cache *Cache) Bust(fn func()) {
	// Flag this call stack as busting for the duration of fn; the deferred
	// add of ^uint32(0) is the atomic idiom for "subtract one".
	atomic.AddUint32(&cache.busting, 1)
	defer atomic.AddUint32(&cache.busting, ^uint32(0))
	fn()
}
// Cache takes a function and caches its return value. It saves it in the store
// under the given key. Subsequent calls to Cache, with the same key, will return
// the cached value (if it still exists in the store), otherwise the function
// will be called again.
func (cache *Cache) Cache(key interface{}, fn func() interface{}) interface{} {
	// Serve from the store unless a caller up the stack is cache busting.
	// The cheap atomic load goes first so the caller-stack inspection is
	// skipped entirely when no Bust call is in flight.
	busted := atomic.LoadUint32(&cache.busting) != 0 && wasCalledByCacheBustingFn()
	if !busted {
		if cached, ok := cache.store.Get(key); ok {
			return cached
		}
	}
	// Miss (or bust): recompute and refresh the store.
	fresh := fn()
	cache.store.Add(key, fresh)
	return fresh
}
// Wrap caches the return value of the given function. It is the same as Cache,
// except that it auto-assigns a cache key, which is just the function name.
func (cache *Cache) Wrap(fn func() interface{}) interface{} {
	key := getFnName(fn)
	return cache.Cache(key, fn)
}