This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Fix LruCache. Make TreeCache track its own size. #538

Merged: 6 commits, merged on Jan 29, 2016
Changes from 3 commits
11 changes: 5 additions & 6 deletions synapse/util/caches/lrucache.py
@@ -37,7 +37,7 @@ class LruCache(object):
     """
     def __init__(self, max_size, keylen=1, cache_type=dict):
         cache = cache_type()
-        self.size = 0
+        self.cache = cache  # Used for introspection.
         list_root = []
         list_root[:] = [list_root, list_root, None, None]

@@ -60,7 +60,6 @@ def add_node(key, value):
             prev_node[NEXT] = node
             next_node[PREV] = node
             cache[key] = node
-            self.size += 1

         def move_node_to_front(node):
             prev_node = node[PREV]
@@ -79,7 +78,6 @@ def delete_node(node):
             next_node = node[NEXT]
             prev_node[NEXT] = next_node
             next_node[PREV] = prev_node
-            self.size -= 1

         @synchronized
         def cache_get(key, default=None):
@@ -98,7 +96,7 @@ def cache_set(key, value):
                 node[VALUE] = value
             else:
                 add_node(key, value)
-                if self.size > max_size:
+                if len(cache) > max_size:
                     todelete = list_root[PREV]
                     delete_node(todelete)
                     cache.pop(todelete[KEY], None)
@@ -110,7 +108,7 @@ def cache_set_default(key, value):
                 return node[VALUE]
             else:
                 add_node(key, value)
-                if self.size > max_size:
+                if len(cache) > max_size:
                     todelete = list_root[PREV]
                     delete_node(todelete)
                     cache.pop(todelete[KEY], None)
@@ -142,10 +140,11 @@ def cache_clear():
             list_root[NEXT] = list_root
             list_root[PREV] = list_root
             cache.clear()
             self.size = 0
Contributor: Why is this still here?

Contributor: Also why didn't this fix the problem OOI?

Member Author: I dislike the LruCache trying to keep track of the size of the underlying cache. I much prefer that caches/collections keep track of their own size.

         @synchronized
         def cache_len():
-            return self.size
+            return len(cache)

         @synchronized
         def cache_contains(key):
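With these changes LruCache no longer keeps a separate size counter at all: both the eviction check in cache_set / cache_set_default and cache_len simply ask the backing store for its length. A minimal sketch of that pattern (illustrative names only, not Synapse's LruCache):

from collections import OrderedDict


class TinyLru(object):
    """Illustrative sketch of length-based eviction, not Synapse's LruCache."""

    def __init__(self, max_size):
        self.max_size = max_size
        self.cache = OrderedDict()  # the store itself is the source of truth for size

    def set(self, key, value):
        self.cache.pop(key, None)
        self.cache[key] = value              # most recently used entries sit at the end
        while len(self.cache) > self.max_size:
            self.cache.popitem(last=False)   # evict the least recently used entry

    def get(self, key, default=None):
        if key not in self.cache:
            return default
        value = self.cache.pop(key)
        self.cache[key] = value              # refresh recency on access
        return value

    def __len__(self):
        return len(self.cache)               # mirrors cache_len() returning len(cache)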
36 changes: 34 additions & 2 deletions synapse/util/caches/treecache.py
@@ -8,6 +8,7 @@ class TreeCache(object):
     Keys must be tuples.
     """
     def __init__(self):
+        self.size = 0
         self.root = {}

     def __setitem__(self, key, value):
@@ -20,17 +21,19 @@ def set(self, key, value):
         node = self.root
         for k in key[:-1]:
             node = node.setdefault(k, {})
-        node[key[-1]] = value
+        node[key[-1]] = _Entry(value)
+        self.size += 1

     def get(self, key, default=None):
         node = self.root
         for k in key[:-1]:
             node = node.get(k, None)
             if node is None:
                 return default
-        return node.get(key[-1], default)
+        return node.get(key[-1], _Entry(default)).value

     def clear(self):
+        self.size = 0
         self.root = {}

     def pop(self, key, default=None):
@@ -57,4 +60,33 @@ def pop(self, key, default=None):
                 break
             node_and_keys[i+1][0].pop(k)

+        popped, cnt = _strip_and_count_entires(popped)
+        self.size -= cnt
         return popped
Contributor: If you are going to enumerate the popped tree here then you might as well return a list of popped entries for the LruCache to remove from its linked list.

Member Author: I'd quite like to keep the pop method returning a single item when given a full key, as that matches expectations.

+    def __len__(self):
+        return self.size
+
+
+class _Entry(object):
+    __slots__ = ["value"]
+
+    def __init__(self, value):
+        object.__setattr__(self, "value", value)
Contributor: Why not just self.value = value?
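As an aside (not part of the PR): because _Entry declares __slots__ and does not override __setattr__, a plain assignment is equivalent, and object.__setattr__ is just a more explicit spelling of the same store.

class _PlainEntry(object):
    # Illustrative only: behaves identically to the _Entry class above.
    __slots__ = ["value"]

    def __init__(self, value):
        self.value = value  # same effect as object.__setattr__(self, "value", value)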



+def _strip_and_count_entires(d):
+    """Takes an _Entry or dict with leaves of _Entry's, and either returns the
+    value or a dictionary with _Entry's replaced by their values.
+
+    Also returns the count of _Entry's
+    """
+    if isinstance(d, dict):
+        cnt = 0
+        for key, value in d.items():
+            v, n = _strip_and_count_entires(value)
+            d[key] = v
+            cnt += n
+        return d, cnt
+    else:
+        return d.value, 1
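
Putting the treecache changes together: set() wraps each leaf in an _Entry and bumps self.size, pop() strips the wrappers via _strip_and_count_entires and subtracts however many leaves were removed, and __len__ reports the running total. A rough usage sketch of that behaviour (assuming this 2016-era, Python 2 module is importable as synapse.util.caches.treecache; exact popped shapes should be checked against the PR's tests):

from synapse.util.caches.treecache import TreeCache

cache = TreeCache()
cache[("user", "room1")] = "state A"
cache[("user", "room2")] = "state B"
print(len(cache))                    # 2 -- each set() incremented self.size

# A full key pops a single, unwrapped value (the behaviour the author
# wants pop() to keep)...
print(cache.pop(("user", "room1")))  # "state A"
print(len(cache))                    # 1

# ...while a shorter key pops the whole subtree; _strip_and_count_entires
# both unwraps the _Entry leaves and tells pop() how much to subtract
# from self.size.
cache[("user", "room3")] = "state C"
print(cache.pop(("user",)))          # {"room2": "state B", "room3": "state C"}
print(len(cache))                    # 0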