Skip to content
This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Replace DeferredCache with LruCache where possible #8563

Merged
Merged 1 commit into the base branch on Oct 19, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions changelog.d/8563.misc
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Replace `DeferredCache` with the lighter-weight `LruCache` where possible.
10 changes: 5 additions & 5 deletions synapse/replication/slave/storage/client_ips.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@

from synapse.storage.database import DatabasePool
from synapse.storage.databases.main.client_ips import LAST_SEEN_GRANULARITY
from synapse.util.caches.deferred_cache import DeferredCache
from synapse.util.caches.lrucache import LruCache

from ._base import BaseSlavedStore

Expand All @@ -24,9 +24,9 @@ class SlavedClientIpStore(BaseSlavedStore):
def __init__(self, database: DatabasePool, db_conn, hs):
super().__init__(database, db_conn, hs)

self.client_ip_last_seen = DeferredCache(
name="client_ip_last_seen", keylen=4, max_entries=50000
) # type: DeferredCache[tuple, int]
self.client_ip_last_seen = LruCache(
cache_name="client_ip_last_seen", keylen=4, max_size=50000
) # type: LruCache[tuple, int]

async def insert_client_ip(self, user_id, access_token, ip, user_agent, device_id):
now = int(self._clock.time_msec())
Expand All @@ -41,7 +41,7 @@ async def insert_client_ip(self, user_id, access_token, ip, user_agent, device_i
if last_seen is not None and (now - last_seen) < LAST_SEEN_GRANULARITY:
return

self.client_ip_last_seen.prefill(key, now)
self.client_ip_last_seen.set(key, now)

self.hs.get_tcp_replication().send_user_ip(
user_id, access_token, ip, user_agent, device_id, now
Expand Down
12 changes: 7 additions & 5 deletions synapse/storage/_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,14 +76,16 @@ def _attempt_to_invalidate_cache(
"""

try:
if key is None:
getattr(self, cache_name).invalidate_all()
Comment on lines +77 to -80
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm changing this because the try/except wrapped more than just the getattr call, which meant that an important error got swallowed.

else:
getattr(self, cache_name).invalidate(tuple(key))
cache = getattr(self, cache_name)
except AttributeError:
# We probably haven't pulled in the cache in this worker,
# which is fine.
pass
return

if key is None:
cache.invalidate_all()
else:
cache.invalidate(tuple(key))


def db_to_json(db_content):
Expand Down
8 changes: 4 additions & 4 deletions synapse/storage/databases/main/client_ips.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.storage._base import SQLBaseStore
from synapse.storage.database import DatabasePool, make_tuple_comparison_clause
from synapse.util.caches.deferred_cache import DeferredCache
from synapse.util.caches.lrucache import LruCache

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -410,8 +410,8 @@ def _prune_old_user_ips_txn(txn):
class ClientIpStore(ClientIpWorkerStore):
def __init__(self, database: DatabasePool, db_conn, hs):

self.client_ip_last_seen = DeferredCache(
name="client_ip_last_seen", keylen=4, max_entries=50000
self.client_ip_last_seen = LruCache(
cache_name="client_ip_last_seen", keylen=4, max_size=50000
clokep marked this conversation as resolved.
Show resolved Hide resolved
)

super().__init__(database, db_conn, hs)
Expand Down Expand Up @@ -442,7 +442,7 @@ async def insert_client_ip(
if last_seen is not None and (now - last_seen) < LAST_SEEN_GRANULARITY:
return

self.client_ip_last_seen.prefill(key, now)
self.client_ip_last_seen.set(key, now)

self._batch_row_update[key] = (user_agent, device_id, now)

Expand Down
8 changes: 4 additions & 4 deletions synapse/storage/databases/main/devices.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,8 @@
)
from synapse.types import Collection, JsonDict, get_verify_key_from_cross_signing_key
from synapse.util import json_decoder, json_encoder
from synapse.util.caches.deferred_cache import DeferredCache
from synapse.util.caches.descriptors import cached, cachedList
from synapse.util.caches.lrucache import LruCache
from synapse.util.iterutils import batch_iter
from synapse.util.stringutils import shortstr

Expand Down Expand Up @@ -1005,8 +1005,8 @@ def __init__(self, database: DatabasePool, db_conn, hs):

# Map of (user_id, device_id) -> bool. If there is an entry that implies
# the device exists.
self.device_id_exists_cache = DeferredCache(
name="device_id_exists", keylen=2, max_entries=10000
self.device_id_exists_cache = LruCache(
cache_name="device_id_exists", keylen=2, max_size=10000
)

async def store_device(
Expand Down Expand Up @@ -1052,7 +1052,7 @@ async def store_device(
)
if hidden:
raise StoreError(400, "The device ID is in use", Codes.FORBIDDEN)
self.device_id_exists_cache.prefill(key, True)
self.device_id_exists_cache.set(key, True)
return inserted
except StoreError:
raise
Expand Down
4 changes: 1 addition & 3 deletions synapse/storage/databases/main/events.py
Original file line number Diff line number Diff line change
Expand Up @@ -1051,9 +1051,7 @@ def _add_to_cache(self, txn, events_and_contexts):

def prefill():
for cache_entry in to_prefill:
self.store._get_event_cache.prefill(
(cache_entry[0].event_id,), cache_entry
)
self.store._get_event_cache.set((cache_entry[0].event_id,), cache_entry)

txn.call_after(prefill)

Expand Down
11 changes: 5 additions & 6 deletions synapse/storage/databases/main/events_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,8 +42,8 @@
from synapse.storage.engines import PostgresEngine
from synapse.storage.util.id_generators import MultiWriterIdGenerator, StreamIdGenerator
from synapse.types import Collection, get_domain_from_id
from synapse.util.caches.deferred_cache import DeferredCache
from synapse.util.caches.descriptors import cached
from synapse.util.caches.lrucache import LruCache
from synapse.util.iterutils import batch_iter
from synapse.util.metrics import Measure

Expand Down Expand Up @@ -146,11 +146,10 @@ def __init__(self, database: DatabasePool, db_conn, hs):
self._cleanup_old_transaction_ids,
)

self._get_event_cache = DeferredCache(
"*getEvent*",
self._get_event_cache = LruCache(
cache_name="*getEvent*",
keylen=3,
max_entries=hs.config.caches.event_cache_size,
apply_cache_factor_from_config=False,
max_size=hs.config.caches.event_cache_size,
)

self._event_fetch_lock = threading.Condition()
Expand Down Expand Up @@ -749,7 +748,7 @@ async def _get_events_from_db(self, event_ids, allow_rejected=False):
event=original_ev, redacted_event=redacted_event
)

self._get_event_cache.prefill((event_id,), cache_entry)
self._get_event_cache.set((event_id,), cache_entry)
result_map[event_id] = cache_entry

return result_map
Expand Down
3 changes: 3 additions & 0 deletions synapse/util/caches/lrucache.py
Original file line number Diff line number Diff line change
Expand Up @@ -337,6 +337,9 @@ def cache_contains(key: KT) -> bool:
self.set = cache_set
self.setdefault = cache_set_default
self.pop = cache_pop
# `invalidate` is exposed for consistency with DeferredCache, so that it can be
# invalidated by the cache invalidation replication stream.
self.invalidate = cache_pop
if cache_type is TreeCache:
self.del_multi = cache_del_multi
self.len = synchronized(cache_len)
Expand Down