Replace DeferredCache with LruCache where possible (#8563)

Most of these uses don't need a full-blown DeferredCache; LruCache is lighter and more appropriate.
Richard van der Hoff 2020-10-19 12:20:29 +01:00 committed by GitHub
parent 79c1f973ce
commit 97647b33c2
8 changed files with 30 additions and 27 deletions
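
As a rough illustration of the swap this commit makes in each store (constructor arguments are taken from the hunks below; treat this as a sketch of the pattern rather than the full API of either class): a `DeferredCache` is built with `name=`/`max_entries=` and populated via `prefill()`, while the plain `LruCache` takes `cache_name=`/`max_size=` and is read and written synchronously with `get()`/`set()`.

from synapse.util.caches.lrucache import LruCache

# Before (heavier): DeferredCache(name="client_ip_last_seen", keylen=4, max_entries=50000)
# populated with cache.prefill(key, now).

# After (lighter): a plain, synchronous LRU cache.
client_ip_last_seen = LruCache(
    cache_name="client_ip_last_seen", keylen=4, max_size=50000
)  # type: LruCache[tuple, int]

# Illustrative key/value only; the real key layout lives in the stores below.
key = ("@user:example.org", "access_token", "10.0.0.1", "user-agent")
client_ip_last_seen.set(key, 1603106429000)     # was .prefill(key, now)
last_seen = client_ip_last_seen.get(key, None)  # a plain value, not a Deferred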

changelog.d/8563.misc (new file)

@@ -0,0 +1 @@
+Replace `DeferredCache` with the lighter-weight `LruCache` where possible.

synapse/replication/slave/storage/client_ips.py

@@ -15,7 +15,7 @@
 from synapse.storage.database import DatabasePool
 from synapse.storage.databases.main.client_ips import LAST_SEEN_GRANULARITY
-from synapse.util.caches.deferred_cache import DeferredCache
+from synapse.util.caches.lrucache import LruCache
 
 from ._base import BaseSlavedStore
@@ -24,9 +24,9 @@ class SlavedClientIpStore(BaseSlavedStore):
     def __init__(self, database: DatabasePool, db_conn, hs):
         super().__init__(database, db_conn, hs)
 
-        self.client_ip_last_seen = DeferredCache(
-            name="client_ip_last_seen", keylen=4, max_entries=50000
-        )  # type: DeferredCache[tuple, int]
+        self.client_ip_last_seen = LruCache(
+            cache_name="client_ip_last_seen", keylen=4, max_size=50000
+        )  # type: LruCache[tuple, int]
 
     async def insert_client_ip(self, user_id, access_token, ip, user_agent, device_id):
         now = int(self._clock.time_msec())
@@ -41,7 +41,7 @@ class SlavedClientIpStore(BaseSlavedStore):
         if last_seen is not None and (now - last_seen) < LAST_SEEN_GRANULARITY:
             return
 
-        self.client_ip_last_seen.prefill(key, now)
+        self.client_ip_last_seen.set(key, now)
 
         self.hs.get_tcp_replication().send_user_ip(
             user_id, access_token, ip, user_agent, device_id, now

synapse/storage/_base.py

@@ -76,14 +76,16 @@ class SQLBaseStore(metaclass=ABCMeta):
         """
         try:
-            if key is None:
-                getattr(self, cache_name).invalidate_all()
-            else:
-                getattr(self, cache_name).invalidate(tuple(key))
+            cache = getattr(self, cache_name)
         except AttributeError:
             # We probably haven't pulled in the cache in this worker,
             # which is fine.
-            pass
+            return
+
+        if key is None:
+            cache.invalidate_all()
+        else:
+            cache.invalidate(tuple(key))
 
 
 def db_to_json(db_content):
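
The reshuffle above is not purely cosmetic: with only the `getattr` inside the `try`, the `except AttributeError` now covers nothing but "this worker never created that cache", and an `AttributeError` raised from within `invalidate()` or `invalidate_all()` is no longer silently swallowed. A minimal standalone sketch of the resulting shape (the function name here is illustrative, not synapse's):

def attempt_to_invalidate_cache(store, cache_name, key):
    try:
        # Only the attribute lookup is guarded: a missing cache is fine...
        cache = getattr(store, cache_name)
    except AttributeError:
        # ...this worker simply hasn't pulled in that cache.
        return

    # ...but errors raised by the cache itself now propagate to the caller.
    if key is None:
        cache.invalidate_all()
    else:
        cache.invalidate(tuple(key))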

synapse/storage/databases/main/client_ips.py

@@ -19,7 +19,7 @@ from typing import Dict, Optional, Tuple
 
 from synapse.metrics.background_process_metrics import wrap_as_background_process
 from synapse.storage._base import SQLBaseStore
 from synapse.storage.database import DatabasePool, make_tuple_comparison_clause
-from synapse.util.caches.deferred_cache import DeferredCache
+from synapse.util.caches.lrucache import LruCache
 
 logger = logging.getLogger(__name__)
@@ -410,8 +410,8 @@ class ClientIpWorkerStore(ClientIpBackgroundUpdateStore):
 class ClientIpStore(ClientIpWorkerStore):
     def __init__(self, database: DatabasePool, db_conn, hs):
 
-        self.client_ip_last_seen = DeferredCache(
-            name="client_ip_last_seen", keylen=4, max_entries=50000
+        self.client_ip_last_seen = LruCache(
+            cache_name="client_ip_last_seen", keylen=4, max_size=50000
         )
 
         super().__init__(database, db_conn, hs)
@@ -442,7 +442,7 @@ class ClientIpStore(ClientIpWorkerStore):
         if last_seen is not None and (now - last_seen) < LAST_SEEN_GRANULARITY:
             return
 
-        self.client_ip_last_seen.prefill(key, now)
+        self.client_ip_last_seen.set(key, now)
 
         self._batch_row_update[key] = (user_agent, device_id, now)
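
Both client-IP stores use the cache the same way: as a write-behind throttle, where a key's last-seen timestamp is only pushed into `_batch_row_update` (or over replication, in the slave store) when the cached value is older than `LAST_SEEN_GRANULARITY`. A self-contained sketch of that throttle; the constant value and function name below are chosen for illustration, not taken from synapse:

import time

LAST_SEEN_GRANULARITY_MS = 120 * 1000  # illustrative value

def record_client_ip(last_seen_cache, batch_row_update, key, user_agent, device_id):
    """Only record a client-IP hit if we haven't done so recently for this key."""
    now = int(time.time() * 1000)
    last_seen = last_seen_cache.get(key, None)
    if last_seen is not None and (now - last_seen) < LAST_SEEN_GRANULARITY_MS:
        return  # seen recently: skip the write entirely
    last_seen_cache.set(key, now)
    batch_row_update[key] = (user_agent, device_id, now)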

synapse/storage/databases/main/devices.py

@@ -34,8 +34,8 @@ from synapse.storage.database import (
 )
 from synapse.types import Collection, JsonDict, get_verify_key_from_cross_signing_key
 from synapse.util import json_decoder, json_encoder
-from synapse.util.caches.deferred_cache import DeferredCache
 from synapse.util.caches.descriptors import cached, cachedList
+from synapse.util.caches.lrucache import LruCache
 from synapse.util.iterutils import batch_iter
 from synapse.util.stringutils import shortstr
@@ -1005,8 +1005,8 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
         # Map of (user_id, device_id) -> bool. If there is an entry that implies
         # the device exists.
-        self.device_id_exists_cache = DeferredCache(
-            name="device_id_exists", keylen=2, max_entries=10000
+        self.device_id_exists_cache = LruCache(
+            cache_name="device_id_exists", keylen=2, max_size=10000
         )
 
     async def store_device(
@@ -1052,7 +1052,7 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
             )
             if hidden:
                 raise StoreError(400, "The device ID is in use", Codes.FORBIDDEN)
-            self.device_id_exists_cache.prefill(key, True)
+            self.device_id_exists_cache.set(key, True)
             return inserted
         except StoreError:
             raise

synapse/storage/databases/main/events.py

@@ -1051,9 +1051,7 @@ class PersistEventsStore:
         def prefill():
             for cache_entry in to_prefill:
-                self.store._get_event_cache.prefill(
-                    (cache_entry[0].event_id,), cache_entry
-                )
+                self.store._get_event_cache.set((cache_entry[0].event_id,), cache_entry)
 
         txn.call_after(prefill)
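
The `prefill` closure is registered with `txn.call_after` rather than executed inline, so the event cache is only populated once the surrounding database transaction has actually committed. A toy version of that after-commit pattern; the `FakeTxn` class is a stand-in for illustration, not synapse's `LoggingTransaction`:

class FakeTxn:
    """Minimal stand-in that queues callbacks to run only on commit."""

    def __init__(self):
        self._after_callbacks = []

    def call_after(self, fn, *args):
        self._after_callbacks.append((fn, args))

    def commit(self):
        for fn, args in self._after_callbacks:
            fn(*args)


event_cache = {}
to_prefill = [("$abc123:example.org", {"event": "..."})]
txn = FakeTxn()

def prefill():
    # Runs post-commit, so a rolled-back transaction never leaves stale cache entries.
    for event_id, entry in to_prefill:
        event_cache[(event_id,)] = entry

txn.call_after(prefill)
txn.commit()
assert ("$abc123:example.org",) in event_cache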

synapse/storage/databases/main/events_worker.py

@@ -42,8 +42,8 @@ from synapse.storage.database import DatabasePool
 from synapse.storage.engines import PostgresEngine
 from synapse.storage.util.id_generators import MultiWriterIdGenerator, StreamIdGenerator
 from synapse.types import Collection, get_domain_from_id
-from synapse.util.caches.deferred_cache import DeferredCache
 from synapse.util.caches.descriptors import cached
+from synapse.util.caches.lrucache import LruCache
 from synapse.util.iterutils import batch_iter
 from synapse.util.metrics import Measure
@@ -146,11 +146,10 @@ class EventsWorkerStore(SQLBaseStore):
             self._cleanup_old_transaction_ids,
         )
 
-        self._get_event_cache = DeferredCache(
-            "*getEvent*",
+        self._get_event_cache = LruCache(
+            cache_name="*getEvent*",
             keylen=3,
-            max_entries=hs.config.caches.event_cache_size,
-            apply_cache_factor_from_config=False,
+            max_size=hs.config.caches.event_cache_size,
         )
 
         self._event_fetch_lock = threading.Condition()
@@ -749,7 +748,7 @@ class EventsWorkerStore(SQLBaseStore):
                 event=original_ev, redacted_event=redacted_event
             )
 
-            self._get_event_cache.prefill((event_id,), cache_entry)
+            self._get_event_cache.set((event_id,), cache_entry)
             result_map[event_id] = cache_entry
 
         return result_map

synapse/util/caches/lrucache.py

@@ -337,6 +337,9 @@ class LruCache(Generic[KT, VT]):
         self.set = cache_set
         self.setdefault = cache_set_default
         self.pop = cache_pop
+        # `invalidate` is exposed for consistency with DeferredCache, so that it can be
+        # invalidated by the cache invalidation replication stream.
+        self.invalidate = cache_pop
         if cache_type is TreeCache:
             self.del_multi = cache_del_multi
         self.len = synchronized(cache_len)
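
With `invalidate` aliased to `pop`, code that only knows it holds "some cache" (such as the invalidation helper in `_base.py` above, driven by the cache-invalidation replication stream) can call `invalidate(key)` on an `LruCache` exactly as it did on a `DeferredCache`. A quick hedged usage sketch, reusing the constructor arguments seen elsewhere in this commit:

from synapse.util.caches.lrucache import LruCache

cache = LruCache(cache_name="example", keylen=1, max_size=10)  # type: LruCache[tuple, str]

cache.set(("some_key",), "some_value")
assert cache.get(("some_key",), None) == "some_value"

# invalidate() is pop() under another name: the entry is dropped, and
# invalidating a key that was never cached is a harmless no-op.
cache.invalidate(("some_key",))
cache.invalidate(("never_cached",))
assert cache.get(("some_key",), None) is None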