Mirror of https://github.com/element-hq/synapse.git (synced 2024-11-25 19:15:51 +03:00)

Rename Cache->DeferredCache

parent 7eff59ec91
commit 9f87da0a84

8 changed files with 30 additions and 25 deletions
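At the construction sites touched below, the change is purely the class name; the
keyword arguments (name, keylen, max_entries, tree, iterable) are untouched. A
minimal before/after sketch, using the keywords from one of the call sites in this
commit:

    # before
    from synapse.util.caches.descriptors import Cache
    cache = Cache(name="client_ip_last_seen", keylen=4, max_entries=50000)

    # after
    from synapse.util.caches.descriptors import DeferredCache
    cache = DeferredCache(name="client_ip_last_seen", keylen=4, max_entries=50000)
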
synapse/replication/slave/storage/client_ips.py

@@ -15,7 +15,7 @@
 from synapse.storage.database import DatabasePool
 from synapse.storage.databases.main.client_ips import LAST_SEEN_GRANULARITY
-from synapse.util.caches.descriptors import Cache
+from synapse.util.caches.descriptors import DeferredCache
 
 from ._base import BaseSlavedStore
 
 
@@ -24,9 +24,9 @@ class SlavedClientIpStore(BaseSlavedStore):
     def __init__(self, database: DatabasePool, db_conn, hs):
         super().__init__(database, db_conn, hs)
 
-        self.client_ip_last_seen = Cache(
+        self.client_ip_last_seen = DeferredCache(
             name="client_ip_last_seen", keylen=4, max_entries=50000
-        )  # type: Cache[tuple, int]
+        )  # type: DeferredCache[tuple, int]
 
     async def insert_client_ip(self, user_id, access_token, ip, user_agent, device_id):
         now = int(self._clock.time_msec())

synapse/storage/databases/main/client_ips.py

@@ -19,7 +19,7 @@ from typing import Dict, Optional, Tuple
 from synapse.metrics.background_process_metrics import wrap_as_background_process
 from synapse.storage._base import SQLBaseStore
 from synapse.storage.database import DatabasePool, make_tuple_comparison_clause
-from synapse.util.caches.descriptors import Cache
+from synapse.util.caches.descriptors import DeferredCache
 
 logger = logging.getLogger(__name__)
 
@@ -410,7 +410,7 @@ class ClientIpWorkerStore(ClientIpBackgroundUpdateStore):
 class ClientIpStore(ClientIpWorkerStore):
     def __init__(self, database: DatabasePool, db_conn, hs):
 
-        self.client_ip_last_seen = Cache(
+        self.client_ip_last_seen = DeferredCache(
             name="client_ip_last_seen", keylen=4, max_entries=50000
         )
 

synapse/storage/databases/main/devices.py

@@ -34,7 +34,7 @@ from synapse.storage.database import (
 )
 from synapse.types import Collection, JsonDict, get_verify_key_from_cross_signing_key
 from synapse.util import json_decoder, json_encoder
-from synapse.util.caches.descriptors import Cache, cached, cachedList
+from synapse.util.caches.descriptors import DeferredCache, cached, cachedList
 from synapse.util.iterutils import batch_iter
 from synapse.util.stringutils import shortstr
 
@@ -1004,7 +1004,7 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
 
         # Map of (user_id, device_id) -> bool. If there is an entry that implies
         # the device exists.
-        self.device_id_exists_cache = Cache(
+        self.device_id_exists_cache = DeferredCache(
             name="device_id_exists", keylen=2, max_entries=10000
         )
 

synapse/storage/databases/main/events_worker.py

@@ -42,7 +42,7 @@ from synapse.storage.database import DatabasePool
 from synapse.storage.engines import PostgresEngine
 from synapse.storage.util.id_generators import MultiWriterIdGenerator, StreamIdGenerator
 from synapse.types import Collection, get_domain_from_id
-from synapse.util.caches.descriptors import Cache, cached
+from synapse.util.caches.descriptors import DeferredCache, cached
 from synapse.util.iterutils import batch_iter
 from synapse.util.metrics import Measure
 
@@ -145,7 +145,7 @@ class EventsWorkerStore(SQLBaseStore):
             self._cleanup_old_transaction_ids,
         )
 
-        self._get_event_cache = Cache(
+        self._get_event_cache = DeferredCache(
             "*getEvent*",
             keylen=3,
             max_entries=hs.config.caches.event_cache_size,

synapse/util/caches/descriptors.py

@@ -99,7 +99,7 @@ class CacheEntry:
         self.callbacks.clear()
 
 
-class Cache(Generic[KT, VT]):
+class DeferredCache(Generic[KT, VT]):
     """Wraps an LruCache, adding support for Deferred results.
 
     It expects that each entry added with set() will be a Deferred; likewise get()

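As the docstring says, the renamed class wraps an LruCache and stores Deferred
results. A hedged usage sketch based on the call sites and tests in this commit;
the cache name and keys are illustrative:

    from twisted.internet import defer

    from synapse.util.caches.descriptors import DeferredCache

    cache = DeferredCache(
        name="example_cache", keylen=1, max_entries=1000
    )  # type: DeferredCache[str, str]

    # set() expects a Deferred and hands back an ObservableDeferred; a plain
    # value raises TypeError (see the isinstance check in the next hunk).
    cache.set("pending", defer.succeed("value"))

    # prefill() stores an already-computed value directly, as the tests do.
    cache.prefill("ready", "value")
    assert cache.get("ready") == "value"
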
@@ -225,7 +225,10 @@ class Cache(Generic[KT, VT]):
             return default
 
     def set(
-        self, key: KT, value: defer.Deferred, callback: Optional[Callable[[], None]] = None
+        self,
+        key: KT,
+        value: defer.Deferred,
+        callback: Optional[Callable[[], None]] = None,
     ) -> ObservableDeferred:
         if not isinstance(value, defer.Deferred):
             raise TypeError("not a Deferred")

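set() returning an ObservableDeferred is what lets several callers share one
in-flight result. A hedged, self-contained sketch (names are illustrative):

    from twisted.internet import defer

    from synapse.util.caches.descriptors import DeferredCache

    cache = DeferredCache("example", max_entries=10)

    pending = defer.Deferred()
    observable = cache.set("key", pending)  # an ObservableDeferred

    first = observable.observe()   # each observer gets its own Deferred
    second = observable.observe()

    pending.callback("result")     # both observers now fire with "result"
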
@@ -427,13 +430,13 @@ class CacheDescriptor(_CacheDescriptorBase):
         self.iterable = iterable
 
     def __get__(self, obj, owner):
-        cache = Cache(
+        cache = DeferredCache(
             name=self.orig.__name__,
             max_entries=self.max_entries,
             keylen=self.num_args,
             tree=self.tree,
             iterable=self.iterable,
-        )  # type: Cache[Tuple, Any]
+        )  # type: DeferredCache[Tuple, Any]
 
         def get_cache_key_gen(args, kwargs):
             """Given some args/kwargs return a generator that resolves into

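The descriptor above is what backs @cached: each decorated method gets its own
DeferredCache, named after the method (name=self.orig.__name__). A hedged sketch;
the store class and method below are illustrative, not part of this diff:

    from synapse.util.caches.descriptors import cached


    class ExampleStore:
        @cached(max_entries=1000)
        async def get_widget(self, widget_id):
            # the result is memoised in a DeferredCache named "get_widget",
            # keyed on widget_id
            ...
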
@@ -677,9 +680,9 @@ class _CacheContext:
 
     _cache_context_objects = (
         WeakValueDictionary()
-    )  # type: WeakValueDictionary[Tuple[Cache, CacheKey], _CacheContext]
+    )  # type: WeakValueDictionary[Tuple[DeferredCache, CacheKey], _CacheContext]
 
-    def __init__(self, cache, cache_key):  # type: (Cache, CacheKey) -> None
+    def __init__(self, cache, cache_key):  # type: (DeferredCache, CacheKey) -> None
         self._cache = cache
         self._cache_key = cache_key
 
@@ -688,7 +691,9 @@ class _CacheContext:
         self._cache.invalidate(self._cache_key)
 
     @classmethod
-    def get_instance(cls, cache, cache_key):  # type: (Cache, CacheKey) -> _CacheContext
+    def get_instance(
+        cls, cache, cache_key
+    ):  # type: (DeferredCache, CacheKey) -> _CacheContext
         """Returns an instance constructed with the given arguments.
 
         A new instance is only created if none already exists.

tests/storage/test__base.py

@@ -20,14 +20,14 @@ from mock import Mock
 from twisted.internet import defer
 
 from synapse.util.async_helpers import ObservableDeferred
-from synapse.util.caches.descriptors import Cache, cached
+from synapse.util.caches.descriptors import DeferredCache, cached
 
 from tests import unittest
 
 
-class CacheTestCase(unittest.HomeserverTestCase):
+class DeferredCacheTestCase(unittest.HomeserverTestCase):
     def prepare(self, reactor, clock, homeserver):
-        self.cache = Cache("test")
+        self.cache = DeferredCache("test")
 
     def test_empty(self):
         failed = False
@@ -56,7 +56,7 @@ class CacheTestCase(unittest.HomeserverTestCase):
         self.assertTrue(failed)
 
     def test_eviction(self):
-        cache = Cache("test", max_entries=2)
+        cache = DeferredCache("test", max_entries=2)
 
         cache.prefill(1, "one")
         cache.prefill(2, "two")
@@ -74,7 +74,7 @@ class CacheTestCase(unittest.HomeserverTestCase):
         cache.get(3)
 
     def test_eviction_lru(self):
-        cache = Cache("test", max_entries=2)
+        cache = DeferredCache("test", max_entries=2)
 
         cache.prefill(1, "one")
         cache.prefill(2, "two")

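test_eviction and test_eviction_lru pin down the LRU bound on the cache; condensed
into a hedged sketch:

    from synapse.util.caches.descriptors import DeferredCache

    cache = DeferredCache("test", max_entries=2)
    cache.prefill(1, "one")
    cache.prefill(2, "two")
    cache.prefill(3, "three")  # evicts key 1, the least recently used entry
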
tests/test_metrics.py

@@ -15,7 +15,7 @@
 # limitations under the License.
 
 from synapse.metrics import REGISTRY, InFlightGauge, generate_latest
-from synapse.util.caches.descriptors import Cache
+from synapse.util.caches.descriptors import DeferredCache
 
 from tests import unittest
 
@@ -138,7 +138,7 @@ class CacheMetricsTests(unittest.HomeserverTestCase):
         Caches produce metrics reflecting their state when scraped.
         """
         CACHE_NAME = "cache_metrics_test_fgjkbdfg"
-        cache = Cache(CACHE_NAME, max_entries=777)
+        cache = DeferredCache(CACHE_NAME, max_entries=777)
 
         items = {
             x.split(b"{")[0].decode("ascii"): x.split(b" ")[1].decode("ascii")

tests/util/caches/test_descriptors.py

@@ -42,9 +42,9 @@ def run_on_reactor():
     return make_deferred_yieldable(d)
 
 
-class CacheTestCase(unittest.TestCase):
+class DeferredCacheTestCase(unittest.TestCase):
     def test_invalidate_all(self):
-        cache = descriptors.Cache("testcache")
+        cache = descriptors.DeferredCache("testcache")
 
         callback_record = [False, False]
 