Mirror of https://github.com/element-hq/synapse.git (synced 2024-11-22 17:46:08 +03:00)
Merge branch 'notifier_unify' into notifier_performance
Commit 78672a9fd5
3 changed files with 28 additions and 24 deletions
@@ -316,7 +316,7 @@ class Notifier(object):
                     user, getattr(from_token, keyname), limit,
                 )
                 events.extend(stuff)
-                end_token = from_token.copy_and_replace(keyname, new_key)
+                end_token = end_token.copy_and_replace(keyname, new_key)
 
             if events:
                 defer.returnValue((events, (from_token, end_token)))
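The one changed line is the whole fix: the loop advances one key of the token per event source, so rebuilding end_token from from_token on every iteration would discard the keys already advanced for earlier sources, whereas chaining copy_and_replace on the previous end_token accumulates them all. A self-contained sketch of the difference, using a toy token class in place of Synapse's real StreamToken:

from dataclasses import dataclass, replace

@dataclass(frozen=True)
class ToyToken:
    # Stand-in for a stream token carrying one position per event source.
    room_key: int = 0
    typing_key: int = 0

    def copy_and_replace(self, keyname, new_key):
        # Return a new token with a single key advanced.
        return replace(self, **{keyname: new_key})

from_token = ToyToken(room_key=0, typing_key=0)
new_keys = {"room_key": 5, "typing_key": 3}

# Old behaviour: each iteration starts over from from_token, so only the
# last source's key survives in end_token.
end_token = from_token
for keyname, new_key in new_keys.items():
    end_token = from_token.copy_and_replace(keyname, new_key)
print(end_token)  # ToyToken(room_key=0, typing_key=3)

# New behaviour: chain off the previous end_token so every key is kept.
end_token = from_token
for keyname, new_key in new_keys.items():
    end_token = end_token.copy_and_replace(keyname, new_key)
print(end_token)  # ToyToken(room_key=5, typing_key=3)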
@@ -23,6 +23,7 @@ from synapse.crypto.event_signing import compute_event_reference_hash
 
 from syutil.base64util import decode_base64
 from syutil.jsonutil import encode_canonical_json
+from contextlib import contextmanager
 
 import logging
 
@@ -41,17 +42,25 @@ class EventsStore(SQLBaseStore):
             self.min_token -= 1
             stream_ordering = self.min_token
 
+        if stream_ordering is None:
+            stream_ordering_manager = yield self._stream_id_gen.get_next(self)
+        else:
+            @contextmanager
+            def stream_ordering_manager():
+                yield stream_ordering
+
         try:
-            yield self.runInteraction(
-                "persist_event",
-                self._persist_event_txn,
-                event=event,
-                context=context,
-                backfilled=backfilled,
-                stream_ordering=stream_ordering,
-                is_new_state=is_new_state,
-                current_state=current_state,
-            )
+            with stream_ordering_manager as stream_ordering:
+                yield self.runInteraction(
+                    "persist_event",
+                    self._persist_event_txn,
+                    event=event,
+                    context=context,
+                    backfilled=backfilled,
+                    stream_ordering=stream_ordering,
+                    is_new_state=is_new_state,
+                    current_state=current_state,
+                )
         except _RollbackButIsFineException:
             pass
 
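This is the heart of the persist_event change: both cases are funnelled through one with block. If no stream_ordering was passed in, the ordering comes from the id generator (stream_ordering_manager = yield self._stream_id_gen.get_next(self), see the StreamIdGenerator hunks below); if the caller supplied one (the backfill path, which counts self.min_token downwards), a throwaway @contextmanager simply hands that value back. A small synchronous sketch of the idea, with a hypothetical allocate_next_id() standing in for the id generator and the throwaway manager called explicitly to produce the object the with statement needs:

from contextlib import contextmanager
import itertools

_ids = itertools.count(1)


@contextmanager
def allocate_next_id():
    # Stand-in for the id generator: hand out a fresh ordering and do any
    # book-keeping when the block exits.
    ordering = next(_ids)
    try:
        yield ordering
    finally:
        pass  # the real generator records that this id is no longer pending


def persist_event(stream_ordering=None):
    if stream_ordering is None:
        stream_ordering_manager = allocate_next_id()
    else:
        @contextmanager
        def _supplied():
            yield stream_ordering
        stream_ordering_manager = _supplied()

    # One persistence path regardless of where the ordering came from.
    with stream_ordering_manager as stream_ordering:
        print("persisting at stream ordering", stream_ordering)


persist_event()                    # freshly allocated: 1
persist_event(stream_ordering=-3)  # caller-supplied, backfill style: -3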
@@ -95,15 +104,6 @@ class EventsStore(SQLBaseStore):
         # Remove the any existing cache entries for the event_id
         txn.call_after(self._invalidate_get_event_cache, event.event_id)
 
-        if stream_ordering is None:
-            with self._stream_id_gen.get_next_txn(txn) as stream_ordering:
-                return self._persist_event_txn(
-                    txn, event, context, backfilled,
-                    stream_ordering=stream_ordering,
-                    is_new_state=is_new_state,
-                    current_state=current_state,
-                )
-
         # We purposefully do this first since if we include a `current_state`
         # key, we *want* to update the `current_state_events` table
         if current_state:
@@ -78,14 +78,18 @@ class StreamIdGenerator(object):
         self._current_max = None
         self._unfinished_ids = deque()
 
-    def get_next_txn(self, txn):
+    @defer.inlineCallbacks
+    def get_next(self, store):
         """
         Usage:
-            with stream_id_gen.get_next_txn(txn) as stream_id:
+            with yield stream_id_gen.get_next as stream_id:
                 # ... persist event ...
         """
         if not self._current_max:
-            self._get_or_compute_current_max(txn)
+            yield store.runInteraction(
+                "_compute_current_max",
+                self._get_or_compute_current_max,
+            )
 
         with self._lock:
             self._current_max += 1
@@ -101,7 +105,7 @@ class StreamIdGenerator(object):
             with self._lock:
                 self._unfinished_ids.remove(next_id)
 
-        return manager()
+        defer.returnValue(manager())
 
     @defer.inlineCallbacks
     def get_max_token(self, store):
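The last two hunks replace get_next_txn (which needed an open database transaction) with get_next, an inlineCallbacks method that loads the current maximum via store.runInteraction() on first use and then fires its Deferred with a context manager via defer.returnValue(manager()); persist_event above yields that Deferred and enters the manager. A minimal runnable sketch of the pattern with a toy generator (the store argument is unused in the fake, and the hard-coded 100 stands in for the value read from the database):

from contextlib import contextmanager

from twisted.internet import defer


class ToyIdGenerator(object):
    # Minimal stand-in for StreamIdGenerator: hands out increasing ids and
    # remembers which ones are still being persisted.
    def __init__(self):
        self._current_max = None
        self._unfinished_ids = []

    @defer.inlineCallbacks
    def get_next(self, store):
        # The real code loads the current maximum from the database via
        # store.runInteraction() on first use; here we fake that step.
        if not self._current_max:
            self._current_max = yield defer.succeed(100)

        self._current_max += 1
        next_id = self._current_max
        self._unfinished_ids.append(next_id)

        @contextmanager
        def manager():
            try:
                yield next_id
            finally:
                self._unfinished_ids.remove(next_id)

        # The Deferred fires with a *context manager*, not with the id itself.
        defer.returnValue(manager())


@defer.inlineCallbacks
def persist_something(id_gen):
    # Mirrors the persist_event call site: yield the Deferred, then enter
    # the context manager it resolved to.
    stream_ordering_manager = yield id_gen.get_next(store=None)
    with stream_ordering_manager as stream_ordering:
        print("persisting at stream ordering", stream_ordering)  # 101


persist_something(ToyIdGenerator())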