Mirror of https://github.com/element-hq/synapse.git, synced 2024-12-19 17:56:19 +03:00
Merge pull request #1117 from matrix-org/erikj/fix_state

Ensure we don't mutate state cache entries

Commit 264a48aedf: 3 changed files with 30 additions and 20 deletions
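The underlying problem: StateHandler caches _StateCacheEntry objects whose state dicts are handed out by reference, so a caller that updates context.prev_state_ids, context.current_state_ids or context.delta_ids in place silently rewrites the cached entry that later callers will read. The fix works at both ends: callers now rebind to a shallow copy before calling .update(), and the cache entry wraps its members in frozendict so that an accidental in-place write fails instead of corrupting the cache. A minimal illustration of the bug class (illustrative data, not Synapse code):

    cache = {"!room:example.org": {("m.room.name", ""): "$event_a"}}

    state = cache["!room:example.org"]        # shared reference, not a copy
    state[("m.room.topic", "")] = "$event_b"  # mutates the cached dict in place

    # a later caller reading the cache now sees the stray entry
    assert ("m.room.topic", "") in cache["!room:example.org"]

    # the pattern this PR adopts instead: copy before updating
    state = dict(cache["!room:example.org"])  # shallow copy, cache stays intact
    state[("m.room.topic", "")] = "$event_b"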
synapse/handlers/federation.py

@@ -1585,10 +1585,12 @@ class FederationHandler(BaseHandler):
             current_state = set(e.event_id for e in auth_events.values())
             different_auth = event_auth_events - current_state
 
+            context.current_state_ids = dict(context.current_state_ids)
             context.current_state_ids.update({
                 k: a.event_id for k, a in auth_events.items()
                 if k != event_key
             })
+            context.prev_state_ids = dict(context.prev_state_ids)
             context.prev_state_ids.update({
                 k: a.event_id for k, a in auth_events.items()
             })
@@ -1670,10 +1672,12 @@ class FederationHandler(BaseHandler):
         # 4. Look at rejects and their proofs.
         # TODO.
 
+        context.current_state_ids = dict(context.current_state_ids)
         context.current_state_ids.update({
             k: a.event_id for k, a in auth_events.items()
             if k != event_key
         })
+        context.prev_state_ids = dict(context.prev_state_ids)
         context.prev_state_ids.update({
             k: a.event_id for k, a in auth_events.items()
         })
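Both FederationHandler hunks apply the same copy-before-update pattern: context.current_state_ids and context.prev_state_ids may be the very mappings held by the state cache (and, after this change, frozendicts), so each is rebound to a fresh dict(...) before .update() is called. A shallow copy is enough because the values are plain event-ID strings. A minimal sketch with illustrative data:

    original = {("m.room.create", ""): "$create"}
    auth = {("m.room.power_levels", ""): "$power_levels"}

    updated = dict(original)   # shallow copy; the shared original is never touched
    updated.update(auth)

    assert ("m.room.power_levels", "") not in original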
synapse/state.py

@@ -26,6 +26,7 @@ from synapse.events.snapshot import EventContext
 from synapse.util.async import Linearizer
 
 from collections import namedtuple
+from frozendict import frozendict
 
 import logging
 import hashlib
@@ -58,11 +59,11 @@ class _StateCacheEntry(object):
     __slots__ = ["state", "state_group", "state_id", "prev_group", "delta_ids"]
 
     def __init__(self, state, state_group, prev_group=None, delta_ids=None):
-        self.state = state
+        self.state = frozendict(state)
         self.state_group = state_group
 
         self.prev_group = prev_group
-        self.delta_ids = delta_ids
+        self.delta_ids = frozendict(delta_ids) if delta_ids is not None else None
 
         # The `state_id` is a unique ID we generate that can be used as ID for
         # this collection of state. Usually this would be the same as the
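Wrapping state and delta_ids in frozendict turns accidental mutation of a cached entry into an immediate error. A sketch of the intended behaviour, assuming the frozendict package imported above (a read-only mapping with no item assignment):

    from frozendict import frozendict

    state = frozendict({("m.room.name", ""): "$name_event"})

    try:
        state[("m.room.topic", "")] = "$topic_event"
    except TypeError:
        # frozendict has no __setitem__, so an in-place write fails loudly
        # instead of silently corrupting the cached entry
        pass

    mutable = dict(state)   # callers that need to change the state copy it first
    mutable[("m.room.topic", "")] = "$topic_event"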
@@ -237,13 +238,7 @@ class StateHandler(object):
         context.prev_state_ids = curr_state
         if event.is_state():
             context.state_group = self.store.get_next_state_group()
-        else:
-            if entry.state_group is None:
-                entry.state_group = self.store.get_next_state_group()
-                entry.state_id = entry.state_group
-            context.state_group = entry.state_group
 
-        if event.is_state():
             key = (event.type, event.state_key)
             if key in context.prev_state_ids:
                 replaces = context.prev_state_ids[key]
@@ -255,10 +250,15 @@ class StateHandler(object):
             context.prev_group = entry.prev_group
             context.delta_ids = entry.delta_ids
             if context.delta_ids is not None:
+                context.delta_ids = dict(context.delta_ids)
                 context.delta_ids[key] = event.event_id
         else:
-            context.current_state_ids = context.prev_state_ids
+            if entry.state_group is None:
+                entry.state_group = self.store.get_next_state_group()
+                entry.state_id = entry.state_group
+
+            context.state_group = entry.state_group
+            context.current_state_ids = context.prev_state_ids
             context.prev_group = entry.prev_group
             context.delta_ids = entry.delta_ids
 
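The two StateHandler hunks restructure compute_event_context so that all state-group bookkeeping lives inside a single if event.is_state(): / else: branch, and so that delta_ids is copied before the new event is written into it. A condensed paraphrase of the new branching (an illustrative helper, not the real method signature):

    def assign_state_group(is_state_event, context, entry, store, key, event_id):
        if is_state_event:
            # a state event always gets a fresh state group, recorded as a
            # delta on top of the cached entry's group
            context.state_group = store.get_next_state_group()
            context.prev_group = entry.prev_group
            context.delta_ids = entry.delta_ids
            if context.delta_ids is not None:
                context.delta_ids = dict(context.delta_ids)   # copy; never mutate the cache
                context.delta_ids[key] = event_id
        else:
            # a non-state event reuses the cached entry's group, allocating
            # one lazily the first time the entry is used
            if entry.state_group is None:
                entry.state_group = store.get_next_state_group()
                entry.state_id = entry.state_group
            context.state_group = entry.state_group
            context.current_state_ids = context.prev_state_ids
            context.prev_group = entry.prev_group
            context.delta_ids = entry.delta_ids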
synapse/storage/state.py

@@ -817,16 +817,24 @@ class StateStore(SQLBaseStore):
 
     @defer.inlineCallbacks
     def _background_index_state(self, progress, batch_size):
-        def reindex_txn(txn):
+        def reindex_txn(conn):
+            conn.rollback()
             if isinstance(self.database_engine, PostgresEngine):
-                txn.execute(
-                    "CREATE INDEX CONCURRENTLY state_groups_state_type_idx"
-                    " ON state_groups_state(state_group, type, state_key)"
-                )
-                txn.execute(
-                    "DROP INDEX IF EXISTS state_groups_state_id"
-                )
+                # postgres insists on autocommit for the index
+                conn.set_session(autocommit=True)
+                try:
+                    txn = conn.cursor()
+                    txn.execute(
+                        "CREATE INDEX CONCURRENTLY state_groups_state_type_idx"
+                        " ON state_groups_state(state_group, type, state_key)"
+                    )
+                    txn.execute(
+                        "DROP INDEX IF EXISTS state_groups_state_id"
+                    )
+                finally:
+                    conn.set_session(autocommit=False)
             else:
+                txn = conn.cursor()
                 txn.execute(
                     "CREATE INDEX state_groups_state_type_idx"
                     " ON state_groups_state(state_group, type, state_key)"
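The background index rebuild changes because PostgreSQL refuses to run CREATE INDEX CONCURRENTLY inside a transaction block. The reworked reindex_txn therefore receives the connection rather than a cursor, rolls back the implicit transaction, switches the session to autocommit for the DDL, and restores it afterwards; SQLite has no CONCURRENTLY variant, so the else branch keeps a plain CREATE INDEX. The underlying psycopg2 pattern it relies on looks roughly like this (standalone sketch; the connection string is hypothetical):

    import psycopg2

    conn = psycopg2.connect("dbname=synapse")   # hypothetical DSN
    conn.set_session(autocommit=True)           # CONCURRENTLY cannot run inside a transaction
    try:
        cur = conn.cursor()
        cur.execute(
            "CREATE INDEX CONCURRENTLY state_groups_state_type_idx"
            " ON state_groups_state(state_group, type, state_key)"
        )
    finally:
        conn.set_session(autocommit=False)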
@@ -835,9 +843,7 @@ class StateStore(SQLBaseStore):
                     "DROP INDEX IF EXISTS state_groups_state_id"
                 )
 
-        yield self.runInteraction(
-            self.STATE_GROUP_INDEX_UPDATE_NAME, reindex_txn
-        )
+        yield self.runWithConnection(reindex_txn)
 
         yield self._end_background_update(self.STATE_GROUP_INDEX_UPDATE_NAME)
 
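The call site changes to match: runInteraction would wrap reindex_txn in a managed transaction and hand it a cursor, which would defeat the autocommit handling above, so the update now goes through runWithConnection, which passes the callback the connection itself. (This reading of the two helpers is inferred from their use in this diff; the exact signatures are assumed.) Schematically:

    def with_txn(txn):            # runInteraction-style callback: txn is a cursor
        txn.execute("SELECT 1")   # already inside a transaction managed by the store

    def with_conn(conn):          # runWithConnection-style callback: conn is the connection,
        cur = conn.cursor()       # so the callback manages transactions and autocommit itself
        cur.execute("SELECT 1")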