Mirror of https://github.com/element-hq/synapse.git (synced 2024-11-24 10:35:46 +03:00)
Merge branch 'develop' of github.com:matrix-org/synapse into erikj/receipts_take2
Commit 129ee4e149
3 changed files with 18 additions and 51 deletions
@@ -16,3 +16,6 @@ ignore =
     docs/*
     pylint.cfg
     tox.ini
+
+[flake8]
+max-line-length = 90
@@ -237,9 +237,7 @@ class FederationHandler(BaseHandler):
             )
         )
 
-        def redact_disallowed(event_and_state):
-            event, state = event_and_state
-
+        def redact_disallowed(event, state):
             if not state:
                 return event
 
@@ -273,13 +271,11 @@ class FederationHandler(BaseHandler):
 
             return event
 
-        res = map(redact_disallowed, [
-            (e, event_to_state[e.event_id])
+        defer.returnValue([
+            redact_disallowed(e, event_to_state[e.event_id])
             for e in events
         ])
 
-        defer.returnValue(res)
-
     @log_function
     @defer.inlineCallbacks
     def backfill(self, dest, room_id, limit, extremities=[]):
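The merged version of this helper takes the event and its state as two arguments instead of unpacking a tuple, and hands a list comprehension straight to defer.returnValue rather than going through map() and an intermediate res variable. A minimal runnable sketch of that shape under Twisted's inlineCallbacks (the function and argument names are illustrative, not Synapse's actual API):

from twisted.internet import defer


def redact_disallowed(event, state):
    # Placeholder rule: with no state to check against, the event passes
    # through unchanged (Synapse's real check inspects room state events).
    if not state:
        return event
    return event


@defer.inlineCallbacks
def filter_events_for_server(events, get_state_for_events):
    # get_state_for_events is assumed to return a Deferred that resolves
    # to {event_id: state_dict} for the requested events.
    event_to_state = yield get_state_for_events([e["event_id"] for e in events])

    # Same shape as the merged code: the comprehension result is handed
    # straight to defer.returnValue, with no intermediate map() call.
    defer.returnValue([
        redact_disallowed(e, event_to_state[e["event_id"]])
        for e in events
    ])


# Runs synchronously here because the stub Deferred is already fired.
d = filter_events_for_server(
    [{"event_id": "$a"}, {"event_id": "$b"}],
    lambda ids: defer.succeed({i: {} for i in ids}),
)
d.addCallback(print)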
@@ -60,7 +60,6 @@ class StateStore(SQLBaseStore):
         )
 
         groups = set(event_to_groups.values())
-
         group_to_state = yield self._get_state_for_groups(groups)
 
         defer.returnValue({
@@ -201,10 +200,7 @@ class StateStore(SQLBaseStore):
 
                 txn.execute(sql, args)
 
-                results[group] = [
-                    r[0]
-                    for r in txn.fetchall()
-                ]
+                results[group] = [r[0] for r in txn.fetchall()]
 
             return results
 
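txn.fetchall() returns a sequence of row tuples, so the collapsed comprehension [r[0] for r in txn.fetchall()] simply keeps the first (and only) selected column of each row. A self-contained sketch of the same idiom using the stdlib sqlite3 module (the table and column names are invented for illustration):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE group_state (state_group INTEGER, event_id TEXT)")
conn.executemany(
    "INSERT INTO group_state VALUES (?, ?)",
    [(1, "$a"), (1, "$b"), (2, "$c")],
)

txn = conn.execute(
    "SELECT event_id FROM group_state WHERE state_group = ?", (1,)
)
# Each row is a 1-tuple such as ("$a",); keep only that single column.
event_ids = [r[0] for r in txn.fetchall()]
print(event_ids)  # ['$a', '$b']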
@@ -235,20 +231,14 @@ class StateStore(SQLBaseStore):
         )
 
         groups = set(event_to_groups.values())
-
-        group_to_state = yield self._get_state_for_groups(
-            groups, types
-        )
+        group_to_state = yield self._get_state_for_groups(groups, types)
 
         event_to_state = {
             event_id: group_to_state[group]
             for event_id, group in event_to_groups.items()
         }
 
-        defer.returnValue({
-            event: event_to_state[event]
-            for event in event_ids
-        })
+        defer.returnValue({event: event_to_state[event] for event in event_ids})
 
     @cached(num_args=2, lru=True, max_entries=100000)
     def _get_state_group_for_event(self, room_id, event_id):
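The two comprehensions chain the lookups: event_to_groups maps each event to its state group, group_to_state maps each group to its state, and the returned dict keys the resolved state by the event IDs that were originally asked for. A small standalone illustration of that shape (all data invented):

# event -> state group, as a _get_state_group_for_events-style result
event_to_groups = {"$a": 1, "$b": 1, "$c": 2}

# state group -> state, as a _get_state_for_groups-style result
group_to_state = {
    1: {("m.room.member", "@alice:hs"): "join"},
    2: {("m.room.member", "@bob:hs"): "leave"},
}

# Chain the two mappings...
event_to_state = {
    event_id: group_to_state[group]
    for event_id, group in event_to_groups.items()
}

# ...then key the answer by exactly the events that were requested.
event_ids = ["$a", "$c"]
print({event: event_to_state[event] for event in event_ids})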
@@ -282,10 +272,7 @@ class StateStore(SQLBaseStore):
 
             return results
 
-        return self.runInteraction(
-            "_get_state_group_for_events",
-            f,
-        )
+        return self.runInteraction("_get_state_group_for_events", f)
 
     def _get_some_state_from_cache(self, group, types):
         """Checks if group is in cache. See `_get_state_for_groups`
@@ -332,8 +319,7 @@ class StateStore(SQLBaseStore):
         got_all = not (missing_types or types is None)
 
         return {
-            k: v
-            for k, v in state_dict.items()
+            k: v for k, v in state_dict.items()
             if include(k[0], k[1])
         }, missing_types, got_all
 
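_get_some_state_from_cache returns three things: the cached entries that match the requested types, the (type, state_key) pairs that were not cached, and a flag saying whether the cache covered everything. The collapsed comprehension keeps a (type, state_key) -> event entry only when include(type, state_key) accepts it. A rough sketch of that filtering step, where the include rule is a simplified stand-in rather than Synapse's actual matcher:

def make_include(types):
    # Simplified rule: a requested (type, None) matches any state_key,
    # otherwise both the type and the state_key must match exactly.
    def include(typ, state_key):
        return any(
            typ == want_type and want_key in (None, state_key)
            for want_type, want_key in types
        )
    return include


state_dict = {
    ("m.room.member", "@alice:hs"): "member-event",
    ("m.room.name", ""): "name-event",
    ("m.room.topic", ""): "topic-event",
}
types = [("m.room.member", None), ("m.room.name", "")]
include = make_include(types)

filtered = {
    k: v for k, v in state_dict.items()
    if include(k[0], k[1])
}
print(filtered)  # member and name entries survive, topic is dropped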
@@ -364,20 +350,15 @@ class StateStore(SQLBaseStore):
                 state_dict, missing_types, got_all = self._get_some_state_from_cache(
                     group, types
                 )
-
                 results[group] = state_dict
 
                 if not got_all:
-                    missing_groups_and_types.append((
-                        group,
-                        missing_types
-                    ))
+                    missing_groups_and_types.append((group, missing_types))
         else:
             for group in set(groups):
                 state_dict, got_all = self._get_all_state_from_cache(
                     group
                 )
-
                 results[group] = state_dict
 
                 if not got_all:
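Both branches of this hunk do the same bookkeeping: look each group up in the cache, keep whatever was found in results[group], and remember which groups (and, in the typed branch, which missing types) still need a database round trip. A condensed sketch of that partition-then-fetch pattern, simplified to whole-group misses; the cache and fetch_from_db arguments are stand-ins:

def get_state_for_groups(groups, cache, fetch_from_db):
    """cache: {group: state_dict}; fetch_from_db(groups) -> {group: state_dict}."""
    results = {}
    missing_groups = []

    for group in set(groups):
        state_dict = cache.get(group)
        if state_dict is not None:
            results[group] = state_dict        # cache hit
        else:
            results[group] = {}
            missing_groups.append(group)       # remember for the batched fetch

    # One batched fetch covers everything the cache could not answer.
    for group, state_dict in fetch_from_db(missing_groups).items():
        results[group].update(state_dict)

    return results


print(get_state_for_groups([1, 2], {1: {"k": "v"}}, lambda gs: {g: {} for g in gs}))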
@@ -405,10 +386,7 @@ class StateStore(SQLBaseStore):
             get_prev_content=False
         )
 
-        state_events = {
-            e.event_id: e
-            for e in state_events
-        }
+        state_events = {e.event_id: e for e in state_events}
 
         # Now we want to update the cache with all the things we fetched
         # from the database.
@@ -418,22 +396,14 @@ class StateStore(SQLBaseStore):
                 # cache absence of the key, on the assumption that if we've
                 # explicitly asked for some types then we will probably ask
                 # for them again.
-                state_dict = {
-                    key: None
-                    for key in types
-                }
+                state_dict = {key: None for key in types}
                 state_dict.update(results[group])
             else:
                 state_dict = results[group]
 
-            evs = [
-                state_events[e_id] for e_id in state_ids
-                if e_id in state_events  # This can happen if event is rejected.
-            ]
-            state_dict.update({
-                (e.type, e.state_key): e
-                for e in evs
-            })
+            for event_id in state_ids:
+                state_event = state_events[event_id]
+                state_dict[(state_event.type, state_event.state_key)] = state_event
 
             self._state_group_cache.update(
                 cache_seq_num,
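Beyond the reformatting, this hunk changes behaviour slightly: the old code built evs with an explicit `if e_id in state_events` guard, skipping IDs whose event was never loaded (the inline comment notes this can happen when an event is rejected), while the merged loop indexes state_events[event_id] directly and would raise KeyError for such an ID. Whether that case can still arise after the merge is not visible from this diff alone. A small invented illustration of the two shapes:

class StateEvent:
    """Minimal stand-in for a state event object."""
    def __init__(self, event_id, typ, state_key):
        self.event_id = event_id
        self.type = typ
        self.state_key = state_key


state_events = {
    "$name": StateEvent("$name", "m.room.name", ""),
    # "$rejected" is deliberately absent: referenced by the state group
    # but never loaded.
}
state_ids = ["$name", "$rejected"]

# Old shape: the guard silently drops IDs with no loaded event.
old_dict = {}
evs = [state_events[e_id] for e_id in state_ids if e_id in state_events]
old_dict.update({(e.type, e.state_key): e for e in evs})
print(sorted(old_dict))  # [('m.room.name', '')]

# New shape: direct indexing assumes every ID resolves.
new_dict = {}
try:
    for event_id in state_ids:
        state_event = state_events[event_id]
        new_dict[(state_event.type, state_event.state_key)] = state_event
except KeyError as exc:
    print("direct indexing fails on a missing event:", exc)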
@@ -443,9 +413,7 @@ class StateStore(SQLBaseStore):
             )
 
             results[group].update({
-                key: value
-                for key, value in state_dict.items()
-                if value
+                key: value for key, value in state_dict.items() if value
            })
 
        defer.returnValue(results)
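The final comprehension drops falsy values before merging into results[group], so the key: None placeholders written for negative caching stay in the cache but never reach the caller. A tiny illustration with invented values:

state_dict = {
    ("m.room.name", ""): "name-event",
    ("m.room.member", "@alice:hs"): None,  # negative-cache placeholder
}

results_for_group = {}
results_for_group.update({
    key: value for key, value in state_dict.items() if value
})
print(results_for_group)  # only the real event survives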