Mirror of https://github.com/element-hq/synapse.git
Keep around the old (buggy) version of the prune_event function so that we can use it to check signatures for events on old servers
parent 6efd4d1649
commit 0dd3aea319

4 changed files with 92 additions and 137 deletions
@@ -102,8 +102,6 @@ class Auth(object):
     def check_host_in_room(self, room_id, host):
         curr_state = yield self.state.get_current_state(room_id)
 
-        logger.debug("Got curr_state %s", curr_state)
-
         for event in curr_state:
             if event.type == EventTypes.Member:
                 try:
@@ -94,6 +94,85 @@ def prune_event(event):
     )
 
 
+def old_prune_event(event):
+    """This is an old and buggy version of the prune event function. The
+    difference between this and the new version is that when including dicts
+    in the content they were included as frozen_dicts rather than dicts. This
+    caused the JSON encoder to encode as a list of the keys rather than the
+    dict.
+    """
+    event_type = event.type
+
+    allowed_keys = [
+        "event_id",
+        "sender",
+        "room_id",
+        "hashes",
+        "signatures",
+        "content",
+        "type",
+        "state_key",
+        "depth",
+        "prev_events",
+        "prev_state",
+        "auth_events",
+        "origin",
+        "origin_server_ts",
+        "membership",
+    ]
+
+    event_dict = event.get_dict()
+
+    new_content = {}
+
+    def add_fields(*fields):
+        for field in fields:
+            if field in event.content:
+                # This is the line that is buggy: event.content may return
+                # a frozen_dict which the json encoders encode as lists rather
+                # than dicts.
+                new_content[field] = event.content[field]
+
+    if event_type == EventTypes.Member:
+        add_fields("membership")
+    elif event_type == EventTypes.Create:
+        add_fields("creator")
+    elif event_type == EventTypes.JoinRules:
+        add_fields("join_rule")
+    elif event_type == EventTypes.PowerLevels:
+        add_fields(
+            "users",
+            "users_default",
+            "events",
+            "events_default",
+            "events_default",
+            "state_default",
+            "ban",
+            "kick",
+            "redact",
+        )
+    elif event_type == EventTypes.Aliases:
+        add_fields("aliases")
+
+    allowed_fields = {
+        k: v
+        for k, v in event_dict.items()
+        if k in allowed_keys
+    }
+
+    allowed_fields["content"] = new_content
+
+    allowed_fields["unsigned"] = {}
+
+    if "age_ts" in event.unsigned:
+        allowed_fields["unsigned"]["age_ts"] = event.unsigned["age_ts"]
+
+    return type(event)(
+        allowed_fields,
+        internal_metadata_dict=event.internal_metadata.get_dict()
+    )
+
+
 def format_event_raw(d):
     return d
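Note on the bug described in the docstring above: a frozen mapping that is not a plain dict can come out of a JSON encoder as a list of its keys. A minimal, self-contained illustration of that failure mode (this is not syutil's actual encoder; FallbackEncoder, and MappingProxyType standing in for synapse's frozen dict type, are assumptions made for the example):

import json
from types import MappingProxyType  # stand-in for a frozen dict type

class FallbackEncoder(json.JSONEncoder):
    # Hypothetical fallback: anything the encoder does not recognise is
    # coerced with list(); iterating a mapping yields its keys.
    def default(self, o):
        return list(o)

content = MappingProxyType({"membership": "join"})

print(json.dumps({"membership": "join"}))        # {"membership": "join"}
print(json.dumps(content, cls=FallbackEncoder))  # ["membership"]

This is why the old and new prune functions can produce different redacted JSON, and therefore different signatures, for the same event, which is what the commit message is working around.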
@@ -16,17 +16,11 @@
 
 from twisted.internet import defer
 
+from .federation_base import FederationBase
 from .units import Edu
 
 from synapse.util.logutils import log_function
 from synapse.events import FrozenEvent
-from synapse.events.utils import prune_event
-
-from syutil.jsonutil import encode_canonical_json
-
-from synapse.crypto.event_signing import check_event_content_hash
-
-from synapse.api.errors import SynapseError
 
 import logging
 
@@ -34,7 +28,7 @@ import logging
 logger = logging.getLogger(__name__)
 
 
-class FederationClient(object):
+class FederationClient(FederationBase):
     @log_function
     def send_pdu(self, pdu, destinations):
         """Informs the replication layer about a new PDU generated within the
@@ -376,89 +370,3 @@ class FederationClient(object):
         event.internal_metadata.outlier = outlier
 
         return event
-
-    @defer.inlineCallbacks
-    def _check_sigs_and_hash_and_fetch(self, origin, pdus, outlier=False):
-        """Takes a list of PDUs and checks the signatures and hashs of each
-        one. If a PDU fails its signature check then we check if we have it in
-        the database and if not then request if from the originating server of
-        that PDU.
-
-        If a PDU fails its content hash check then it is redacted.
-
-        The given list of PDUs are not modified, instead the function returns
-        a new list.
-
-        Args:
-            pdu (list)
-            outlier (bool)
-
-        Returns:
-            Deferred : A list of PDUs that have valid signatures and hashes.
-        """
-        signed_pdus = []
-        for pdu in pdus:
-            try:
-                new_pdu = yield self._check_sigs_and_hash(pdu)
-                signed_pdus.append(new_pdu)
-            except SynapseError:
-                # FIXME: We should handle signature failures more gracefully.
-
-                # Check local db.
-                new_pdu = yield self.store.get_event(
-                    pdu.event_id,
-                    allow_rejected=True
-                )
-                if new_pdu:
-                    signed_pdus.append(new_pdu)
-                    continue
-
-                # Check pdu.origin
-                if pdu.origin != origin:
-                    new_pdu = yield self.get_pdu(
-                        destinations=[pdu.origin],
-                        event_id=pdu.event_id,
-                        outlier=outlier,
-                    )
-
-                    if new_pdu:
-                        signed_pdus.append(new_pdu)
-                        continue
-
-                logger.warn("Failed to find copy of %s with valid signature")
-
-        defer.returnValue(signed_pdus)
-
-    @defer.inlineCallbacks
-    def _check_sigs_and_hash(self, pdu):
-        """Throws a SynapseError if the PDU does not have the correct
-        signatures.
-
-        Returns:
-            FrozenEvent: Either the given event or it redacted if it failed the
-            content hash check.
-        """
-        # Check signatures are correct.
-        redacted_event = prune_event(pdu)
-        redacted_pdu_json = redacted_event.get_pdu_json()
-
-        try:
-            yield self.keyring.verify_json_for_server(
-                pdu.origin, redacted_pdu_json
-            )
-        except SynapseError:
-            logger.warn(
-                "Signature check failed for %s redacted to %s",
-                encode_canonical_json(pdu.get_pdu_json()),
-                encode_canonical_json(redacted_pdu_json),
-            )
-            raise
-
-        if not check_event_content_hash(pdu):
-            logger.warn(
-                "Event content has been tampered, redacting %s, %s",
-                pdu.event_id, encode_canonical_json(pdu.get_dict())
-            )
-            defer.returnValue(redacted_event)
-
-        defer.returnValue(pdu)
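For context on the content-hash check in the removed _check_sigs_and_hash above: the idea is to hash a canonical JSON form of the event with the signature-related fields stripped out and compare it against the hash the event carries. A rough standalone sketch of that idea (json.dumps with sorted keys stands in for syutil's canonical JSON, and the hex-digest comparison is a simplification; real Matrix hashing uses unpadded base64 and stricter canonicalisation rules):

import hashlib
import json

def reference_content_hash(event_dict):
    # Drop the fields that the content hash does not cover.
    stripped = {
        k: v for k, v in event_dict.items()
        if k not in ("hashes", "signatures", "unsigned")
    }
    canonical = json.dumps(stripped, sort_keys=True, separators=(",", ":"))
    return hashlib.sha256(canonical.encode("utf-8")).hexdigest()

def content_hash_matches(event_dict, claimed_hex_digest):
    return reference_content_hash(event_dict) == claimed_hex_digest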
@@ -16,6 +16,7 @@
 
 from twisted.internet import defer
 
+from .federation_base import FederationBase
 from .units import Transaction, Edu
 
 from synapse.util.logutils import log_function
@@ -35,7 +36,7 @@ import logging
 logger = logging.getLogger(__name__)
 
 
-class FederationServer(object):
+class FederationServer(FederationBase):
    def set_handler(self, handler):
        """Sets the handler that the replication layer will use to communicate
        receipt of new PDUs from other home servers. The required methods are
@@ -251,17 +252,20 @@ class FederationServer(object):
             Deferred: Results in `dict` with the same format as `content`
         """
         auth_chain = [
-            (yield self._check_sigs_and_hash(self.event_from_pdu_json(e)))
+            self.event_from_pdu_json(e)
             for e in content["auth_chain"]
         ]
 
-        missing = [
-            (yield self._check_sigs_and_hash(self.event_from_pdu_json(e)))
-            for e in content.get("missing", [])
-        ]
+        signed_auth = yield self._check_sigs_and_hash_and_fetch(
+            origin, auth_chain, outlier=True
+        )
 
         ret = yield self.handler.on_query_auth(
-            origin, event_id, auth_chain, content.get("rejects", []), missing
+            origin,
+            event_id,
+            signed_auth,
+            content.get("rejects", []),
+            content.get("missing", []),
         )
 
         time_now = self._clock.time_msec()
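The change above stops verifying each auth-chain event inline and instead hands the parsed events to _check_sigs_and_hash_and_fetch. A minimal sketch of the behavioural difference, in plain Python rather than the synapse/Twisted API (check and fetch_valid_copy are placeholder callables introduced for the example):

def check_all_or_raise(events, check):
    # Old shape: one failing event raises and aborts the whole request.
    return [check(e) for e in events]

def check_with_recovery(events, check, fetch_valid_copy):
    # New shape: on failure, try to recover a valid copy (for example from
    # the local store or the event's origin server) and drop the event
    # otherwise.
    checked = []
    for event in events:
        try:
            checked.append(check(event))
        except ValueError:  # stands in for whatever error check() raises
            recovered = fetch_valid_copy(event)
            if recovered is not None:
                checked.append(recovered)
    return checked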
@@ -426,37 +430,3 @@ class FederationServer(object):
         event.internal_metadata.outlier = outlier
 
         return event
-
-    @defer.inlineCallbacks
-    def _check_sigs_and_hash(self, pdu):
-        """Throws a SynapseError if the PDU does not have the correct
-        signatures.
-
-        Returns:
-            FrozenEvent: Either the given event or it redacted if it failed the
-            content hash check.
-        """
-        # Check signatures are correct.
-        redacted_event = prune_event(pdu)
-        redacted_pdu_json = redacted_event.get_pdu_json()
-
-        try:
-            yield self.keyring.verify_json_for_server(
-                pdu.origin, redacted_pdu_json
-            )
-        except SynapseError:
-            logger.warn(
-                "Signature check failed for %s redacted to %s",
-                encode_canonical_json(pdu.get_pdu_json()),
-                encode_canonical_json(redacted_pdu_json),
-            )
-            raise
-
-        if not check_event_content_hash(pdu):
-            logger.warn(
-                "Event content has been tampered, redacting %s, %s",
-                pdu.event_id, encode_canonical_json(pdu.get_dict())
-            )
-            defer.returnValue(redacted_event)
-
-        defer.returnValue(pdu)