Mirror of https://github.com/element-hq/synapse.git (synced 2024-11-27 12:08:32 +03:00)
Fix SQL so that it accepts that we may want to persist events twice.
parent 53216a500d
commit aa80900a8e

2 changed files with 47 additions and 25 deletions
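The idea behind the change: when the same event is persisted twice (for example, re-received over federation), the previous plain INSERTs would abort the transaction with a uniqueness violation. Switching to SQLite's INSERT OR IGNORE (and the equivalent or_ignore=True flag on the insert helper) makes the writes idempotent. A minimal standalone illustration of the effect, with an illustrative schema that is not Synapse's real one:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE events (event_id TEXT PRIMARY KEY, json TEXT)")

row = ("$abc123:example.com", "{}")

# A plain INSERT would raise sqlite3.IntegrityError on the second
# attempt; INSERT OR IGNORE silently skips the duplicate instead.
for _ in range(2):
    conn.execute("INSERT OR IGNORE INTO events VALUES (?, ?)", row)

print(conn.execute("SELECT COUNT(*) FROM events").fetchone()[0])  # 1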
@@ -99,7 +99,8 @@ class EventFederationStore(SQLBaseStore):
                     "event_id": event_id,
                     "prev_event_id": e_id,
                     "room_id": room_id,
-                }
+                },
+                or_ignore=True,
             )
 
         # Update the extremities table if this is not an outlier.
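The definition of _simple_insert_txn is not part of this diff, so the following is only a sketch of how an or_ignore flag could plausibly be wired through the helper; the signature and behaviour here are assumptions, not Synapse's actual code:

def _simple_insert_txn(self, txn, table, values, or_ignore=False):
    # Hypothetical sketch: build an INSERT statement from the values
    # dict, optionally with SQLite's OR IGNORE conflict clause so a
    # duplicate row is silently skipped instead of raising an error.
    sql = "INSERT %s INTO %s (%s) VALUES (%s)" % (
        "OR IGNORE" if or_ignore else "",
        table,
        ", ".join(values),
        ", ".join("?" for _ in values),
    )
    txn.execute(sql, list(values.values()))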
@@ -120,7 +121,7 @@ class EventFederationStore(SQLBaseStore):
         # We only insert as a forward extremity the new pdu if there are no
         # other pdus that reference it as a prev pdu
         query = (
-            "INSERT INTO %(table)s (event_id, room_id) "
+            "INSERT OR IGNORE INTO %(table)s (event_id, room_id) "
             "SELECT ?, ? WHERE NOT EXISTS ("
             "SELECT 1 FROM %(event_edges)s WHERE "
             "prev_event_id = ? "
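This extremities query now combines two guards: NOT EXISTS keeps the event out of the forward extremities when something already references it as a prev event, while OR IGNORE makes re-running the same insert a no-op. A small standalone sqlite3 reproduction of the pattern; the diff only shows %(table)s placeholders, so the table names below are assumptions:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
    CREATE TABLE event_forward_extremities (
        event_id TEXT,
        room_id TEXT,
        UNIQUE (event_id, room_id)
    );
    CREATE TABLE event_edges (
        event_id TEXT,
        prev_event_id TEXT,
        room_id TEXT
    );
""")

sql = (
    "INSERT OR IGNORE INTO event_forward_extremities (event_id, room_id) "
    "SELECT ?, ? WHERE NOT EXISTS ("
    "    SELECT 1 FROM event_edges WHERE prev_event_id = ?"
    ")"
)

# Running the identical statement twice no longer raises: the second
# execution is absorbed by OR IGNORE.
for _ in range(2):
    conn.execute(sql, ("$new:hs", "!room:hs", "$new:hs"))

print(conn.execute(
    "SELECT COUNT(*) FROM event_forward_extremities"
).fetchone()[0])  # 1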
@@ -144,7 +145,8 @@ class EventFederationStore(SQLBaseStore):
                 values={
                     "event_id": e_id,
                     "room_id": room_id,
-                }
+                },
+                or_ignore=True,
             )
 
         # Also delete from the backwards extremities table all ones that
@@ -181,11 +181,16 @@ class SignatureStore(SQLBaseStore):
             algorithm (str): Hashing algorithm.
             hash_bytes (bytes): Hash function output bytes.
         """
-        self._simple_insert_txn(txn, "event_content_hashes", {
-            "event_id": event_id,
-            "algorithm": algorithm,
-            "hash": buffer(hash_bytes),
-        })
+        self._simple_insert_txn(
+            txn,
+            "event_content_hashes",
+            {
+                "event_id": event_id,
+                "algorithm": algorithm,
+                "hash": buffer(hash_bytes),
+            },
+            or_ignore=True,
+        )
 
     def _get_event_reference_hashes_txn(self, txn, event_id):
         """Get all the hashes for a given PDU.
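One detail worth noting in these SignatureStore hunks: buffer(hash_bytes) is the Python 2 way of handing raw bytes to the sqlite driver, so the digest is stored in a BLOB column rather than being treated as text (this code predates Python 3). A rough Python 2 sketch of the same kind of write; the column layout is assumed, not taken from this diff:

import hashlib
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE event_content_hashes ("
    " event_id TEXT,"
    " algorithm TEXT,"
    " hash BLOB,"
    " UNIQUE (event_id, algorithm)"
    ")"
)

digest = hashlib.sha256(b"canonical event json").digest()

# buffer() (a Python 2 built-in) marks the value as binary; combined
# with OR IGNORE, re-hashing and re-storing the same event is harmless
# because the duplicate row is simply skipped.
conn.execute(
    "INSERT OR IGNORE INTO event_content_hashes VALUES (?, ?, ?)",
    ("$ev:hs", "sha256", buffer(digest)),
)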
@@ -212,11 +217,16 @@ class SignatureStore(SQLBaseStore):
             algorithm (str): Hashing algorithm.
             hash_bytes (bytes): Hash function output bytes.
         """
-        self._simple_insert_txn(txn, "event_reference_hashes", {
-            "event_id": event_id,
-            "algorithm": algorithm,
-            "hash": buffer(hash_bytes),
-        })
+        self._simple_insert_txn(
+            txn,
+            "event_reference_hashes",
+            {
+                "event_id": event_id,
+                "algorithm": algorithm,
+                "hash": buffer(hash_bytes),
+            },
+            or_ignore=True,
+        )
 
 
     def _get_event_origin_signatures_txn(self, txn, event_id):
@@ -245,12 +255,17 @@ class SignatureStore(SQLBaseStore):
             key_id (str): Id for the signing key.
             signature (bytes): The signature.
         """
-        self._simple_insert_txn(txn, "event_origin_signatures", {
-            "event_id": event_id,
-            "origin": origin,
-            "key_id": key_id,
-            "signature": buffer(signature_bytes),
-        })
+        self._simple_insert_txn(
+            txn,
+            "event_origin_signatures",
+            {
+                "event_id": event_id,
+                "origin": origin,
+                "key_id": key_id,
+                "signature": buffer(signature_bytes),
+            },
+            or_ignore=True,
+        )
 
     def _get_prev_event_hashes_txn(self, txn, event_id):
         """Get all the hashes for previous PDUs of a PDU
@@ -274,9 +289,14 @@ class SignatureStore(SQLBaseStore):
 
     def _store_prev_event_hash_txn(self, txn, event_id, prev_event_id,
                                    algorithm, hash_bytes):
-        self._simple_insert_txn(txn, "event_edge_hashes", {
-            "event_id": event_id,
-            "prev_event_id": prev_event_id,
-            "algorithm": algorithm,
-            "hash": buffer(hash_bytes),
-        })
+        self._simple_insert_txn(
+            txn,
+            "event_edge_hashes",
+            {
+                "event_id": event_id,
+                "prev_event_id": prev_event_id,
+                "algorithm": algorithm,
+                "hash": buffer(hash_bytes),
+            },
+            or_ignore=True,
+        )