Require SQLite >= 3.27.0 (#13760)

This commit is contained in:
David Robertson 2022-09-09 11:14:10 +01:00 committed by GitHub
parent 69fa29700e
commit f2d2481e56
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
9 changed files with 103 additions and 205 deletions

View file

@@ -0,0 +1 @@
Synapse will now refuse to start if configured to use SQLite < 3.27.

View file

@@ -533,7 +533,6 @@ class DatabasePool:
if isinstance(self.engine, Sqlite3Engine): if isinstance(self.engine, Sqlite3Engine):
self._unsafe_to_upsert_tables.add("user_directory_search") self._unsafe_to_upsert_tables.add("user_directory_search")
if self.engine.can_native_upsert:
# Check ASAP (and then later, every 1s) to see if we have finished # Check ASAP (and then later, every 1s) to see if we have finished
# background updates of tables that aren't safe to update. # background updates of tables that aren't safe to update.
self._clock.call_later( self._clock.call_later(
@@ -1160,11 +1159,8 @@ class DatabasePool:
attempts = 0 attempts = 0
while True: while True:
try: try:
# We can autocommit if we are going to use native upserts # We can autocommit if it is safe to upsert
autocommit = ( autocommit = table not in self._unsafe_to_upsert_tables
self.engine.can_native_upsert
and table not in self._unsafe_to_upsert_tables
)
return await self.runInteraction( return await self.runInteraction(
desc, desc,
@@ -1199,7 +1195,7 @@ class DatabasePool:
) -> bool: ) -> bool:
""" """
Pick the UPSERT method which works best on the platform. Either the Pick the UPSERT method which works best on the platform. Either the
native one (Pg9.5+, recent SQLites), or fall back to an emulated method. native one (Pg9.5+, SQLite >= 3.24), or fall back to an emulated method.
Args: Args:
txn: The transaction to use. txn: The transaction to use.
@@ -1207,14 +1203,15 @@ class DatabasePool:
keyvalues: The unique key tables and their new values keyvalues: The unique key tables and their new values
values: The nonunique columns and their new values values: The nonunique columns and their new values
insertion_values: additional key/values to use only when inserting insertion_values: additional key/values to use only when inserting
lock: True to lock the table when doing the upsert. lock: True to lock the table when doing the upsert. Unused when performing
a native upsert.
Returns: Returns:
Returns True if a row was inserted or updated (i.e. if `values` is Returns True if a row was inserted or updated (i.e. if `values` is
not empty then this always returns True) not empty then this always returns True)
""" """
insertion_values = insertion_values or {} insertion_values = insertion_values or {}
if self.engine.can_native_upsert and table not in self._unsafe_to_upsert_tables: if table not in self._unsafe_to_upsert_tables:
return self.simple_upsert_txn_native_upsert( return self.simple_upsert_txn_native_upsert(
txn, table, keyvalues, values, insertion_values=insertion_values txn, table, keyvalues, values, insertion_values=insertion_values
) )
@@ -1365,14 +1362,12 @@ class DatabasePool:
value_names: The value column names value_names: The value column names
value_values: A list of each row's value column values. value_values: A list of each row's value column values.
Ignored if value_names is empty. Ignored if value_names is empty.
lock: True to lock the table when doing the upsert. Unused if the database engine lock: True to lock the table when doing the upsert. Unused when performing
supports native upserts. a native upsert.
""" """
# We can autocommit if we are going to use native upserts # We can autocommit if it is safe to upsert
autocommit = ( autocommit = table not in self._unsafe_to_upsert_tables
self.engine.can_native_upsert and table not in self._unsafe_to_upsert_tables
)
await self.runInteraction( await self.runInteraction(
desc, desc,
@@ -1406,10 +1401,10 @@ class DatabasePool:
value_names: The value column names value_names: The value column names
value_values: A list of each row's value column values. value_values: A list of each row's value column values.
Ignored if value_names is empty. Ignored if value_names is empty.
lock: True to lock the table when doing the upsert. Unused if the database engine lock: True to lock the table when doing the upsert. Unused when performing
supports native upserts. a native upsert.
""" """
if self.engine.can_native_upsert and table not in self._unsafe_to_upsert_tables: if table not in self._unsafe_to_upsert_tables:
return self.simple_upsert_many_txn_native_upsert( return self.simple_upsert_many_txn_native_upsert(
txn, table, key_names, key_values, value_names, value_values txn, table, key_names, key_values, value_names, value_values
) )

View file

@@ -129,8 +129,6 @@ class LockStore(SQLBaseStore):
now = self._clock.time_msec() now = self._clock.time_msec()
token = random_string(6) token = random_string(6)
if self.db_pool.engine.can_native_upsert:
def _try_acquire_lock_txn(txn: LoggingTransaction) -> bool: def _try_acquire_lock_txn(txn: LoggingTransaction) -> bool:
# We take out the lock if either a) there is no row for the lock # We take out the lock if either a) there is no row for the lock
# already, b) the existing row has timed out, or c) the row is # already, b) the existing row has timed out, or c) the row is
@@ -174,47 +172,6 @@ class LockStore(SQLBaseStore):
if not did_lock: if not did_lock:
return None return None
else:
# If we're on an old SQLite we emulate the above logic by first
# clearing out any existing stale locks and then upserting.
def _try_acquire_lock_emulated_txn(txn: LoggingTransaction) -> bool:
sql = """
DELETE FROM worker_locks
WHERE
lock_name = ?
AND lock_key = ?
AND (last_renewed_ts < ? OR instance_name = ?)
"""
txn.execute(
sql,
(lock_name, lock_key, now - _LOCK_TIMEOUT_MS, self._instance_name),
)
inserted = self.db_pool.simple_upsert_txn_emulated(
txn,
table="worker_locks",
keyvalues={
"lock_name": lock_name,
"lock_key": lock_key,
},
values={},
insertion_values={
"token": token,
"last_renewed_ts": self._clock.time_msec(),
"instance_name": self._instance_name,
},
)
return inserted
did_lock = await self.db_pool.runInteraction(
"try_acquire_lock_emulated", _try_acquire_lock_emulated_txn
)
if not did_lock:
return None
lock = Lock( lock = Lock(
self._reactor, self._reactor,
self._clock, self._clock,

View file

@@ -446,7 +446,6 @@ class StatsStore(StateDeltasStore):
absolutes: Absolute (set) fields absolutes: Absolute (set) fields
additive_relatives: Fields that will be added onto if existing row present. additive_relatives: Fields that will be added onto if existing row present.
""" """
if self.database_engine.can_native_upsert:
absolute_updates = [ absolute_updates = [
"%(field)s = EXCLUDED.%(field)s" % {"field": field} "%(field)s = EXCLUDED.%(field)s" % {"field": field}
for field in absolutes.keys() for field in absolutes.keys()
@@ -482,23 +481,6 @@ class StatsStore(StateDeltasStore):
} }
txn.execute(sql, qargs) txn.execute(sql, qargs)
else:
self.database_engine.lock_table(txn, table)
retcols = list(chain(absolutes.keys(), additive_relatives.keys()))
current_row = self.db_pool.simple_select_one_txn(
txn, table, keyvalues, retcols, allow_none=True
)
if current_row is None:
merged_dict = {**keyvalues, **absolutes, **additive_relatives}
self.db_pool.simple_insert_txn(txn, table, merged_dict)
else:
for (key, val) in additive_relatives.items():
if current_row[key] is None:
current_row[key] = val
else:
current_row[key] += val
current_row.update(absolutes)
self.db_pool.simple_update_one_txn(txn, table, keyvalues, current_row)
async def _calculate_and_set_initial_state_for_room(self, room_id: str) -> None: async def _calculate_and_set_initial_state_for_room(self, room_id: str) -> None:
"""Calculate and insert an entry into room_stats_current. """Calculate and insert an entry into room_stats_current.

View file

@@ -221,7 +221,6 @@ class TransactionWorkerStore(CacheInvalidationWorkerStore):
retry_interval: how long until next retry in ms retry_interval: how long until next retry in ms
""" """
if self.database_engine.can_native_upsert:
await self.db_pool.runInteraction( await self.db_pool.runInteraction(
"set_destination_retry_timings", "set_destination_retry_timings",
self._set_destination_retry_timings_native, self._set_destination_retry_timings_native,
@@ -229,16 +228,7 @@ class TransactionWorkerStore(CacheInvalidationWorkerStore):
failure_ts, failure_ts,
retry_last_ts, retry_last_ts,
retry_interval, retry_interval,
db_autocommit=True, # Safe as its a single upsert db_autocommit=True, # Safe as it's a single upsert
)
else:
await self.db_pool.runInteraction(
"set_destination_retry_timings",
self._set_destination_retry_timings_emulated,
destination,
failure_ts,
retry_last_ts,
retry_interval,
) )
def _set_destination_retry_timings_native( def _set_destination_retry_timings_native(
@@ -249,8 +239,6 @@ class TransactionWorkerStore(CacheInvalidationWorkerStore):
retry_last_ts: int, retry_last_ts: int,
retry_interval: int, retry_interval: int,
) -> None: ) -> None:
assert self.database_engine.can_native_upsert
# Upsert retry time interval if retry_interval is zero (i.e. we're # Upsert retry time interval if retry_interval is zero (i.e. we're
# resetting it) or greater than the existing retry interval. # resetting it) or greater than the existing retry interval.
# #

View file

@@ -43,14 +43,6 @@ class BaseDatabaseEngine(Generic[ConnectionType], metaclass=abc.ABCMeta):
def single_threaded(self) -> bool: def single_threaded(self) -> bool:
... ...
@property
@abc.abstractmethod
def can_native_upsert(self) -> bool:
"""
Do we support native UPSERTs?
"""
...
@property @property
@abc.abstractmethod @abc.abstractmethod
def supports_using_any_list(self) -> bool: def supports_using_any_list(self) -> bool:

View file

@@ -158,13 +158,6 @@ class PostgresEngine(BaseDatabaseEngine[psycopg2.extensions.connection]):
cursor.close() cursor.close()
db_conn.commit() db_conn.commit()
@property
def can_native_upsert(self) -> bool:
"""
Can we use native UPSERTs?
"""
return True
@property @property
def supports_using_any_list(self) -> bool: def supports_using_any_list(self) -> bool:
"""Do we support using `a = ANY(?)` and passing a list""" """Do we support using `a = ANY(?)` and passing a list"""

View file

@@ -48,14 +48,6 @@ class Sqlite3Engine(BaseDatabaseEngine[sqlite3.Connection]):
def single_threaded(self) -> bool: def single_threaded(self) -> bool:
return True return True
@property
def can_native_upsert(self) -> bool:
"""
Do we support native UPSERTs? This requires SQLite3 3.24+, plus some
more work we haven't done yet to tell what was inserted vs updated.
"""
return sqlite3.sqlite_version_info >= (3, 24, 0)
@property @property
def supports_using_any_list(self) -> bool: def supports_using_any_list(self) -> bool:
"""Do we support using `a = ANY(?)` and passing a list""" """Do we support using `a = ANY(?)` and passing a list"""
@@ -70,12 +62,11 @@ class Sqlite3Engine(BaseDatabaseEngine[sqlite3.Connection]):
self, db_conn: sqlite3.Connection, allow_outdated_version: bool = False self, db_conn: sqlite3.Connection, allow_outdated_version: bool = False
) -> None: ) -> None:
if not allow_outdated_version: if not allow_outdated_version:
version = sqlite3.sqlite_version_info
# Synapse is untested against older SQLite versions, and we don't want # Synapse is untested against older SQLite versions, and we don't want
# to let users upgrade to a version of Synapse with broken support for their # to let users upgrade to a version of Synapse with broken support for their
# sqlite version, because it risks leaving them with a half-upgraded db. # sqlite version, because it risks leaving them with a half-upgraded db.
if version < (3, 22, 0): if sqlite3.sqlite_version_info < (3, 27, 0):
raise RuntimeError("Synapse requires sqlite 3.22 or above.") raise RuntimeError("Synapse requires sqlite 3.27 or above.")
def check_new_database(self, txn: Cursor) -> None: def check_new_database(self, txn: Cursor) -> None:
"""Gets called when setting up a brand new database. This allows us to """Gets called when setting up a brand new database. This allows us to

View file

@@ -54,7 +54,6 @@ class SQLBaseStoreTestCase(unittest.TestCase):
sqlite_config = {"name": "sqlite3"} sqlite_config = {"name": "sqlite3"}
engine = create_engine(sqlite_config) engine = create_engine(sqlite_config)
fake_engine = Mock(wraps=engine) fake_engine = Mock(wraps=engine)
fake_engine.can_native_upsert = False
fake_engine.in_transaction.return_value = False fake_engine.in_transaction.return_value = False
db = DatabasePool(Mock(), Mock(config=sqlite_config), fake_engine) db = DatabasePool(Mock(), Mock(config=sqlite_config), fake_engine)