diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index d77d7792f0..c0aff79141 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -91,10 +91,19 @@ jobs: rm -rf /tmp/.buildx-cache mv /tmp/.buildx-cache-new /tmp/.buildx-cache + - name: Artifact name + id: artifact-name + # We can't have colons in the upload name of the artifact, so we convert + # e.g. `debian:sid` to `sid`. + env: + DISTRO: ${{ matrix.distro }} + run: | + echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT" + - name: Upload debs as artifacts uses: actions/upload-artifact@v4 with: - name: debs + name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }} path: debs/* build-wheels: @@ -102,7 +111,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-22.04, macos-12] + os: [ubuntu-22.04, macos-13] arch: [x86_64, aarch64] # is_pr is a flag used to exclude certain jobs from the matrix on PRs. # It is not read by the rest of the workflow. @@ -112,9 +121,9 @@ jobs: exclude: # Don't build macos wheels on PR CI. - is_pr: true - os: "macos-12" + os: "macos-13" # Don't build aarch64 wheels on mac. - - os: "macos-12" + - os: "macos-13" arch: aarch64 # Don't build aarch64 wheels on PR CI. - is_pr: true @@ -196,17 +205,18 @@ jobs: - name: Download all workflow run artifacts uses: actions/download-artifact@v4 - name: Build a tarball for the debs - run: tar -cvJf debs.tar.xz debs + # We need to merge all the debs uploads into one folder, then compress + # that. + run: | + mkdir debs + mv debs*/* debs/ + tar -cvJf debs.tar.xz debs - name: Attach to release - uses: softprops/action-gh-release@a929a66f232c1b11af63782948aa2210f981808a # PR#109 + uses: softprops/action-gh-release@v2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: files: | Sdist/* - Wheel/* + Wheel*/* debs.tar.xz - # if it's not already published, keep the release as a draft. - draft: true - # mark it as a prerelease if the tag contains 'rc'. - prerelease: ${{ contains(github.ref, 'rc') }} diff --git a/CHANGES.md b/CHANGES.md index bcc8340210..be5c18c84b 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,68 @@ +# Synapse 1.119.0 (2024-11-13) + +No significant changes since 1.119.0rc2. + +### Python 3.8 support dropped + +Python 3.8 is [end-of-life](https://devguide.python.org/versions/) and is no longer supported by Synapse. The minimum supported Python version is now 3.9. + +If you are running Synapse with Python 3.8, please upgrade to Python 3.9 (or greater) before upgrading Synapse. + + +# Synapse 1.119.0rc2 (2024-11-11) + +Note that due to packaging issues there was no v1.119.0rc1. + + +### Features + +- Support [MSC4151](https://github.com/matrix-org/matrix-spec-proposals/pull/4151)'s stable report room API. ([\#17374](https://github.com/element-hq/synapse/issues/17374)) +- Add experimental support for [MSC4222](https://github.com/matrix-org/matrix-spec-proposals/pull/4222) (Adding `state_after` to sync v2). ([\#17888](https://github.com/element-hq/synapse/issues/17888)) + +### Bugfixes + +- Fix bug with sliding sync where `$LAZY`-loading room members would not return `required_state` membership in incremental syncs. ([\#17809](https://github.com/element-hq/synapse/issues/17809)) +- Check if user has membership in a room before tagging it. Contributed by Lama Alosaimi. 
([\#17839](https://github.com/element-hq/synapse/issues/17839)) +- Fix a bug in the admin redact endpoint where the background task would not run if a worker was specified in + the config option `run_background_tasks_on`. ([\#17847](https://github.com/element-hq/synapse/issues/17847)) +- Fix bug where some presence and typing timeouts can expire early. ([\#17850](https://github.com/element-hq/synapse/issues/17850)) +- Fix detection when the built Rust library was outdated when using source installations. ([\#17861](https://github.com/element-hq/synapse/issues/17861)) +- Fix a long-standing bug in Synapse which could cause one-time keys to be issued in the incorrect order, causing message decryption failures. ([\#17903](https://github.com/element-hq/synapse/pull/17903)) +- Fix experimental support for [MSC4222](https://github.com/matrix-org/matrix-spec-proposals/pull/4222) (Adding `state_after` to sync v2) where we would return the full state on incremental syncs when using lazy loaded members and there were no new events in the timeline. ([\#17915](https://github.com/element-hq/synapse/pull/17915)) + +### Internal Changes + +- Remove support for python 3.8. ([\#17908](https://github.com/element-hq/synapse/issues/17908)) +- Add a test for downloading and thumbnailing a CMYK JPEG. ([\#17786](https://github.com/element-hq/synapse/issues/17786)) +- Refactor database calls to remove `Generator` usage. ([\#17813](https://github.com/element-hq/synapse/issues/17813), [\#17814](https://github.com/element-hq/synapse/issues/17814), [\#17815](https://github.com/element-hq/synapse/issues/17815), [\#17816](https://github.com/element-hq/synapse/issues/17816), [\#17817](https://github.com/element-hq/synapse/issues/17817), [\#17818](https://github.com/element-hq/synapse/issues/17818), [\#17890](https://github.com/element-hq/synapse/issues/17890)) +- Include the destination in the error of 'Destination mismatch' on federation requests. ([\#17830](https://github.com/element-hq/synapse/issues/17830)) +- The nix flake inside the repository no longer tracks nixpkgs/master to not catch the latest bugs from a PR merged 5 minutes ago. ([\#17852](https://github.com/element-hq/synapse/issues/17852)) +- Minor speed-up of sliding sync by computing extensions results in parallel. ([\#17884](https://github.com/element-hq/synapse/issues/17884)) +- Bump the default Python version in the Synapse Dockerfile from 3.11 -> 3.12. ([\#17887](https://github.com/element-hq/synapse/issues/17887)) +- Remove usage of internal header encoding API. ([\#17894](https://github.com/element-hq/synapse/issues/17894)) +- Use unique name for each os.arch variant when uploading Wheel artifacts. ([\#17905](https://github.com/element-hq/synapse/issues/17905)) +- Fix tests to run with latest Twisted. ([\#17906](https://github.com/element-hq/synapse/pull/17906), [\#17907](https://github.com/element-hq/synapse/pull/17907), [\#17911](https://github.com/element-hq/synapse/pull/17911)) +- Update version constraint to allow the latest poetry-core 1.9.1. ([\#17902](https://github.com/element-hq/synapse/pull/17902)) +- Update the portdb CI to use Python 3.13 and Postgres 17 as latest dependencies. ([\#17909](https://github.com/element-hq/synapse/pull/17909)) +- Add an index to `current_state_delta_stream` table. ([\#17912](https://github.com/element-hq/synapse/issues/17912)) +- Fix building and attaching release artifacts during the release process. 
([\#17921](https://github.com/element-hq/synapse/issues/17921)) + +### Updates to locked dependencies + +* Bump actions/download-artifact & actions/upload-artifact from 3 to 4 in /.github/workflows. ([\#17657](https://github.com/element-hq/synapse/issues/17657)) +* Bump anyhow from 1.0.89 to 1.0.92. ([\#17858](https://github.com/element-hq/synapse/issues/17858), [\#17876](https://github.com/element-hq/synapse/issues/17876), [\#17901](https://github.com/element-hq/synapse/issues/17901)) +* Bump bytes from 1.7.2 to 1.8.0. ([\#17877](https://github.com/element-hq/synapse/issues/17877)) +* Bump cryptography from 43.0.1 to 43.0.3. ([\#17853](https://github.com/element-hq/synapse/issues/17853)) +* Bump mypy-zope from 1.0.7 to 1.0.8. ([\#17898](https://github.com/element-hq/synapse/issues/17898)) +* Bump phonenumbers from 8.13.47 to 8.13.49. ([\#17880](https://github.com/element-hq/synapse/issues/17880), [\#17899](https://github.com/element-hq/synapse/issues/17899)) +* Bump python-multipart from 0.0.12 to 0.0.16. ([\#17879](https://github.com/element-hq/synapse/issues/17879)) +* Bump regex from 1.11.0 to 1.11.1. ([\#17874](https://github.com/element-hq/synapse/issues/17874)) +* Bump ruff from 0.6.9 to 0.7.2. ([\#17868](https://github.com/element-hq/synapse/issues/17868), [\#17897](https://github.com/element-hq/synapse/issues/17897)) +* Bump serde from 1.0.210 to 1.0.214. ([\#17875](https://github.com/element-hq/synapse/issues/17875), [\#17900](https://github.com/element-hq/synapse/issues/17900)) +* Bump serde_json from 1.0.128 to 1.0.132. ([\#17857](https://github.com/element-hq/synapse/issues/17857)) +* Bump types-psycopg2 from 2.9.21.20240819 to 2.9.21.20241019. ([\#17855](https://github.com/element-hq/synapse/issues/17855)) +* Bump types-setuptools from 75.1.0.20241014 to 75.2.0.20241019. ([\#17856](https://github.com/element-hq/synapse/issues/17856)) + # Synapse 1.118.0 (2024-10-29) No significant changes since 1.118.0rc1. diff --git a/Cargo.lock b/Cargo.lock index 2e23b95f85..46c930ebd7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -13,9 +13,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.92" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74f37166d7d48a0284b99dd824694c26119c700b53bf0d1540cdb147dbdaaf13" +checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" [[package]] name = "arc-swap" @@ -485,18 +485,18 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "serde" -version = "1.0.214" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5" +checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.214" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766" +checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" dependencies = [ "proc-macro2", "quote", diff --git a/changelog.d/17374.feature b/changelog.d/17374.feature deleted file mode 100644 index 3321f18947..0000000000 --- a/changelog.d/17374.feature +++ /dev/null @@ -1 +0,0 @@ -Support [MSC4151](https://github.com/matrix-org/matrix-spec-proposals/pull/4151)'s stable report room API. 
\ No newline at end of file diff --git a/changelog.d/17638.removal b/changelog.d/17638.removal new file mode 100644 index 0000000000..1bb09e976e --- /dev/null +++ b/changelog.d/17638.removal @@ -0,0 +1 @@ +Remove support for closed [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886). \ No newline at end of file diff --git a/changelog.d/17657.misc b/changelog.d/17657.misc deleted file mode 100644 index aff558adf7..0000000000 --- a/changelog.d/17657.misc +++ /dev/null @@ -1 +0,0 @@ -Bump actions/download-artifact and actions/upload-artifact from v3 -> v4. diff --git a/changelog.d/17786.misc b/changelog.d/17786.misc deleted file mode 100644 index 59eb3f4dbd..0000000000 --- a/changelog.d/17786.misc +++ /dev/null @@ -1 +0,0 @@ -Add a test for downloading and thumbnailing a CMYK JPEG. \ No newline at end of file diff --git a/changelog.d/17809.bugfix b/changelog.d/17809.bugfix deleted file mode 100644 index e244a36bd3..0000000000 --- a/changelog.d/17809.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug with sliding sync where `$LAZY`-loading room members would not return `required_state` membership in incremental syncs. diff --git a/changelog.d/17813.misc b/changelog.d/17813.misc deleted file mode 100644 index f8676aee59..0000000000 --- a/changelog.d/17813.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor database calls to remove `Generator` usage. diff --git a/changelog.d/17814.misc b/changelog.d/17814.misc deleted file mode 100644 index f8676aee59..0000000000 --- a/changelog.d/17814.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor database calls to remove `Generator` usage. diff --git a/changelog.d/17815.misc b/changelog.d/17815.misc deleted file mode 100644 index f8676aee59..0000000000 --- a/changelog.d/17815.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor database calls to remove `Generator` usage. diff --git a/changelog.d/17816.misc b/changelog.d/17816.misc deleted file mode 100644 index f8676aee59..0000000000 --- a/changelog.d/17816.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor database calls to remove `Generator` usage. diff --git a/changelog.d/17817.misc b/changelog.d/17817.misc deleted file mode 100644 index f8676aee59..0000000000 --- a/changelog.d/17817.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor database calls to remove `Generator` usage. diff --git a/changelog.d/17818.misc b/changelog.d/17818.misc deleted file mode 100644 index f8676aee59..0000000000 --- a/changelog.d/17818.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor database calls to remove `Generator` usage. diff --git a/changelog.d/17830.misc b/changelog.d/17830.misc deleted file mode 100644 index b2cd91c0ad..0000000000 --- a/changelog.d/17830.misc +++ /dev/null @@ -1 +0,0 @@ -Include the destination in the error of 'Destination mismatch' on federation requests. diff --git a/changelog.d/17839.bugfix b/changelog.d/17839.bugfix deleted file mode 100644 index 57667a6df5..0000000000 --- a/changelog.d/17839.bugfix +++ /dev/null @@ -1 +0,0 @@ -Check if user has membership in a room before tagging it. Contributed by Lama Alosaimi. \ No newline at end of file diff --git a/changelog.d/17847.bugfix b/changelog.d/17847.bugfix deleted file mode 100644 index 0ba39df94d..0000000000 --- a/changelog.d/17847.bugfix +++ /dev/null @@ -1,2 +0,0 @@ -Fix a bug in the admin redact endpoint where the background task would not run if a worker was specified in -the config option `run_background_tasks_on`. 
\ No newline at end of file diff --git a/changelog.d/17850.bugfix b/changelog.d/17850.bugfix deleted file mode 100644 index 8ea99c4ef9..0000000000 --- a/changelog.d/17850.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug when some presence and typing timeouts can expire early. \ No newline at end of file diff --git a/changelog.d/17852.misc b/changelog.d/17852.misc deleted file mode 100644 index b1b7ac9734..0000000000 --- a/changelog.d/17852.misc +++ /dev/null @@ -1 +0,0 @@ -The nix flake inside the repository no longer tracks nixpkgs/master to not catch the latest bugs from a PR merged 5 minutes ago. diff --git a/changelog.d/17861.bugfix b/changelog.d/17861.bugfix deleted file mode 100644 index abee7a30f7..0000000000 --- a/changelog.d/17861.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix detection when the built Rust library was outdated when using source installations. diff --git a/changelog.d/17865.misc b/changelog.d/17865.misc new file mode 100644 index 0000000000..2303a7e1b7 --- /dev/null +++ b/changelog.d/17865.misc @@ -0,0 +1 @@ +Addressed some typos in docs and returned error message for unknown MXC ID. diff --git a/changelog.d/17884.misc b/changelog.d/17884.misc deleted file mode 100644 index 9dfa13f853..0000000000 --- a/changelog.d/17884.misc +++ /dev/null @@ -1 +0,0 @@ -Minor speed-up of sliding sync by computing extensions results in parallel. diff --git a/changelog.d/17887.misc b/changelog.d/17887.misc deleted file mode 100644 index 6be32caee6..0000000000 --- a/changelog.d/17887.misc +++ /dev/null @@ -1 +0,0 @@ -Bump the default Python version in the Synapse Dockerfile from 3.11 -> 3.12. \ No newline at end of file diff --git a/changelog.d/17888.feature b/changelog.d/17888.feature deleted file mode 100644 index 3ede8886ab..0000000000 --- a/changelog.d/17888.feature +++ /dev/null @@ -1 +0,0 @@ -Add experimental support for [MSC4222](https://github.com/matrix-org/matrix-spec-proposals/pull/4222). diff --git a/changelog.d/17890.misc b/changelog.d/17890.misc deleted file mode 100644 index f8676aee59..0000000000 --- a/changelog.d/17890.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor database calls to remove `Generator` usage. diff --git a/changelog.d/17894.misc b/changelog.d/17894.misc deleted file mode 100644 index dc1a7577ab..0000000000 --- a/changelog.d/17894.misc +++ /dev/null @@ -1 +0,0 @@ -Remove usage of internal header encoding API. diff --git a/changelog.d/17902.misc b/changelog.d/17902.misc deleted file mode 100644 index f094f57c2f..0000000000 --- a/changelog.d/17902.misc +++ /dev/null @@ -1 +0,0 @@ -Update version constraint to allow the latest poetry-core 1.9.1. diff --git a/changelog.d/17903.bugfix b/changelog.d/17903.bugfix deleted file mode 100644 index a4d02fc983..0000000000 --- a/changelog.d/17903.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a long-standing bug in Synapse which could cause one-time keys to be issued in the incorrect order, causing message decryption failures. diff --git a/changelog.d/17905.misc b/changelog.d/17905.misc deleted file mode 100644 index 32ef50dbac..0000000000 --- a/changelog.d/17905.misc +++ /dev/null @@ -1 +0,0 @@ -Use unique name for each os.arch variant when uploading Wheel artifacts. diff --git a/changelog.d/17906.bugfix b/changelog.d/17906.bugfix deleted file mode 100644 index f38ce6a590..0000000000 --- a/changelog.d/17906.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix tests to run with latest Twisted. 
diff --git a/changelog.d/17907.bugfix b/changelog.d/17907.bugfix deleted file mode 100644 index f38ce6a590..0000000000 --- a/changelog.d/17907.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix tests to run with latest Twisted. diff --git a/changelog.d/17908.misc b/changelog.d/17908.misc deleted file mode 100644 index 8f17729148..0000000000 --- a/changelog.d/17908.misc +++ /dev/null @@ -1 +0,0 @@ -Remove support for python 3.8. diff --git a/changelog.d/17909.misc b/changelog.d/17909.misc deleted file mode 100644 index f826aa7948..0000000000 --- a/changelog.d/17909.misc +++ /dev/null @@ -1 +0,0 @@ -Update the portdb CI to use Python 3.13 and Postgres 17 as latest dependencies. \ No newline at end of file diff --git a/changelog.d/17911.bugfix b/changelog.d/17911.bugfix deleted file mode 100644 index f38ce6a590..0000000000 --- a/changelog.d/17911.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix tests to run with latest Twisted. diff --git a/changelog.d/17913.doc b/changelog.d/17913.doc new file mode 100644 index 0000000000..39f5979562 --- /dev/null +++ b/changelog.d/17913.doc @@ -0,0 +1 @@ +Clarify the semantics of the `enable_authenticated_media` configuration option. diff --git a/changelog.d/17923.misc b/changelog.d/17923.misc new file mode 100644 index 0000000000..4d74e7e184 --- /dev/null +++ b/changelog.d/17923.misc @@ -0,0 +1 @@ +Unpin the upload release GHA action. diff --git a/changelog.d/17924.misc b/changelog.d/17924.misc new file mode 100644 index 0000000000..c7cc502360 --- /dev/null +++ b/changelog.d/17924.misc @@ -0,0 +1 @@ +Bump macos version used to build wheels during release, as current version used is end-of-life. diff --git a/changelog.d/17928.misc b/changelog.d/17928.misc new file mode 100644 index 0000000000..b5aef4457a --- /dev/null +++ b/changelog.d/17928.misc @@ -0,0 +1 @@ +Move server event filtering logic to rust. diff --git a/changelog.d/17931.doc b/changelog.d/17931.doc new file mode 100644 index 0000000000..9207cb0a1c --- /dev/null +++ b/changelog.d/17931.doc @@ -0,0 +1 @@ +Add documentation about backing up Synapse. diff --git a/debian/changelog b/debian/changelog index 384887888f..bacd453cb4 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,21 @@ +matrix-synapse-py3 (1.119.0) stable; urgency=medium + + * New Synapse release 1.119.0. + + -- Synapse Packaging team Wed, 13 Nov 2024 13:57:51 +0000 + +matrix-synapse-py3 (1.119.0~rc2) stable; urgency=medium + + * New Synapse release 1.119.0rc2. + + -- Synapse Packaging team Mon, 11 Nov 2024 14:33:02 +0000 + +matrix-synapse-py3 (1.119.0~rc1) stable; urgency=medium + + * New Synapse release 1.119.0rc1. + + -- Synapse Packaging team Wed, 06 Nov 2024 08:59:43 -0700 + matrix-synapse-py3 (1.118.0) stable; urgency=medium * New Synapse release 1.118.0. 
diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index c50121d5f7..fd91d9fa11 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -54,6 +54,7 @@ - [Using `synctl` with Workers](synctl_workers.md) - [Systemd](systemd-with-workers/README.md) - [Administration](usage/administration/README.md) + - [Backups](usage/administration/backups.md) - [Admin API](usage/administration/admin_api/README.md) - [Account Validity](admin_api/account_validity.md) - [Background Updates](usage/administration/admin_api/background_updates.md) diff --git a/docs/postgres.md b/docs/postgres.md index d06f0cda10..51670667e8 100644 --- a/docs/postgres.md +++ b/docs/postgres.md @@ -100,6 +100,10 @@ database: keepalives_count: 3 ``` +## Backups + +Don't forget to [back up](./usage/administration/backups.md#database) your database! + ## Tuning Postgres The default settings should be fine for most deployments. For larger diff --git a/docs/setup/installation.md b/docs/setup/installation.md index d717880aa5..bfeacab375 100644 --- a/docs/setup/installation.md +++ b/docs/setup/installation.md @@ -656,6 +656,10 @@ This also requires the optional `lxml` python dependency to be installed. This in turn requires the `libxml2` library to be available - on Debian/Ubuntu this means `apt-get install libxml2-dev`, or equivalent for your OS. +### Backups + +Don't forget to take [backups](../usage/administration/backups.md) of your new server! + ### Troubleshooting Installation `pip` seems to leak *lots* of memory during installation. For instance, a Linux diff --git a/docs/upgrade.md b/docs/upgrade.md index ea9824a5ee..9f12d7c34f 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -117,6 +117,17 @@ each upgrade are complete before moving on to the next upgrade, to avoid stacking them up. You can monitor the currently running background updates with [the Admin API](usage/administration/admin_api/background_updates.html#status). +# Upgrading to v1.120.0 + +## Removal of experimental MSC3886 feature + +[MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886) +has been closed (and will not enter the Matrix spec). As such, we are +removing the experimental support for it in this release. + +The `experimental_features.msc3886_endpoint` configuration option has +been removed. + # Upgrading to v1.119.0 ## Minimum supported Python version diff --git a/docs/usage/administration/backups.md b/docs/usage/administration/backups.md new file mode 100644 index 0000000000..24d250179b --- /dev/null +++ b/docs/usage/administration/backups.md @@ -0,0 +1,125 @@ +# How to back up a Synapse homeserver + +It is critical to maintain good backups of your server, to guard against +hardware failure as well as potential corruption due to bugs or administrator +error. + +This page documents the things you will need to consider backing up as part of +a Synapse installation. + +## Configuration files + +Keep a copy of your configuration file (`homeserver.yaml`), as well as any +auxiliary config files it refers to such as the +[`log_config`](../configuration/config_documentation.md#log_config) file, +[`app_service_config_files`](../configuration/config_documentation.md#app_service_config_files). +Often, all such config files will be kept in a single directory such as +`/etc/synapse`, which will make this easier. + +## Server signing key + +Your server has a [signing +key](../configuration/config_documentation.md#signing_key_path) which it uses +to sign events and outgoing federation requests. 
It is easiest to back it up +with your configuration files, but an alternative is to have Synapse create a +new signing key if you have to restore. + +If you do decide to replace the signing key, you should add the old *public* +key to +[`old_signing_keys`](../configuration/config_documentation.md#old_signing_keys). + +## Database + +Synapse's support for SQLite is only suitable for testing purposes, so for the +purposes of this document, we'll assume you are using +[PostgreSQL](../../postgres.md). + +A full discussion of backup strategies for PostgreSQL is out of scope for this +document; see the [PostgreSQL +documentation](https://www.postgresql.org/docs/current/backup.html) for +detailed information. + +### Synapse-specific details + + * Be very careful not to restore into a database that already has tables + present. At best, this will error; at worst, it will lead to subtle database + inconsistencies. + + * The `e2e_one_time_keys_json` table should **not** be backed up, or if it is + backed up, should be + [`TRUNCATE`d](https://www.postgresql.org/docs/current/sql-truncate.html) + after restoring the database before Synapse is started. + + [Background: restoring the database to an older backup can cause + used one-time-keys to be re-issued, causing subsequent [message decryption + errors](https://github.com/element-hq/element-meta/issues/2155). Clearing + all one-time-keys from the database ensures that this cannot happen, and + will prompt clients to generate and upload new one-time-keys.] + +### Quick and easy database backup and restore + +Typically, the easiest solution is to use `pg_dump` to take a copy of the whole +database. We recommend `pg_dump`'s custom dump format, as it produces +significantly smaller backup files. + +```shell +sudo -u postgres pg_dump -Fc --exclude-table-data e2e_one_time_keys_json synapse > synapse.dump +``` + +There is no need to stop Postgres or Synapse while `pg_dump` is running: it +will take a consistent snapshot of the database. + +To restore, you will need to recreate the database as described in [Using +Postgres](../../postgres.md#set-up-database), +then load the dump into it with `pg_restore`: + +```shell +sudo -u postgres createdb --encoding=UTF8 --locale=C --template=template0 --owner=synapse_user synapse +sudo -u postgres pg_restore -d synapse < synapse.dump +``` + +(If you forgot to exclude `e2e_one_time_keys_json` during `pg_dump`, remember +to connect to the new database and `TRUNCATE e2e_one_time_keys_json;` before +starting Synapse.) + +To reiterate: do **not** restore a dump over an existing database. + +Again, if you plan to run your homeserver at any sort of production level, we +recommend studying the PostgreSQL documentation on backup options. + +## Media store + +Synapse keeps a copy of media uploaded by users, including avatars and message +attachments, in its [Media +store](../configuration/config_documentation.md#media-store). + +It is a directory on the local disk, containing the following directories: + + * `local_content`: this is content uploaded by your local users. As a general + rule, you should back this up: it may represent the only copy of those + media files anywhere in the federation, and if they are lost, users will + see errors when viewing user or room avatars, and messages with attachments. + + * `local_thumbnails`: "thumbnails" of images uploaded by your users.
If + [`dynamic_thumbnails`](../configuration/config_documentation.md#dynamic_thumbnails) + is enabled, these will be regenerated if they are removed from the disk, and + there is therefore no need to back them up. + + If `dynamic_thumbnails` is *not* enabled (the default): although this can + theoretically be regenerated from `local_content`, there is no tooling to do + so. We recommend that these are backed up too. + + * `remote_content`: this is a cache of content that was uploaded by a user on + another server, and has since been requested by a user on your own server. + + Typically there is no need to back up this directory: if a file in this directory + is removed, Synapse will attempt to fetch it again from the remote + server. + + * `remote_thumbnails`: thumbnails of images uploaded by users on other + servers. As with `remote_content`, there is normally no need to back this + up. + + * `url_cache`, `url_cache_thumbnails`: temporary caches of files downloaded + by the [URL previews](../../setup/installation.md#url-previews) feature. + These do not need to be backed up. diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index f420a3fada..0b65b2dbb4 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -1889,6 +1889,26 @@ When set to true, all subsequent media uploads will be marked as authenticated, unauthenticated media endpoints (`/_matrix/media/(r0|v3|v1)/download` and `/_matrix/media/(r0|v3|v1)/thumbnail`) - requests for authenticated media over these endpoints will result in a 404. All media, including authenticated media, will be available over the authenticated media endpoints `_matrix/client/v1/media/download` and `_matrix/client/v1/media/thumbnail`. Media uploaded prior to setting this option to true will still be available over the legacy endpoints. Note if the setting is switched to false after enabling, media marked as authenticated will be available over legacy endpoints. Defaults to true (previously false). In a future release of Synapse, this option will be removed and become always-on. +In all cases, authenticated requests to download media will succeed, but for unauthenticated requests, this +case-by-case breakdown describes whether media downloads are permitted: + +* `enable_authenticated_media = False`: + * unauthenticated client or homeserver requesting local media: allowed + * unauthenticated client or homeserver requesting remote media: allowed as long as the media is in the cache, + or as long as the remote homeserver does not require authentication to retrieve the media +* `enable_authenticated_media = True`: + * unauthenticated client or homeserver requesting local media: + allowed if the media was stored on the server whilst `enable_authenticated_media` was `False` (or in a previous Synapse version where this option did not exist); + otherwise denied. + * unauthenticated client or homeserver requesting remote media: the same as for local media; + allowed if the media was stored on the server whilst `enable_authenticated_media` was `False` (or in a previous Synapse version where this option did not exist); + otherwise denied. + +It is especially notable that media downloaded before this option existed (in older Synapse versions), or whilst this option was set to `False`, +will perpetually be available over the legacy, unauthenticated endpoint, even after this option is set to `True`. 
+This is for backwards compatibility with older clients and homeservers that do not yet support requesting authenticated media; +those older clients or homeservers will not be cut off from media they can already see. + Example configuration: ```yaml enable_authenticated_media: false @@ -3107,6 +3127,15 @@ it was last used. It is possible to build an entry from an old `signing.key` file using the `export_signing_key` script which is provided with synapse. +If you have lost the private key file, you can ask another server you trust to +tell you the public keys it has seen from your server. To fetch the keys from +`matrix.org`, try something like: + +``` +curl https://matrix-federation.matrix.org/_matrix/key/v2/query/myserver.example.com | + jq '.server_keys | map(.verify_keys) | add' +``` + Example configuration: ```yaml old_signing_keys: @@ -4370,9 +4399,9 @@ It is possible to scale the processes that handle sending outbound federation re by running a [`generic_worker`](../../workers.md#synapseappgeneric_worker) and adding it's [`worker_name`](#worker_name) to a `federation_sender_instances` map. Doing so will remove handling of this function from the main process. Multiple workers can be added to this map, in which case the work is -balanced across them. +balanced across them. -The way that the load balancing works is any outbound federation request will be assigned +The way that the load balancing works is any outbound federation request will be assigned to a federation sender worker based on the hash of the destination server name. This means that all requests being sent to the same destination will be processed by the same worker instance. Multiple `federation_sender_instances` are useful if there is a federation @@ -4729,7 +4758,7 @@ This setting has the following sub-options: * `only_for_direct_messages`: Whether invites should be automatically accepted for all room types, or only for direct messages. Defaults to false. * `only_from_local_users`: Whether to only automatically accept invites from users on this homeserver. Defaults to false. -* `worker_to_run_on`: Which worker to run this module on. This must match +* `worker_to_run_on`: Which worker to run this module on. This must match the "worker_name". If not set or `null`, invites will be accepted on the main process. diff --git a/poetry.lock b/poetry.lock index 16b7dc504e..eece221095 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "annotated-types" @@ -104,21 +104,20 @@ typecheck = ["mypy"] [[package]] name = "bleach" -version = "6.1.0" +version = "6.2.0" description = "An easy safelist-based HTML-sanitizing tool." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, - {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, + {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"}, + {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"}, ] [package.dependencies] -six = ">=1.9.0" webencodings = "*" [package.extras] -css = ["tinycss2 (>=1.1.0,<1.3)"] +css = ["tinycss2 (>=1.1.0,<1.5)"] [[package]] name = "canonicaljson" @@ -725,13 +724,13 @@ files = [ [[package]] name = "immutabledict" -version = "4.2.0" +version = "4.2.1" description = "Immutable wrapper around dictionaries (a fork of frozendict)" optional = false -python-versions = ">=3.8,<4.0" +python-versions = ">=3.8" files = [ - {file = "immutabledict-4.2.0-py3-none-any.whl", hash = "sha256:d728b2c2410d698d95e6200237feb50a695584d20289ad3379a439aa3d90baba"}, - {file = "immutabledict-4.2.0.tar.gz", hash = "sha256:e003fd81aad2377a5a758bf7e1086cf3b70b63e9a5cc2f46bce8d0a2b4727c5f"}, + {file = "immutabledict-4.2.1-py3-none-any.whl", hash = "sha256:c56a26ced38c236f79e74af3ccce53772827cef5c3bce7cab33ff2060f756373"}, + {file = "immutabledict-4.2.1.tar.gz", hash = "sha256:d91017248981c72eb66c8ff9834e99c2f53562346f23e7f51e7a5ebcf66a3bcc"}, ] [[package]] @@ -1419,13 +1418,13 @@ tests = ["Sphinx", "doubles", "flake8", "flake8-quotes", "gevent", "mock", "pyte [[package]] name = "packaging" -version = "24.1" +version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -1444,13 +1443,13 @@ dev = ["jinja2"] [[package]] name = "phonenumbers" -version = "8.13.49" +version = "8.13.50" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." 
optional = false python-versions = "*" files = [ - {file = "phonenumbers-8.13.49-py2.py3-none-any.whl", hash = "sha256:e17140955ab3d8f9580727372ea64c5ada5327932d6021ef6fd203c3db8c8139"}, - {file = "phonenumbers-8.13.49.tar.gz", hash = "sha256:e608ccb61f0bd42e6db1d2c421f7c22186b88f494870bf40aa31d1a2718ab0ae"}, + {file = "phonenumbers-8.13.50-py2.py3-none-any.whl", hash = "sha256:bb95dbc0d9979c51f7ad94bcd780784938958861fbb4b75a2fe39ccd3d58954a"}, + {file = "phonenumbers-8.13.50.tar.gz", hash = "sha256:e05ac6fb7b98c6d719a87ea895b9fc153673b4a51f455ec9afaf557ef4629da6"}, ] [[package]] @@ -1785,13 +1784,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pygithub" -version = "2.4.0" +version = "2.5.0" description = "Use the full Github API v3" optional = false python-versions = ">=3.8" files = [ - {file = "PyGithub-2.4.0-py3-none-any.whl", hash = "sha256:81935aa4bdc939fba98fee1cb47422c09157c56a27966476ff92775602b9ee24"}, - {file = "pygithub-2.4.0.tar.gz", hash = "sha256:6601e22627e87bac192f1e2e39c6e6f69a43152cfb8f307cee575879320b3051"}, + {file = "PyGithub-2.5.0-py3-none-any.whl", hash = "sha256:b0b635999a658ab8e08720bdd3318893ff20e2275f6446fcf35bf3f44f2c0fd2"}, + {file = "pygithub-2.5.0.tar.gz", hash = "sha256:e1613ac508a9be710920d26eb18b1905ebd9926aa49398e88151c1b526aad3cf"}, ] [package.dependencies] @@ -2257,29 +2256,29 @@ files = [ [[package]] name = "ruff" -version = "0.7.2" +version = "0.7.3" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.7.2-py3-none-linux_armv6l.whl", hash = "sha256:b73f873b5f52092e63ed540adefc3c36f1f803790ecf2590e1df8bf0a9f72cb8"}, - {file = "ruff-0.7.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5b813ef26db1015953daf476202585512afd6a6862a02cde63f3bafb53d0b2d4"}, - {file = "ruff-0.7.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:853277dbd9675810c6826dad7a428d52a11760744508340e66bf46f8be9701d9"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21aae53ab1490a52bf4e3bf520c10ce120987b047c494cacf4edad0ba0888da2"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc7e0fc6e0cb3168443eeadb6445285abaae75142ee22b2b72c27d790ab60ba"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd77877a4e43b3a98e5ef4715ba3862105e299af0c48942cc6d51ba3d97dc859"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e00163fb897d35523c70d71a46fbaa43bf7bf9af0f4534c53ea5b96b2e03397b"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3c54b538633482dc342e9b634d91168fe8cc56b30a4b4f99287f4e339103e88"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b792468e9804a204be221b14257566669d1db5c00d6bb335996e5cd7004ba80"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba53ed84ac19ae4bfb4ea4bf0172550a2285fa27fbb13e3746f04c80f7fa088"}, - {file = "ruff-0.7.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b19fafe261bf741bca2764c14cbb4ee1819b67adb63ebc2db6401dcd652e3748"}, - {file = "ruff-0.7.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:28bd8220f4d8f79d590db9e2f6a0674f75ddbc3847277dd44ac1f8d30684b828"}, - {file = "ruff-0.7.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9fd67094e77efbea932e62b5d2483006154794040abb3a5072e659096415ae1e"}, - {file = 
"ruff-0.7.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:576305393998b7bd6c46018f8104ea3a9cb3fa7908c21d8580e3274a3b04b691"}, - {file = "ruff-0.7.2-py3-none-win32.whl", hash = "sha256:fa993cfc9f0ff11187e82de874dfc3611df80852540331bc85c75809c93253a8"}, - {file = "ruff-0.7.2-py3-none-win_amd64.whl", hash = "sha256:dd8800cbe0254e06b8fec585e97554047fb82c894973f7ff18558eee33d1cb88"}, - {file = "ruff-0.7.2-py3-none-win_arm64.whl", hash = "sha256:bb8368cd45bba3f57bb29cbb8d64b4a33f8415d0149d2655c5c8539452ce7760"}, - {file = "ruff-0.7.2.tar.gz", hash = "sha256:2b14e77293380e475b4e3a7a368e14549288ed2931fce259a6f99978669e844f"}, + {file = "ruff-0.7.3-py3-none-linux_armv6l.whl", hash = "sha256:34f2339dc22687ec7e7002792d1f50712bf84a13d5152e75712ac08be565d344"}, + {file = "ruff-0.7.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:fb397332a1879b9764a3455a0bb1087bda876c2db8aca3a3cbb67b3dbce8cda0"}, + {file = "ruff-0.7.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:37d0b619546103274e7f62643d14e1adcbccb242efda4e4bdb9544d7764782e9"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59f0c3ee4d1a6787614e7135b72e21024875266101142a09a61439cb6e38a5"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:44eb93c2499a169d49fafd07bc62ac89b1bc800b197e50ff4633aed212569299"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d0242ce53f3a576c35ee32d907475a8d569944c0407f91d207c8af5be5dae4e"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6b6224af8b5e09772c2ecb8dc9f3f344c1aa48201c7f07e7315367f6dd90ac29"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c50f95a82b94421c964fae4c27c0242890a20fe67d203d127e84fbb8013855f5"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f3eff9961b5d2644bcf1616c606e93baa2d6b349e8aa8b035f654df252c8c67"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8963cab06d130c4df2fd52c84e9f10d297826d2e8169ae0c798b6221be1d1d2"}, + {file = "ruff-0.7.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:61b46049d6edc0e4317fb14b33bd693245281a3007288b68a3f5b74a22a0746d"}, + {file = "ruff-0.7.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:10ebce7696afe4644e8c1a23b3cf8c0f2193a310c18387c06e583ae9ef284de2"}, + {file = "ruff-0.7.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3f36d56326b3aef8eeee150b700e519880d1aab92f471eefdef656fd57492aa2"}, + {file = "ruff-0.7.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5d024301109a0007b78d57ab0ba190087b43dce852e552734ebf0b0b85e4fb16"}, + {file = "ruff-0.7.3-py3-none-win32.whl", hash = "sha256:4ba81a5f0c5478aa61674c5a2194de8b02652f17addf8dfc40c8937e6e7d79fc"}, + {file = "ruff-0.7.3-py3-none-win_amd64.whl", hash = "sha256:588a9ff2fecf01025ed065fe28809cd5a53b43505f48b69a1ac7707b1b7e4088"}, + {file = "ruff-0.7.3-py3-none-win_arm64.whl", hash = "sha256:1713e2c5545863cdbfe2cbce21f69ffaf37b813bfd1fb3b90dc9a6f1963f5a8c"}, + {file = "ruff-0.7.3.tar.gz", hash = "sha256:e1d1ba2e40b6e71a61b063354d04be669ab0d39c352461f3d789cac68b54a313"}, ] [[package]] @@ -3102,4 +3101,4 @@ user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.9.0" -content-hash = "0cd942a5193d01cbcef135a0bebd3fa0f12f7dbc63899d6f1c301e0649e9d902" +content-hash = "d71159b19349fdc0b7cd8e06e8c8778b603fc37b941c6df34ddc31746783d94d" diff --git a/pyproject.toml 
b/pyproject.toml index 13de146b4e..3688aececf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.118.0" +version = "1.119.0" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors <packages@matrix.org>"] license = "AGPL-3.0-or-later" @@ -320,7 +320,7 @@ all = [ # failing on new releases. Keeping lower bounds loose here means that dependabot # can bump versions without having to update the content-hash in the lockfile. # This helps prevents merge conflicts when running a batch of dependabot updates. -ruff = "0.7.2" +ruff = "0.7.3" # Type checking only works with the pydantic.v1 compat module from pydantic v2 pydantic = "^2" diff --git a/rust/src/events/filter.rs b/rust/src/events/filter.rs new file mode 100644 index 0000000000..7e39972c62 --- /dev/null +++ b/rust/src/events/filter.rs @@ -0,0 +1,107 @@ +/* + * This file is licensed under the Affero General Public License (AGPL) version 3. + * + * Copyright (C) 2024 New Vector, Ltd + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * See the GNU Affero General Public License for more details: + * <https://www.gnu.org/licenses/agpl-3.0.html>. + */ + +use std::collections::HashMap; + +use pyo3::{exceptions::PyValueError, pyfunction, PyResult}; + +use crate::{ + identifier::UserID, + matrix_const::{ + HISTORY_VISIBILITY_INVITED, HISTORY_VISIBILITY_JOINED, MEMBERSHIP_INVITE, MEMBERSHIP_JOIN, + }, +}; + +#[pyfunction(name = "event_visible_to_server")] +pub fn event_visible_to_server_py( + sender: String, + target_server_name: String, + history_visibility: String, + erased_senders: HashMap<String, bool>, + partial_state_invisible: bool, + memberships: Vec<(String, String)>, // (state_key, membership) +) -> PyResult<bool> { + event_visible_to_server( + sender, + target_server_name, + history_visibility, + erased_senders, + partial_state_invisible, + memberships, + ) + .map_err(|e| PyValueError::new_err(format!("{e}"))) +} + +/// Return whether the target server is allowed to see the event. +/// +/// For a fully stated room, the target server is allowed to see an event E if: +/// - the state at E has world readable or shared history vis, OR +/// - the state at E says that the target server is in the room. +/// +/// For a partially stated room, the target server is allowed to see E if: +/// - E was created by this homeserver, AND: +/// - the partial state at E has world readable or shared history vis, OR +/// - the partial state at E says that the target server is in the room.
+pub fn event_visible_to_server( + sender: String, + target_server_name: String, + history_visibility: String, + erased_senders: HashMap<String, bool>, + partial_state_invisible: bool, + memberships: Vec<(String, String)>, // (state_key, membership) +) -> anyhow::Result<bool> { + if let Some(&erased) = erased_senders.get(&sender) { + if erased { + return Ok(false); + } + } + + if partial_state_invisible { + return Ok(false); + } + + if history_visibility != HISTORY_VISIBILITY_INVITED + && history_visibility != HISTORY_VISIBILITY_JOINED + { + return Ok(true); + } + + let mut visible = false; + for (state_key, membership) in memberships { + let state_key = UserID::try_from(state_key.as_ref()) + .map_err(|e| anyhow::anyhow!(format!("invalid user_id ({state_key}): {e}")))?; + if state_key.server_name() != target_server_name { + return Err(anyhow::anyhow!( + "state_key.server_name ({}) does not match target_server_name ({target_server_name})", + state_key.server_name() + )); + } + + match membership.as_str() { + MEMBERSHIP_INVITE => { + if history_visibility == HISTORY_VISIBILITY_INVITED { + visible = true; + break; + } + } + MEMBERSHIP_JOIN => { + visible = true; + break; + } + _ => continue, + } + } + + Ok(visible) +} diff --git a/rust/src/events/mod.rs index a4ade1a178..0bb6cdb181 100644 --- a/rust/src/events/mod.rs +++ b/rust/src/events/mod.rs @@ -22,15 +22,17 @@ use pyo3::{ types::{PyAnyMethods, PyModule, PyModuleMethods}, - Bound, PyResult, Python, + wrap_pyfunction, Bound, PyResult, Python, }; +pub mod filter; mod internal_metadata; /// Called when registering modules with python. pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> { let child_module = PyModule::new_bound(py, "events")?; child_module.add_class::()?; + child_module.add_function(wrap_pyfunction!(filter::event_visible_to_server_py, m)?)?; m.add_submodule(&child_module)?; diff --git a/rust/src/identifier.rs b/rust/src/identifier.rs new file mode 100644 index 0000000000..b199c5838e --- /dev/null +++ b/rust/src/identifier.rs @@ -0,0 +1,86 @@ +/* + * This file is licensed under the Affero General Public License (AGPL) version 3. + * + * Copyright (C) 2024 New Vector, Ltd + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * See the GNU Affero General Public License for more details: + * <https://www.gnu.org/licenses/agpl-3.0.html>. + */ + +//! # Matrix Identifiers +//! +//! This module contains definitions and utilities for working with matrix identifiers. + +use std::{fmt, ops::Deref}; + +/// Errors that can occur when parsing a matrix identifier. +#[derive(Clone, Debug, PartialEq)] +pub enum IdentifierError { + IncorrectSigil, + MissingColon, +} + +impl fmt::Display for IdentifierError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{:?}", self) + } +} + +/// A Matrix user_id. +#[derive(Clone, Debug, PartialEq)] +pub struct UserID(String); + +impl UserID { + /// Returns the `localpart` of the user_id. + pub fn localpart(&self) -> &str { + &self[1..self.colon_pos()] + } + + /// Returns the `server_name` / `domain` of the user_id. + pub fn server_name(&self) -> &str { + &self[self.colon_pos() + 1..] + } + + /// Returns the position of the ':' inside of the user_id. + /// Used when splitting the user_id into its respective parts.
+ fn colon_pos(&self) -> usize { + self.find(':').unwrap() + } +} + +impl TryFrom<&str> for UserID { + type Error = IdentifierError; + + /// Will try creating a `UserID` from the provided `&str`. + /// Can fail if the user_id is incorrectly formatted. + fn try_from(s: &str) -> Result<Self, Self::Error> { + if !s.starts_with('@') { + return Err(IdentifierError::IncorrectSigil); + } + + if s.find(':').is_none() { + return Err(IdentifierError::MissingColon); + } + + Ok(UserID(s.to_string())) + } +} + +impl Deref for UserID { + type Target = str; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl fmt::Display for UserID { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} diff --git a/rust/src/lib.rs b/rust/src/lib.rs index 06477880b9..5de9238326 100644 --- a/rust/src/lib.rs +++ b/rust/src/lib.rs @@ -6,6 +6,8 @@ pub mod acl; pub mod errors; pub mod events; pub mod http; +pub mod identifier; +pub mod matrix_const; pub mod push; pub mod rendezvous; diff --git a/rust/src/matrix_const.rs b/rust/src/matrix_const.rs new file mode 100644 index 0000000000..f75f3bd7c3 --- /dev/null +++ b/rust/src/matrix_const.rs @@ -0,0 +1,28 @@ +/* + * This file is licensed under the Affero General Public License (AGPL) version 3. + * + * Copyright (C) 2024 New Vector, Ltd + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * See the GNU Affero General Public License for more details: + * <https://www.gnu.org/licenses/agpl-3.0.html>. + */ + +//! # Matrix Constants +//! +//! This module contains definitions for constant values described by the matrix specification. + +pub const HISTORY_VISIBILITY_WORLD_READABLE: &str = "world_readable"; +pub const HISTORY_VISIBILITY_SHARED: &str = "shared"; +pub const HISTORY_VISIBILITY_INVITED: &str = "invited"; +pub const HISTORY_VISIBILITY_JOINED: &str = "joined"; + +pub const MEMBERSHIP_BAN: &str = "ban"; +pub const MEMBERSHIP_LEAVE: &str = "leave"; +pub const MEMBERSHIP_KNOCK: &str = "knock"; +pub const MEMBERSHIP_INVITE: &str = "invite"; +pub const MEMBERSHIP_JOIN: &str = "join"; diff --git a/rust/src/push/utils.rs b/rust/src/push/utils.rs index 28ebed62c8..59536c9954 100644 --- a/rust/src/push/utils.rs +++ b/rust/src/push/utils.rs @@ -23,7 +23,6 @@ use anyhow::bail; use anyhow::Context; use anyhow::Error; use lazy_static::lazy_static; -use regex; use regex::Regex; use regex::RegexBuilder; diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index b26ce25d71..3411179a2a 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -365,11 +365,6 @@ class ExperimentalConfig(Config): # MSC3874: Filtering /messages with rel_types / not_rel_types. self.msc3874_enabled: bool = experimental.get("msc3874_enabled", False) - # MSC3886: Simple client rendezvous capability - self.msc3886_endpoint: Optional[str] = experimental.get( - "msc3886_endpoint", None - ) - - # MSC3890: Remotely silence local notifications # Note: This option requires "experimental_features.msc3391_enabled" to be # set to "true", in order to communicate account data deletions to clients.
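The new `rust/src/events/filter.rs` and `rust/src/identifier.rs` modules above are small and self-contained, so a few concrete calls make their behaviour easier to follow. The sketch below is illustrative only and is not part of the change set: it assumes it would sit in a test module alongside `filter.rs` (so that the `super::` and `crate::` imports resolve) and uses the literal history-visibility and membership strings defined in `rust/src/matrix_const.rs`.

```rust
// Illustrative only: a sketch of how the new helpers could be unit-tested;
// not part of the diff above.
#[cfg(test)]
mod example_tests {
    use std::collections::HashMap;

    use super::event_visible_to_server;
    use crate::identifier::UserID;

    #[test]
    fn shared_history_is_visible_to_any_server() {
        // "shared" (and "world_readable") history visibility short-circuits
        // before the membership list is consulted.
        let visible = event_visible_to_server(
            "@alice:example.com".to_string(),
            "other.example.org".to_string(),
            "shared".to_string(),
            HashMap::new(),
            false, // room is not partially stated
            vec![],
        )
        .unwrap();
        assert!(visible);
    }

    #[test]
    fn erased_sender_is_never_visible() {
        let mut erased_senders = HashMap::new();
        erased_senders.insert("@alice:example.com".to_string(), true);
        let visible = event_visible_to_server(
            "@alice:example.com".to_string(),
            "other.example.org".to_string(),
            "shared".to_string(),
            erased_senders,
            false,
            vec![],
        )
        .unwrap();
        assert!(!visible);
    }

    #[test]
    fn joined_history_needs_a_joined_member_on_the_target_server() {
        // With "joined" visibility the (state_key, membership) pairs are
        // checked; a joined member on the target server makes it visible.
        let visible = event_visible_to_server(
            "@alice:example.com".to_string(),
            "other.example.org".to_string(),
            "joined".to_string(),
            HashMap::new(),
            false,
            vec![("@bob:other.example.org".to_string(), "join".to_string())],
        )
        .unwrap();
        assert!(visible);
    }

    #[test]
    fn user_id_splits_into_localpart_and_server_name() {
        let user = UserID::try_from("@bob:other.example.org").unwrap();
        assert_eq!(user.localpart(), "bob");
        assert_eq!(user.server_name(), "other.example.org");
    }
}
```

Note that `event_visible_to_server` expects the membership list to contain only users on the target server; a `state_key` belonging to any other server is treated as a caller error and surfaces as an `Err`.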
diff --git a/synapse/config/server.py b/synapse/config/server.py index 6a8c7cb1c9..ad7331de42 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -215,9 +215,6 @@ class HttpListenerConfig: additional_resources: Dict[str, dict] = attr.Factory(dict) tag: Optional[str] = None request_id_header: Optional[str] = None - # If true, the listener will return CORS response headers compatible with MSC3886: - # https://github.com/matrix-org/matrix-spec-proposals/pull/3886 - experimental_cors_msc3886: bool = False @attr.s(slots=True, frozen=True, auto_attribs=True) @@ -1004,7 +1001,6 @@ def parse_listener_def(num: int, listener: Any) -> ListenerConfig: additional_resources=listener.get("additional_resources", {}), tag=listener.get("tag"), request_id_header=listener.get("request_id_header"), - experimental_cors_msc3886=listener.get("experimental_cors_msc3886", False), ) if socket_path: diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 204965afee..df3010ecf6 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -196,7 +196,9 @@ class MessageHandler: AuthError (403) if the user doesn't have permission to view members of this room. """ - state_filter = state_filter or StateFilter.all() + if state_filter is None: + state_filter = StateFilter.all() + user_id = requester.user.to_string() if at_token: diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index df9a088063..350c3fa09a 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1520,7 +1520,7 @@ class SyncHandler: if sync_config.use_state_after: delta_state_ids: MutableStateMap[str] = {} - if members_to_fetch is not None: + if members_to_fetch: # We're lazy-loading, so the client might need some more member # events to understand the events in this timeline. So we always # fish out all the member events corresponding to the timeline diff --git a/synapse/http/server.py b/synapse/http/server.py index 3e2d94d399..792961a147 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -921,15 +921,6 @@ def set_cors_headers(request: "SynapseRequest") -> None: b"Access-Control-Expose-Headers", b"Synapse-Trace-Id, Server, ETag", ) - elif request.experimental_cors_msc3886: - request.setHeader( - b"Access-Control-Allow-Headers", - b"X-Requested-With, Content-Type, Authorization, Date, If-Match, If-None-Match", - ) - request.setHeader( - b"Access-Control-Expose-Headers", - b"ETag, Location, X-Max-Bytes", - ) else: request.setHeader( b"Access-Control-Allow-Headers", diff --git a/synapse/http/site.py b/synapse/http/site.py index 8bf63edd36..1cd90cb9b7 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -94,7 +94,6 @@ class SynapseRequest(Request): self.reactor = site.reactor self._channel = channel # this is used by the tests self.start_time = 0.0 - self.experimental_cors_msc3886 = site.experimental_cors_msc3886 # The requester, if authenticated. For federation requests this is the # server name, for client requests this is the Requester object. 
@@ -666,10 +665,6 @@ class SynapseSite(ProxySite): request_id_header = config.http_options.request_id_header - self.experimental_cors_msc3886: bool = ( - config.http_options.experimental_cors_msc3886 - ) - def request_factory(channel: HTTPChannel, queued: bool) -> Request: return request_class( channel, diff --git a/synapse/media/media_repository.py b/synapse/media/media_repository.py index 0b74209232..f4d25a7b8b 100644 --- a/synapse/media/media_repository.py +++ b/synapse/media/media_repository.py @@ -259,7 +259,7 @@ class MediaRepository: """ media = await self.store.get_local_media(media_id) if media is None: - raise SynapseError(404, "Unknow media ID", errcode=Codes.NOT_FOUND) + raise NotFoundError("Unknown media ID") if media.user_id != auth_user.to_string(): raise SynapseError( diff --git a/synapse/rest/client/rendezvous.py b/synapse/rest/client/rendezvous.py index 27bf53314a..02f166b4ea 100644 --- a/synapse/rest/client/rendezvous.py +++ b/synapse/rest/client/rendezvous.py @@ -34,51 +34,6 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) -# n.b [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886) has now been closed. -# However, we want to keep this implementation around for some time. -# TODO: define an end-of-life date for this implementation. -class MSC3886RendezvousServlet(RestServlet): - """ - This is a placeholder implementation of [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886) - simple client rendezvous capability that is used by the "Sign in with QR" functionality. - - This implementation only serves as a 307 redirect to a configured server rather than being a full implementation. - - A module that implements the full functionality is available at: https://pypi.org/project/matrix-http-rendezvous-synapse/. - - Request: - - POST /rendezvous HTTP/1.1 - Content-Type: ... - - ... - - Response: - - HTTP/1.1 307 - Location: - """ - - PATTERNS = client_patterns( - "/org.matrix.msc3886/rendezvous$", releases=[], v1=False, unstable=True - ) - - def __init__(self, hs: "HomeServer"): - super().__init__() - redirection_target: Optional[str] = hs.config.experimental.msc3886_endpoint - assert ( - redirection_target is not None - ), "Servlet is only registered if there is a redirection target" - self.endpoint = redirection_target.encode("utf-8") - - async def on_POST(self, request: SynapseRequest) -> None: - respond_with_redirect( - request, self.endpoint, statusCode=TEMPORARY_REDIRECT, cors=True - ) - - # PUT, GET and DELETE are not implemented as they should be fulfilled by the redirect target. - - class MSC4108DelegationRendezvousServlet(RestServlet): PATTERNS = client_patterns( "/org.matrix.msc4108/rendezvous$", releases=[], v1=False, unstable=True @@ -114,9 +69,6 @@ class MSC4108RendezvousServlet(RestServlet): def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: - if hs.config.experimental.msc3886_endpoint is not None: - MSC3886RendezvousServlet(hs).register(http_server) - if hs.config.experimental.msc4108_enabled: MSC4108RendezvousServlet(hs).register(http_server) diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 8028cf8ad2..ba1141bbe5 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -149,9 +149,6 @@ class VersionsRestServlet(RestServlet): "org.matrix.msc3881": msc3881_enabled, # Adds support for filtering /messages by event relation. 
"org.matrix.msc3874": self.config.experimental.msc3874_enabled, - # Adds support for simple HTTP rendezvous as per MSC3886 - "org.matrix.msc3886": self.config.experimental.msc3886_endpoint - is not None, # Adds support for relation-based redactions as per MSC3912. "org.matrix.msc3912": self.config.experimental.msc3912_enabled, # Whether recursively provide relations is supported. diff --git a/synapse/rest/media/upload_resource.py b/synapse/rest/media/upload_resource.py index 5ef6bf8836..359d006f04 100644 --- a/synapse/rest/media/upload_resource.py +++ b/synapse/rest/media/upload_resource.py @@ -94,7 +94,7 @@ class BaseUploadServlet(RestServlet): # if headers.hasHeader(b"Content-Disposition"): # disposition = headers.getRawHeaders(b"Content-Disposition")[0] - # TODO(markjh): parse content-dispostion + # TODO(markjh): parse content-disposition return content_length, upload_name, media_type diff --git a/synapse/storage/controllers/state.py b/synapse/storage/controllers/state.py index b50eb8868e..f28f5d7e03 100644 --- a/synapse/storage/controllers/state.py +++ b/synapse/storage/controllers/state.py @@ -234,8 +234,11 @@ class StateStorageController: RuntimeError if we don't have a state group for one or more of the events (ie they are outliers or unknown) """ + if state_filter is None: + state_filter = StateFilter.all() + await_full_state = True - if state_filter and not state_filter.must_await_full_state(self._is_mine_id): + if not state_filter.must_await_full_state(self._is_mine_id): await_full_state = False event_to_groups = await self.get_state_group_for_events( @@ -244,7 +247,7 @@ class StateStorageController: groups = set(event_to_groups.values()) group_to_state = await self.stores.state._get_state_for_groups( - groups, state_filter or StateFilter.all() + groups, state_filter ) state_event_map = await self.stores.main.get_events( @@ -292,10 +295,11 @@ class StateStorageController: RuntimeError if we don't have a state group for one or more of the events (ie they are outliers or unknown) """ - if ( - await_full_state - and state_filter - and not state_filter.must_await_full_state(self._is_mine_id) + if state_filter is None: + state_filter = StateFilter.all() + + if await_full_state and not state_filter.must_await_full_state( + self._is_mine_id ): # Full state is not required if the state filter is restrictive enough. await_full_state = False @@ -306,7 +310,7 @@ class StateStorageController: groups = set(event_to_groups.values()) group_to_state = await self.stores.state._get_state_for_groups( - groups, state_filter or StateFilter.all() + groups, state_filter ) event_to_state = { @@ -335,9 +339,10 @@ class StateStorageController: RuntimeError if we don't have a state group for the event (ie it is an outlier or is unknown) """ - state_map = await self.get_state_for_events( - [event_id], state_filter or StateFilter.all() - ) + if state_filter is None: + state_filter = StateFilter.all() + + state_map = await self.get_state_for_events([event_id], state_filter) return state_map[event_id] @trace @@ -365,9 +370,12 @@ class StateStorageController: RuntimeError if we don't have a state group for the event (ie it is an outlier or is unknown) """ + if state_filter is None: + state_filter = StateFilter.all() + state_map = await self.get_state_ids_for_events( [event_id], - state_filter or StateFilter.all(), + state_filter, await_full_state=await_full_state, ) return state_map[event_id] @@ -388,9 +396,12 @@ class StateStorageController: at the event and `state_filter` is not satisfied by partial state. 
Defaults to `True`. """ + if state_filter is None: + state_filter = StateFilter.all() + state_ids = await self.get_state_ids_for_event( event_id, - state_filter=state_filter or StateFilter.all(), + state_filter=state_filter, await_full_state=await_full_state, ) @@ -426,6 +437,9 @@ class StateStorageController: at the last event in the room before `stream_position` and `state_filter` is not satisfied by partial state. Defaults to `True`. """ + if state_filter is None: + state_filter = StateFilter.all() + # FIXME: This gets the state at the latest event before the stream ordering, # which might not be the same as the "current state" of the room at the time # of the stream token if there were multiple forward extremities at the time. @@ -442,7 +456,7 @@ class StateStorageController: if last_event_id: state = await self.get_state_after_event( last_event_id, - state_filter=state_filter or StateFilter.all(), + state_filter=state_filter, await_full_state=await_full_state, ) @@ -500,9 +514,10 @@ class StateStorageController: Returns: Dict of state group to state map. """ - return await self.stores.state._get_state_for_groups( - groups, state_filter or StateFilter.all() - ) + if state_filter is None: + state_filter = StateFilter.all() + + return await self.stores.state._get_state_for_groups(groups, state_filter) @trace @tag_args @@ -583,12 +598,13 @@ class StateStorageController: Returns: The current state of the room. """ - if await_full_state and ( - not state_filter or state_filter.must_await_full_state(self._is_mine_id) - ): + if state_filter is None: + state_filter = StateFilter.all() + + if await_full_state and state_filter.must_await_full_state(self._is_mine_id): await self._partial_state_room_tracker.await_full_state(room_id) - if state_filter and not state_filter.is_full(): + if state_filter is not None and not state_filter.is_full(): return await self.stores.main.get_partial_filtered_current_state_ids( room_id, state_filter ) diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py index 42b3638e1c..788f7d1e32 100644 --- a/synapse/storage/databases/main/state.py +++ b/synapse/storage/databases/main/state.py @@ -572,10 +572,10 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): Returns: Map from type/state_key to event ID. 
""" + if state_filter is None: + state_filter = StateFilter.all() - where_clause, where_args = ( - state_filter or StateFilter.all() - ).make_sql_filter_clause() + where_clause, where_args = (state_filter).make_sql_filter_clause() if not where_clause: # We delegate to the cached version @@ -584,7 +584,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): def _get_filtered_current_state_ids_txn( txn: LoggingTransaction, ) -> StateMap[str]: - results = StateMapWrapper(state_filter=state_filter or StateFilter.all()) + results = StateMapWrapper(state_filter=state_filter) sql = """ SELECT type, state_key, event_id FROM current_state_events diff --git a/synapse/storage/databases/main/state_deltas.py b/synapse/storage/databases/main/state_deltas.py index ba52fff652..117ee89d0a 100644 --- a/synapse/storage/databases/main/state_deltas.py +++ b/synapse/storage/databases/main/state_deltas.py @@ -20,18 +20,26 @@ # import logging -from typing import List, Optional, Tuple +from typing import TYPE_CHECKING, List, Optional, Tuple import attr from synapse.logging.opentracing import trace from synapse.storage._base import SQLBaseStore -from synapse.storage.database import LoggingTransaction, make_in_list_sql_clause +from synapse.storage.database import ( + DatabasePool, + LoggingDatabaseConnection, + LoggingTransaction, + make_in_list_sql_clause, +) from synapse.storage.databases.main.stream import _filter_results_by_stream from synapse.types import RoomStreamToken, StrCollection from synapse.util.caches.stream_change_cache import StreamChangeCache from synapse.util.iterutils import batch_iter +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) @@ -54,6 +62,21 @@ class StateDeltasStore(SQLBaseStore): # attribute. TODO: can we get static analysis to enforce this? _curr_state_delta_stream_cache: StreamChangeCache + def __init__( + self, + database: DatabasePool, + db_conn: LoggingDatabaseConnection, + hs: "HomeServer", + ): + super().__init__(database, db_conn, hs) + + self.db_pool.updates.register_background_index_update( + update_name="current_state_delta_stream_room_index", + index_name="current_state_delta_stream_room_idx", + table="current_state_delta_stream", + columns=("room_id", "stream_id"), + ) + async def get_partial_current_state_deltas( self, prev_stream_id: int, max_stream_id: int ) -> Tuple[int, List[StateDelta]]: diff --git a/synapse/storage/databases/state/bg_updates.py b/synapse/storage/databases/state/bg_updates.py index ea7d8199a7..f7824cba0f 100644 --- a/synapse/storage/databases/state/bg_updates.py +++ b/synapse/storage/databases/state/bg_updates.py @@ -112,8 +112,8 @@ class StateGroupBackgroundUpdateStore(SQLBaseStore): Returns: Map from state_group to a StateMap at that point. """ - - state_filter = state_filter or StateFilter.all() + if state_filter is None: + state_filter = StateFilter.all() results: Dict[int, MutableStateMap[str]] = {group: {} for group in groups} diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py index 875dba3349..f7a59c8992 100644 --- a/synapse/storage/databases/state/store.py +++ b/synapse/storage/databases/state/store.py @@ -284,7 +284,8 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore): Returns: Dict of state group to state map. 
""" - state_filter = state_filter or StateFilter.all() + if state_filter is None: + state_filter = StateFilter.all() member_filter, non_member_filter = state_filter.get_member_split() diff --git a/synapse/storage/schema/main/delta/88/04_current_state_delta_index.sql b/synapse/storage/schema/main/delta/88/04_current_state_delta_index.sql new file mode 100644 index 0000000000..ad54302a8f --- /dev/null +++ b/synapse/storage/schema/main/delta/88/04_current_state_delta_index.sql @@ -0,0 +1,18 @@ +-- +-- This file is licensed under the Affero General Public License (AGPL) version 3. +-- +-- Copyright (C) 2024 New Vector, Ltd +-- +-- This program is free software: you can redistribute it and/or modify +-- it under the terms of the GNU Affero General Public License as +-- published by the Free Software Foundation, either version 3 of the +-- License, or (at your option) any later version. +-- +-- See the GNU Affero General Public License for more details: +-- . + + +-- Add an index on (user_id, device_id, algorithm, ts_added_ms) on e2e_one_time_keys_json, so that OTKs can +-- efficiently be issued in the same order they were uploaded. +INSERT INTO background_updates (ordering, update_name, progress_json) VALUES + (8804, 'current_state_delta_stream_room_index', '{}'); diff --git a/synapse/synapse_rust/events.pyi b/synapse/synapse_rust/events.pyi index 1682d0d151..7d3422572d 100644 --- a/synapse/synapse_rust/events.pyi +++ b/synapse/synapse_rust/events.pyi @@ -10,7 +10,7 @@ # See the GNU Affero General Public License for more details: # . -from typing import Optional +from typing import List, Mapping, Optional, Tuple from synapse.types import JsonDict @@ -105,3 +105,29 @@ class EventInternalMetadata: def is_notifiable(self) -> bool: """Whether this event can trigger a push notification""" + +def event_visible_to_server( + sender: str, + target_server_name: str, + history_visibility: str, + erased_senders: Mapping[str, bool], + partial_state_invisible: bool, + memberships: List[Tuple[str, str]], +) -> bool: + """Determine whether the server is allowed to see the unredacted event. + + Args: + sender: The sender of the event. + target_server_name: The server we want to send the event to. + history_visibility: The history_visibility value at the event. + erased_senders: A mapping of users and whether they have requested erasure. If a + user is not in the map, it is treated as though they haven't requested erasure. + partial_state_invisible: Whether the event should be treated as invisible due to + the partial state status of the room. + memberships: A list of membership state information at the event for users + matching the `target_server_name`. Each list item must contain a tuple of + (state_key, membership). + + Returns: + Whether the server is allowed to see the unredacted event. 
+ """ diff --git a/synapse/types/state.py b/synapse/types/state.py index 67d1c3fe97..e641215f18 100644 --- a/synapse/types/state.py +++ b/synapse/types/state.py @@ -68,15 +68,23 @@ class StateFilter: include_others: bool = False def __attrs_post_init__(self) -> None: - # If `include_others` is set we canonicalise the filter by removing - # wildcards from the types dictionary if self.include_others: + # If `include_others` is set we canonicalise the filter by removing + # wildcards from the types dictionary + # this is needed to work around the fact that StateFilter is frozen object.__setattr__( self, "types", immutabledict({k: v for k, v in self.types.items() if v is not None}), ) + else: + # Otherwise we remove entries where the value is the empty set. + object.__setattr__( + self, + "types", + immutabledict({k: v for k, v in self.types.items() if v is None or v}), + ) @staticmethod def all() -> "StateFilter": diff --git a/synapse/visibility.py b/synapse/visibility.py index 3a2782bade..dc7b6e4065 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -27,7 +27,6 @@ from typing import ( Final, FrozenSet, List, - Mapping, Optional, Sequence, Set, @@ -48,6 +47,7 @@ from synapse.events.utils import clone_event, prune_event from synapse.logging.opentracing import trace from synapse.storage.controllers import StorageControllers from synapse.storage.databases.main import DataStore +from synapse.synapse_rust.events import event_visible_to_server from synapse.types import RetentionPolicy, StateMap, StrCollection, get_domain_from_id from synapse.types.state import StateFilter from synapse.util import Clock @@ -628,17 +628,6 @@ async def filter_events_for_server( """Filter a list of events based on whether the target server is allowed to see them. - For a fully stated room, the target server is allowed to see an event E if: - - the state at E has world readable or shared history vis, OR - - the state at E says that the target server is in the room. - - For a partially stated room, the target server is allowed to see E if: - - E was created by this homeserver, AND: - - the partial state at E has world readable or shared history vis, OR - - the partial state at E says that the target server is in the room. - - TODO: state before or state after? - Args: storage target_server_name @@ -655,35 +644,6 @@ async def filter_events_for_server( The filtered events. """ - def is_sender_erased(event: EventBase, erased_senders: Mapping[str, bool]) -> bool: - if erased_senders and erased_senders[event.sender]: - logger.info("Sender of %s has been erased, redacting", event.event_id) - return True - return False - - def check_event_is_visible( - visibility: str, memberships: StateMap[EventBase] - ) -> bool: - if visibility not in (HistoryVisibility.INVITED, HistoryVisibility.JOINED): - return True - - # We now loop through all membership events looking for - # membership states for the requesting server to determine - # if the server is either in the room or has been invited - # into the room. 
- for ev in memberships.values(): - assert get_domain_from_id(ev.state_key) == target_server_name - - memtype = ev.membership - if memtype == Membership.JOIN: - return True - elif memtype == Membership.INVITE: - if visibility == HistoryVisibility.INVITED: - return True - - # server has no users in the room: redact - return False - if filter_out_erased_senders: erased_senders = await storage.main.are_users_erased(e.sender for e in events) else: @@ -726,20 +686,16 @@ async def filter_events_for_server( target_server_name, ) - def include_event_in_output(e: EventBase) -> bool: - erased = is_sender_erased(e, erased_senders) - visible = check_event_is_visible( - event_to_history_vis[e.event_id], event_to_memberships.get(e.event_id, {}) - ) - - if e.event_id in partial_state_invisible_event_ids: - visible = False - - return visible and not erased - to_return = [] for e in events: - if include_event_in_output(e): + if event_visible_to_server( + sender=e.sender, + target_server_name=target_server_name, + history_visibility=event_to_history_vis[e.event_id], + erased_senders=erased_senders, + partial_state_invisible=e.event_id in partial_state_invisible_event_ids, + memberships=list(event_to_memberships.get(e.event_id, {}).values()), + ): to_return.append(e) elif redact: to_return.append(prune_event(e)) @@ -796,7 +752,7 @@ async def _event_to_history_vis( async def _event_to_memberships( storage: StorageControllers, events: Collection[EventBase], server_name: str -) -> Dict[str, StateMap[EventBase]]: +) -> Dict[str, StateMap[Tuple[str, str]]]: """Get the remote membership list at each of the given events Returns a map from event id to state map, which will contain only membership events @@ -849,7 +805,7 @@ async def _event_to_memberships( return { e_id: { - key: event_map[inner_e_id] + key: (event_map[inner_e_id].state_key, event_map[inner_e_id].membership) for key, inner_e_id in key_to_eid.items() if inner_e_id in event_map } diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index 1960d2f0e1..9dd0e98971 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -1262,3 +1262,35 @@ class SyncStateAfterTestCase(tests.unittest.HomeserverTestCase): ) ) self.assertEqual(state[("m.test_event", "")], second_state["event_id"]) + + def test_incremental_sync_lazy_loaded_no_timeline(self) -> None: + """Test that lazy-loading with an empty timeline doesn't return the full + state. + + There was a bug where an empty state filter would cause the DB to return + the full state, rather than an empty set. + """ + user = self.register_user("user", "password") + tok = self.login("user", "password") + + # Create a room as the user and set some custom state. 
+ joined_room = self.helper.create_room_as(user, tok=tok) + + since_token = self.hs.get_event_sources().get_current_token() + end_stream_token = self.hs.get_event_sources().get_current_token() + + state = self.get_success( + self.sync_handler._compute_state_delta_for_incremental_sync( + room_id=joined_room, + sync_config=generate_sync_config(user, use_state_after=True), + batch=TimelineBatch( + prev_batch=end_stream_token, events=[], limited=True + ), + since_token=since_token, + end_token=end_stream_token, + members_to_fetch=set(), + timeline_state={}, + ) + ) + + self.assertEqual(state, {}) diff --git a/tests/logging/test_terse_json.py b/tests/logging/test_terse_json.py index ff85e067b7..33b94cf9fa 100644 --- a/tests/logging/test_terse_json.py +++ b/tests/logging/test_terse_json.py @@ -164,7 +164,6 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase): site.site_tag = "test-site" site.server_version_string = "Server v1" site.reactor = Mock() - site.experimental_cors_msc3886 = False request = SynapseRequest( cast(HTTPChannel, FakeChannel(site, self.reactor)), site ) diff --git a/tests/rest/admin/test_federation.py b/tests/rest/admin/test_federation.py index c2015774a1..d5ae3345f5 100644 --- a/tests/rest/admin/test_federation.py +++ b/tests/rest/admin/test_federation.py @@ -96,7 +96,7 @@ class FederationTestCase(unittest.HomeserverTestCase): self.assertEqual(400, channel.code, msg=channel.json_body) self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"]) - # unkown order_by + # unknown order_by channel = self.make_request( "GET", self.url + "?order_by=bar", diff --git a/tests/rest/admin/test_statistics.py b/tests/rest/admin/test_statistics.py index 5f60e19e56..07ec49c4e5 100644 --- a/tests/rest/admin/test_statistics.py +++ b/tests/rest/admin/test_statistics.py @@ -82,7 +82,7 @@ class UserMediaStatisticsTestCase(unittest.HomeserverTestCase): """ If parameters are invalid, an error is returned. 
""" - # unkown order_by + # unknown order_by channel = self.make_request( "GET", self.url + "?order_by=bar", diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py index f9ae50f40a..668ccb89ff 100644 --- a/tests/rest/admin/test_user.py +++ b/tests/rest/admin/test_user.py @@ -719,7 +719,7 @@ class UsersListTestCase(unittest.HomeserverTestCase): self.assertEqual(400, channel.code, msg=channel.json_body) self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"]) - # unkown order_by + # unknown order_by channel = self.make_request( "GET", self.url + "?order_by=bar", @@ -3696,7 +3696,7 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase): @parameterized.expand(["GET", "DELETE"]) def test_invalid_parameter(self, method: str) -> None: """If parameters are invalid, an error is returned.""" - # unkown order_by + # unknown order_by channel = self.make_request( method, self.url + "?order_by=bar", diff --git a/tests/rest/client/test_rendezvous.py b/tests/rest/client/test_rendezvous.py index 0ab754a11a..ab701680a6 100644 --- a/tests/rest/client/test_rendezvous.py +++ b/tests/rest/client/test_rendezvous.py @@ -34,7 +34,6 @@ from tests import unittest from tests.unittest import override_config from tests.utils import HAS_AUTHLIB -msc3886_endpoint = "/_matrix/client/unstable/org.matrix.msc3886/rendezvous" msc4108_endpoint = "/_matrix/client/unstable/org.matrix.msc4108/rendezvous" @@ -54,17 +53,9 @@ class RendezvousServletTestCase(unittest.HomeserverTestCase): } def test_disabled(self) -> None: - channel = self.make_request("POST", msc3886_endpoint, {}, access_token=None) - self.assertEqual(channel.code, 404) channel = self.make_request("POST", msc4108_endpoint, {}, access_token=None) self.assertEqual(channel.code, 404) - @override_config({"experimental_features": {"msc3886_endpoint": "/asd"}}) - def test_msc3886_redirect(self) -> None: - channel = self.make_request("POST", msc3886_endpoint, {}, access_token=None) - self.assertEqual(channel.code, 307) - self.assertEqual(channel.headers.getRawHeaders("Location"), ["/asd"]) - @unittest.skip_unless(HAS_AUTHLIB, "requires authlib") @override_config( { diff --git a/tests/server.py b/tests/server.py index 23c81203a5..84ed9f68eb 100644 --- a/tests/server.py +++ b/tests/server.py @@ -343,7 +343,6 @@ class FakeSite: self, resource: IResource, reactor: IReactorTime, - experimental_cors_msc3886: bool = False, ): """ @@ -352,7 +351,6 @@ class FakeSite: """ self._resource = resource self.reactor = reactor - self.experimental_cors_msc3886 = experimental_cors_msc3886 def getResourceFor(self, request: Request) -> IResource: return self._resource diff --git a/tests/test_server.py b/tests/test_server.py index 9ff2589497..9cb6766b5f 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -233,9 +233,7 @@ class OptionsResourceTests(unittest.TestCase): self.resource = OptionsResource() self.resource.putChild(b"res", DummyResource()) - def _make_request( - self, method: bytes, path: bytes, experimental_cors_msc3886: bool = False - ) -> FakeChannel: + def _make_request(self, method: bytes, path: bytes) -> FakeChannel: """Create a request from the method/path and return a channel with the response.""" # Create a site and query for the resource. 
site = SynapseSite( @@ -246,7 +244,6 @@ class OptionsResourceTests(unittest.TestCase): { "type": "http", "port": 0, - "experimental_cors_msc3886": experimental_cors_msc3886, }, ), self.resource, @@ -283,32 +280,6 @@ class OptionsResourceTests(unittest.TestCase): [b"Synapse-Trace-Id, Server"], ) - def _check_cors_msc3886_headers(self, channel: FakeChannel) -> None: - # Ensure the correct CORS headers have been added - # as per https://github.com/matrix-org/matrix-spec-proposals/blob/hughns/simple-rendezvous-capability/proposals/3886-simple-rendezvous-capability.md#cors - self.assertEqual( - channel.headers.getRawHeaders(b"Access-Control-Allow-Origin"), - [b"*"], - "has correct CORS Origin header", - ) - self.assertEqual( - channel.headers.getRawHeaders(b"Access-Control-Allow-Methods"), - [b"GET, HEAD, POST, PUT, DELETE, OPTIONS"], # HEAD isn't in the spec - "has correct CORS Methods header", - ) - self.assertEqual( - channel.headers.getRawHeaders(b"Access-Control-Allow-Headers"), - [ - b"X-Requested-With, Content-Type, Authorization, Date, If-Match, If-None-Match" - ], - "has correct CORS Headers header", - ) - self.assertEqual( - channel.headers.getRawHeaders(b"Access-Control-Expose-Headers"), - [b"ETag, Location, X-Max-Bytes"], - "has correct CORS Expose Headers header", - ) - def test_unknown_options_request(self) -> None: """An OPTIONS requests to an unknown URL still returns 204 No Content.""" channel = self._make_request(b"OPTIONS", b"/foo/") @@ -325,16 +296,6 @@ class OptionsResourceTests(unittest.TestCase): self._check_cors_standard_headers(channel) - def test_known_options_request_msc3886(self) -> None: - """An OPTIONS requests to an known URL still returns 204 No Content.""" - channel = self._make_request( - b"OPTIONS", b"/res/", experimental_cors_msc3886=True - ) - self.assertEqual(channel.code, 204) - self.assertNotIn("body", channel.result) - - self._check_cors_msc3886_headers(channel) - def test_unknown_request(self) -> None: """A non-OPTIONS request to an unknown URL should 404.""" channel = self._make_request(b"GET", b"/foo/")
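
For reference, the `event_visible_to_server(...)` call introduced in `synapse/visibility.py` replaces the deleted `is_sender_erased`/`check_event_is_visible` helpers with a Rust implementation, stubbed in `synapse/synapse_rust/events.pyi`. The sketch below is a rough pure-Python restatement of that rule, reconstructed from the deleted helpers and the `.pyi` docstring; it is an illustration of the intended semantics under that assumption, not the actual Rust binding, and `event_visible_to_server_py` plus the string constants are simplified stand-ins for Synapse's `HistoryVisibility` and `Membership` enums.

```python
from typing import List, Mapping, Tuple

# Simplified stand-ins for Synapse's HistoryVisibility/Membership constants.
INVITED, JOINED = "invited", "joined"
MEMBERSHIP_JOIN, MEMBERSHIP_INVITE = "join", "invite"


def event_visible_to_server_py(
    sender: str,
    target_server_name: str,
    history_visibility: str,
    erased_senders: Mapping[str, bool],
    partial_state_invisible: bool,
    memberships: List[Tuple[str, str]],  # (state_key, membership) pairs
) -> bool:
    """Whether target_server_name may see the unredacted event (illustrative only).

    `memberships` is assumed to already be restricted to users on
    `target_server_name`, which is what `_event_to_memberships` produces.
    """
    # Events we only know about via partial state are never shared.
    if partial_state_invisible:
        return False

    # Events from erased senders must not be shown unredacted.
    if erased_senders.get(sender, False):
        return False

    # world_readable / shared / default visibility: any server may see it.
    if history_visibility not in (INVITED, JOINED):
        return True

    # Otherwise the target server needs a joined user, or an invited user
    # when the visibility is "invited".
    for _state_key, membership in memberships:
        if membership == MEMBERSHIP_JOIN:
            return True
        if membership == MEMBERSHIP_INVITE and history_visibility == INVITED:
            return True

    return False


if __name__ == "__main__":
    # Shared history: visible even with no members on the target server.
    assert event_visible_to_server_py("@alice:a", "b", "shared", {}, False, [])
    # Joined-only history with no joined/invited user on "b": not visible.
    assert not event_visible_to_server_py(
        "@alice:a", "b", "joined", {}, False, [("@bob:b", "leave")]
    )
```

As in the code removed from `filter_events_for_server`, an event that fails this check is either dropped or passed through `prune_event`, depending on the `redact` flag at the call site.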