Merge remote-tracking branch 'origin/develop' into matrix-org-hotfixes

This commit is contained in:
Patrick Cloke 2023-10-02 09:08:23 -04:00
commit 656ffa23c9
112 changed files with 1551 additions and 598 deletions

View file

@ -24,13 +24,13 @@ jobs:
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Inspect builder
run: docker buildx inspect
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Extract version from pyproject.toml
# Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see

View file

@ -14,7 +14,7 @@ jobs:
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
- name: 📥 Download artifact
uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615 # v2.27.0
uses: dawidd6/action-download-artifact@268677152d06ba59fcec7a7f0b5d961b6ccd7e1e # v2.28.0
with:
workflow: docs-pr.yaml
run_id: ${{ github.event.workflow_run.id }}

View file

@ -12,7 +12,7 @@ jobs:
name: GitHub Pages
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Setup mdbook
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
@ -39,7 +39,7 @@ jobs:
name: Check links in documentation
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Setup mdbook
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0

View file

@ -50,7 +50,7 @@ jobs:
needs:
- pre
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Setup mdbook
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
@ -80,7 +80,7 @@ jobs:
needs:
- pre
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: "Set up Sphinx"
uses: matrix-org/setup-python-poetry@v1

View file

@ -39,7 +39,7 @@ jobs:
if: needs.check_repo.outputs.should_run_workflow == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
@ -72,7 +72,7 @@ jobs:
postgres-version: "14"
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
@ -145,7 +145,7 @@ jobs:
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
@ -192,8 +192,8 @@ jobs:
database: Postgres
steps:
- name: Run actions/checkout@v3 for synapse
uses: actions/checkout@v3
- name: Run actions/checkout@v4 for synapse
uses: actions/checkout@v4
with:
path: synapse
@ -222,7 +222,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View file

@ -16,7 +16,7 @@ jobs:
name: "Check locked dependencies have sdists"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: '3.x'

View file

@ -33,17 +33,17 @@ jobs:
packages: write
steps:
- name: Checkout specific branch (debug build)
uses: actions/checkout@v3
uses: actions/checkout@v4
if: github.event_name == 'workflow_dispatch'
with:
ref: ${{ inputs.branch }}
- name: Checkout clean copy of develop (scheduled build)
uses: actions/checkout@v3
uses: actions/checkout@v4
if: github.event_name == 'schedule'
with:
ref: develop
- name: Checkout clean copy of master (on-push)
uses: actions/checkout@v3
uses: actions/checkout@v4
if: github.event_name == 'push'
with:
ref: master

View file

@ -27,7 +27,7 @@ jobs:
name: "Calculate list of debian distros"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: '3.x'
@ -55,13 +55,13 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
path: src
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
with:
install: true
@ -121,7 +121,7 @@ jobs:
arch: aarch64
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
@ -167,7 +167,7 @@ jobs:
if: ${{ !startsWith(github.ref, 'refs/pull/') }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: '3.10'

View file

@ -12,12 +12,19 @@ concurrency:
cancel-in-progress: true
jobs:
check-signoff:
if: "github.event_name == 'pull_request'"
uses: "matrix-org/backend-meta/.github/workflows/sign-off.yml@v2"
# Job to detect what has changed so we don't run e.g. Rust checks on PRs that
# don't modify Rust code.
changes:
runs-on: ubuntu-latest
outputs:
rust: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.rust }}
trial: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.trial }}
integration: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.integration }}
linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
steps:
- uses: dorny/paths-filter@v2
id: filter
@ -29,11 +36,47 @@ jobs:
- 'rust/**'
- 'Cargo.toml'
- 'Cargo.lock'
- '.rustfmt.toml'
trial:
- 'synapse/**'
- 'tests/**'
- 'rust/**'
- 'Cargo.toml'
- 'Cargo.lock'
- 'pyproject.toml'
- 'poetry.lock'
integration:
- 'synapse/**'
- 'rust/**'
- 'docker/**'
- 'Cargo.toml'
- 'Cargo.lock'
- 'pyproject.toml'
- 'poetry.lock'
linting:
- 'synapse/**'
- 'docker/**'
- 'tests/**'
- 'scripts-dev/**'
- 'contrib/**'
- 'synmark/**'
- 'stubs/**'
- '.ci/**'
- 'mypy.ini'
- 'pyproject.toml'
- 'poetry.lock'
check-sampleconfig:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0
- uses: Swatinem/rust-cache@v2
@ -47,8 +90,11 @@ jobs:
check-schema-delta:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: "3.x"
@ -58,7 +104,7 @@ jobs:
check-lockfile:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: "3.x"
@ -66,9 +112,12 @@ jobs:
lint:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@v1
@ -88,9 +137,12 @@ jobs:
lint-mypy:
runs-on: ubuntu-latest
name: Typechecking
needs: changes
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0
@ -123,7 +175,7 @@ jobs:
lint-crlf:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Check line endings
run: scripts-dev/check_line_terminators.sh
@ -131,7 +183,7 @@ jobs:
if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0
@ -145,8 +197,11 @@ jobs:
lint-pydantic:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Install Rust
@ -164,7 +219,7 @@ jobs:
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0
@ -182,7 +237,7 @@ jobs:
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@master
@ -199,7 +254,7 @@ jobs:
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@master
@ -225,6 +280,7 @@ jobs:
- check-lockfile
- lint-clippy
- lint-rustfmt
- check-signoff
runs-on: ubuntu-latest
steps:
- run: "true"
@ -234,7 +290,7 @@ jobs:
needs: linting-done
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: "3.x"
@ -245,15 +301,17 @@ jobs:
sytest_test_matrix: ${{ steps.get-matrix.outputs.sytest_test_matrix }}
trial:
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
needs: calculate-test-jobs
if: ${{ !cancelled() && !failure() && needs.changes.outputs.trial == 'true' }} # Allow previous steps to be skipped, but not fail
needs:
- calculate-test-jobs
- changes
runs-on: ubuntu-latest
strategy:
matrix:
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
if: ${{ matrix.job.postgres-version }}
@ -301,11 +359,13 @@ jobs:
trial-olddeps:
# Note: sqlite only; no postgres
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
needs: linting-done
if: ${{ !cancelled() && !failure() && needs.changes.outputs.trial == 'true' }} # Allow previous steps to be skipped, but not fail
needs:
- linting-done
- changes
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0
@ -357,8 +417,10 @@ jobs:
trial-pypy:
# Very slow; only run if the branch name includes 'pypy'
# Note: sqlite only; no postgres. Completely untested since poetry move.
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
needs: linting-done
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() && needs.changes.outputs.trial == 'true' }}
needs:
- linting-done
- changes
runs-on: ubuntu-latest
strategy:
matrix:
@ -366,7 +428,7 @@ jobs:
extras: ["all"]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
# Install libs necessary for PyPy to build binary wheels for dependencies
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
- uses: matrix-org/setup-python-poetry@v1
@ -389,8 +451,10 @@ jobs:
|| true
sytest:
if: ${{ !failure() && !cancelled() }}
needs: calculate-test-jobs
if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }}
needs:
- calculate-test-jobs
- changes
runs-on: ubuntu-latest
container:
image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }}
@ -411,7 +475,7 @@ jobs:
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Prepare test blacklist
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
@ -435,8 +499,8 @@ jobs:
/logs/**/*.log*
export-data:
if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
needs: [linting-done, portdb]
if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }} # Allow previous steps to be skipped, but not fail
needs: [linting-done, portdb, changes]
runs-on: ubuntu-latest
env:
TOP: ${{ github.workspace }}
@ -456,7 +520,7 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- run: sudo apt-get -qq install xmlsec1 postgresql-client
- uses: matrix-org/setup-python-poetry@v1
with:
@ -471,8 +535,10 @@ jobs:
portdb:
if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
needs: linting-done
if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }} # Allow previous steps to be skipped, but not fail
needs:
- linting-done
- changes
runs-on: ubuntu-latest
strategy:
matrix:
@ -498,7 +564,7 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Add PostgreSQL apt repository
# We need a version of pg_dump that can handle the version of
# PostgreSQL being tested against. The Ubuntu package repository lags
@ -532,8 +598,10 @@ jobs:
schema_diff
complement:
if: "${{ !failure() && !cancelled() }}"
needs: linting-done
if: "${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }}"
needs:
- linting-done
- changes
runs-on: ubuntu-latest
strategy:
@ -550,8 +618,8 @@ jobs:
database: Postgres
steps:
- name: Run actions/checkout@v3 for synapse
uses: actions/checkout@v3
- name: Run actions/checkout@v4 for synapse
uses: actions/checkout@v4
with:
path: synapse
@ -581,7 +649,7 @@ jobs:
- changes
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0
@ -599,7 +667,7 @@ jobs:
- changes
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@master
@ -627,9 +695,16 @@ jobs:
with:
needs: ${{ toJSON(needs) }}
# The newsfile lint may be skipped on non PR builds
# Cargo test is skipped if there is no changes on Rust code
# Various bits are skipped if there were no applicable changes.
# The newsfile and signoff lint may be skipped on non PR builds.
skippable: |
trial
trial-olddeps
sytest
portdb
export-data
complement
check-signoff
lint-newsfile
cargo-test
cargo-bench

View file

@ -40,7 +40,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
@ -64,7 +64,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- run: sudo apt-get -qq install xmlsec1
- name: Install Rust
@ -108,7 +108,7 @@ jobs:
- ${{ github.workspace }}:/src
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
@ -163,8 +163,8 @@ jobs:
database: Postgres
steps:
- name: Run actions/checkout@v3 for synapse
uses: actions/checkout@v3
- name: Run actions/checkout@v4 for synapse
uses: actions/checkout@v4
with:
path: synapse
@ -203,7 +203,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View file

@ -1,13 +1,34 @@
# Synapse 1.93.0 (2023-09-26)
No significant changes since 1.93.0rc1.
## Security advisory
The following issues are fixed in 1.93.0 (and RCs).
- [GHSA-4f74-84v3-j9q5](https://github.com/matrix-org/synapse/security/advisories/GHSA-4f74-84v3-j9q5) / [CVE-2023-41335](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-41335) — Low Severity
Temporary storage of plaintext passwords during password changes.
- [GHSA-7565-cq32-vx2x](https://github.com/matrix-org/synapse/security/advisories/GHSA-7565-cq32-vx2x) / [CVE-2023-42453](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-42453) — Low Severity
Improper validation of receipts allows forged read receipts.
See the advisories for more details. If you have any questions, email security@matrix.org.
# Synapse 1.93.0rc1 (2023-09-19)
### Features
- Add automatic purge after all users have forgotten a room. Also restores purge/shutdown rooms after a Synapse restart. ([\#15488](https://github.com/matrix-org/synapse/issues/15488))
- Add automatic purge after all users have forgotten a room. ([\#15488](https://github.com/matrix-org/synapse/issues/15488))
- Restore room purge/shutdown after a Synapse restart. ([\#15488](https://github.com/matrix-org/synapse/issues/15488))
- Support resolving homeservers using `matrix-fed` DNS SRV records from [MSC4040](https://github.com/matrix-org/matrix-spec-proposals/pull/4040). ([\#16137](https://github.com/matrix-org/synapse/issues/16137))
- Add the ability to use `G` (GiB) and `T` (TiB) suffixes in configuration options that refer to numbers of bytes. ([\#16219](https://github.com/matrix-org/synapse/issues/16219))
- Add span information to requests sent to appservices. Contributed by MTRNord. ([\#16227](https://github.com/matrix-org/synapse/issues/16227))
- Add the ability to enable/disable registrations when using CAS. Contributed by Aurélien Grimpard. ([\#16262](https://github.com/matrix-org/synapse/issues/16262))
- Allow `/notifications` endpoint to be routed to workers. ([\#16265](https://github.com/matrix-org/synapse/issues/16265))
- Allow the `/notifications` endpoint to be routed to workers. ([\#16265](https://github.com/matrix-org/synapse/issues/16265))
- Enable users to easily unsubscribe from notification emails via the `List-Unsubscribe` header. ([\#16274](https://github.com/matrix-org/synapse/issues/16274))
- Report whether a user is `locked` in the [List Accounts admin API](https://matrix-org.github.io/synapse/latest/admin_api/user_admin_api.html#list-accounts), and exclude locked users by default. ([\#16328](https://github.com/matrix-org/synapse/issues/16328))
@ -21,6 +42,7 @@
- Fix bug introduced in Synapse 1.49.0 when using dehydrated devices ([MSC2697](https://github.com/matrix-org/matrix-spec-proposals/pull/2697)) and refresh tokens. Contributed by Hanadi. ([\#16288](https://github.com/matrix-org/synapse/issues/16288))
- Fix a long-standing bug where invalid receipts would be accepted. ([\#16327](https://github.com/matrix-org/synapse/issues/16327))
- Use standard name for UTF-8 charset in emails. ([\#16329](https://github.com/matrix-org/synapse/issues/16329))
- Don't try refetching device lists for users on remote hosts that are marked as "down". ([\#16298](https://github.com/matrix-org/synapse/issues/16298))
### Improved Documentation
@ -45,10 +67,9 @@
- Upgrade CI run of Python 3.12 from rc1 to rc2. ([\#16280](https://github.com/matrix-org/synapse/issues/16280))
- Include values in SQL debug when using `execute_values` with Postgres. ([\#16281](https://github.com/matrix-org/synapse/issues/16281))
- Enable additional linting checks. ([\#16283](https://github.com/matrix-org/synapse/issues/16283))
- Don't try refetching device lists for users on remote hosts that are marked as "down". ([\#16298](https://github.com/matrix-org/synapse/issues/16298))
- Refactor `receipts_graph` Postgres transactions to stop error messages. ([\#16299](https://github.com/matrix-org/synapse/issues/16299))
- Small improvements to logging in replication code. ([\#16309](https://github.com/matrix-org/synapse/issues/16309))
- Remove a reference cycle for in background processes. ([\#16314](https://github.com/matrix-org/synapse/issues/16314))
- Remove a reference cycle in background processes. ([\#16314](https://github.com/matrix-org/synapse/issues/16314))
- Only use literal strings for background process names. ([\#16315](https://github.com/matrix-org/synapse/issues/16315))
- Refactor `get_user_by_id`. ([\#16316](https://github.com/matrix-org/synapse/issues/16316))
- Speed up task to delete to-device messages. ([\#16318](https://github.com/matrix-org/synapse/issues/16318))

8
Cargo.lock generated
View file

@ -291,9 +291,9 @@ dependencies = [
[[package]]
name = "regex"
version = "1.9.5"
version = "1.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "697061221ea1b4a94a624f67d0ae2bfe4e22b8a17b6a192afb11046542cc8c47"
checksum = "ebee201405406dbf528b8b672104ae6d6d63e6d118cb10e4d51abbc7b58044ff"
dependencies = [
"aho-corasick",
"memchr",
@ -303,9 +303,9 @@ dependencies = [
[[package]]
name = "regex-automata"
version = "0.3.8"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2f401f4955220693b56f8ec66ee9c78abffd8d1c4f23dc41a23839eb88f0795"
checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9"
dependencies = [
"aho-corasick",
"memchr",

1
changelog.d/14745.misc Normal file
View file

@ -0,0 +1 @@
Avoid running CI steps when the files they check have not been changed.

1
changelog.d/15691.doc Normal file
View file

@ -0,0 +1 @@
Add developer documentation concerning gradual schema migrations with column alterations.

View file

@ -0,0 +1 @@
Render plain text, CSS, CSV, JSON and common image media content in the browser (inline) when requested through the /download endpoint.

1
changelog.d/16320.doc Normal file
View file

@ -0,0 +1 @@
Improve documentation of the user directory search algorithm.

1
changelog.d/16332.misc Normal file
View file

@ -0,0 +1 @@
Added support for pydantic v2 in addition to pydantic v1. Contributed by Maxwell G (@gotmax23).

1
changelog.d/16348.misc Normal file
View file

@ -0,0 +1 @@
Get CI to check PRs have been signed-off.

1
changelog.d/16350.misc Normal file
View file

@ -0,0 +1 @@
Improve type hints.

1
changelog.d/16355.doc Normal file
View file

@ -0,0 +1 @@
Fix rendering of user admin API documentation around deactivation. This was broken in Synapse 1.91.0.

1
changelog.d/16356.misc Normal file
View file

@ -0,0 +1 @@
Improve type hints.

1
changelog.d/16359.misc Normal file
View file

@ -0,0 +1 @@
Add missing licence header.

1
changelog.d/16360.misc Normal file
View file

@ -0,0 +1 @@
Cache server ACL checking.

View file

@ -0,0 +1 @@
Experimental support for [MSC4028](https://github.com/matrix-org/matrix-spec-proposals/pull/4028) to push all encrypted events to clients.

View file

@ -0,0 +1 @@
Remove Python version from `/_synapse/admin/v1/server_version`.

1
changelog.d/16381.misc Normal file
View file

@ -0,0 +1 @@
Improve type hints, and bump types-psycopg2 from 2.9.21.11 to 2.9.21.14.

1
changelog.d/16382.doc Normal file
View file

@ -0,0 +1 @@
Update documentation around message retention policies.

1
changelog.d/16383.misc Normal file
View file

@ -0,0 +1 @@
Improve comments in `StateGroupBackgroundUpdateStore`.

1
changelog.d/16385.misc Normal file
View file

@ -0,0 +1 @@
Minor performance improvement when sending presence to federated servers.

1
changelog.d/16387.misc Normal file
View file

@ -0,0 +1 @@
Avoid running CI steps when the files they check have not been changed.

1
changelog.d/16394.misc Normal file
View file

@ -0,0 +1 @@
Update maturin configuration.

1
changelog.d/16395.misc Normal file
View file

@ -0,0 +1 @@
Improve type hints.

1
changelog.d/16401.misc Normal file
View file

@ -0,0 +1 @@
Downgrade replication stream timeout error log lines to warning.

6
debian/changelog vendored
View file

@ -1,3 +1,9 @@
matrix-synapse-py3 (1.93.0) stable; urgency=medium
* New Synapse release 1.93.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Sep 2023 15:54:40 +0100
matrix-synapse-py3 (1.93.0~rc1) stable; urgency=medium
* New synapse release 1.93.0rc1.

View file

@ -148,7 +148,6 @@ Body parameters:
- `admin` - **bool**, optional, defaults to `false`. Whether the user is a homeserver administrator,
granting them access to the Admin API, among other things.
- `deactivated` - **bool**, optional. If unspecified, deactivation state will be left unchanged.
- `locked` - **bool**, optional. If unspecified, locked state will be left unchanged.
Note: the `password` field must also be set if both of the following are true:
- `deactivated` is set to `false` and the user was previously deactivated (you are reactivating this user)
@ -158,6 +157,7 @@ Body parameters:
Note: a user cannot be erased with this API. For more details on
deactivating and erasing users see [Deactivate Account](#deactivate-account).
- `locked` - **bool**, optional. If unspecified, locked state will be left unchanged.
- `user_type` - **string** or null, optional. If not provided, the user type will
not be changed. If `null` is given, the user type will be cleared.
Other allowed options are: `bot` and `support`.

View file

@ -1,7 +1,7 @@
# Version API
This API returns the running Synapse version and the Python version
on which Synapse is being run. This is useful when a Synapse instance
This API returns the running Synapse version.
This is useful when a Synapse instance
is behind a proxy that does not forward the 'Server' header (which also
contains Synapse version information).
@ -15,7 +15,9 @@ It returns a JSON body like the following:
```json
{
"server_version": "0.99.2rc1 (b=develop, abcdef123)",
"python_version": "3.7.8"
"server_version": "0.99.2rc1 (b=develop, abcdef123)"
}
```
*Changed in Synapse 1.94.0:* The `python_version` key was removed from the
response body.

View file

@ -184,3 +184,160 @@ version `3`, that can only happen with a hash collision, which we basically hope
will never happen (SHA256 has a massive key space).
## Worked examples of gradual migrations
Some migrations need to be performed gradually. A prime example of this is anything
which would need to do a large table scan, including adding columns, indices or
`NOT NULL` constraints to non-empty tables. Such a migration should be done as a
background update where possible, at least on Postgres.
We can afford to be more relaxed about SQLite databases since they are usually
used on smaller deployments and SQLite does not support the same concurrent
DDL operations as Postgres.
We also typically insist on having at least one Synapse version's worth of
backwards compatibility, so that administrators can roll back Synapse if an upgrade
did not go smoothly.
This sometimes results in having to plan a migration across multiple versions
of Synapse.
This section includes an example and may include more in the future.
### Transforming a column into another one, with `NOT NULL` constraints
This example illustrates how you would introduce a new column, write data into it
based on data from an old column and then drop the old column.
We are aiming for semantic equivalence to:
```sql
ALTER TABLE mytable ADD COLUMN new_column INTEGER;
UPDATE mytable SET new_column = old_column * 100;
ALTER TABLE mytable ALTER COLUMN new_column SET NOT NULL;
ALTER TABLE mytable DROP COLUMN old_column;
```
#### Synapse version `N`
```python
SCHEMA_VERSION = S
SCHEMA_COMPAT_VERSION = ... # unimportant at this stage
```
**Invariants:**
1. `old_column` is read by Synapse and written to by Synapse.
#### Synapse version `N + 1`
```python
SCHEMA_VERSION = S + 1
SCHEMA_COMPAT_VERSION = ... # unimportant at this stage
```
**Changes:**
1.
```sql
ALTER TABLE mytable ADD COLUMN new_column INTEGER;
```
**Invariants:**
1. `old_column` is read by Synapse and written to by Synapse.
2. `new_column` is written to by Synapse.
**Notes:**
1. `new_column` can't have a `NOT NULL NOT VALID` constraint yet, because the previous Synapse version did not write to the new column (since we haven't bumped the `SCHEMA_COMPAT_VERSION` yet, we still need to be compatible with the previous version).
#### Synapse version `N + 2`
```python
SCHEMA_VERSION = S + 2
SCHEMA_COMPAT_VERSION = S + 1 # this signals that we can't roll back to a time before new_column existed
```
**Changes:**
1. On Postgres, add a `NOT VALID` constraint to ensure new rows are compliant. *SQLite does not have such a construct, but it would be unnecessary anyway since there is no way to concurrently perform this migration on SQLite.*
```sql
ALTER TABLE mytable ADD CONSTRAINT new_column_not_null CHECK (new_column IS NOT NULL) NOT VALID;
```
2. Start a background update to perform migration: it should gradually run e.g.
```sql
UPDATE mytable SET new_column = old_column * 100 WHERE 0 < mytable_id AND mytable_id <= 5;
```
This background update is technically pointless on SQLite, but you must schedule it anyway so that the `portdb` script to migrate to Postgres still works. (A sketch of such a batched backfill follows this list.)
3. Upon completion of the background update, you should run `VALIDATE CONSTRAINT` on Postgres to turn the `NOT VALID` constraint into a valid one.
```sql
ALTER TABLE mytable VALIDATE CONSTRAINT new_column_not_null;
```
This will take some time but does **NOT** hold an exclusive lock over the table.
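To make the batched backfill in step 2 concrete, here is a minimal sketch in Python, assuming a DB-API connection `conn` (e.g. from psycopg2) and an integer primary key `mytable_id`. The helper name and batch size are illustrative; Synapse's real background updates are scheduled and resumed by its background-update machinery rather than a bare loop like this.
```python
BATCH_SIZE = 1000

def backfill_new_column(conn) -> None:
    """Populate new_column from old_column in small batches."""
    next_id = 0
    while True:
        cur = conn.cursor()
        # Each batch touches a bounded range of rows, keeping
        # transactions (and therefore lock times) short.
        cur.execute(
            "UPDATE mytable SET new_column = old_column * 100"
            " WHERE %s < mytable_id AND mytable_id <= %s",
            (next_id, next_id + BATCH_SIZE),
        )
        conn.commit()
        cur.execute("SELECT max(mytable_id) FROM mytable")
        (max_id,) = cur.fetchone()
        cur.close()
        if max_id is None or next_id + BATCH_SIZE >= max_id:
            break
        next_id += BATCH_SIZE
```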
**Invariants:**
1. `old_column` is read by Synapse and written to by Synapse.
2. `new_column` is written to by Synapse and new rows always have a non-`NULL` value in this field.
**Notes:**
1. If you wish, you can convert the `CHECK (new_column IS NOT NULL)` to a `NOT NULL` constraint free of charge in Postgres by adding the `NOT NULL` constraint and then dropping the `CHECK` constraint, because Postgres can statically verify that the `NOT NULL` constraint is implied by the `CHECK` constraint without performing a table scan.
2. It might be tempting to make version `N + 2` redundant by moving the background update to `N + 1` and delaying adding the `NOT NULL` constraint to `N + 3`, but that would mean the constraint would always be validated in the foreground in `N + 3`, whereas if the `N + 2` step is kept, the migration in `N + 3` is fast in the happy case.
#### Synapse version `N + 3`
```python
SCHEMA_VERSION = S + 3
SCHEMA_COMPAT_VERSION = S + 1 # we can't roll back to a time before new_column existed
```
**Changes:**
1. (Postgres) Update the table to populate values of `new_column` in case the background update had not completed. Additionally, `VALIDATE CONSTRAINT` to make the check fully valid.
```sql
-- you ideally want an index on `new_column` or e.g. `(new_column) WHERE new_column IS NULL` first, or perhaps you can find a way to skip this if the `NOT NULL` constraint has already been validated.
UPDATE mytable SET new_column = old_column * 100 WHERE new_column IS NULL;
-- this is a no-op if it already ran as part of the background update
ALTER TABLE mytable VALIDATE CONSTRAINT new_column_not_null;
```
2. (SQLite) Recreate the table by precisely following [the 12-step procedure for SQLite table schema changes](https://www.sqlite.org/lang_altertable.html#otheralter).
During this table rewrite, you should recreate `new_column` as `NOT NULL` and populate any outstanding `NULL` values at the same time.
Unfortunately, you can't drop `old_column` yet because it must be present for compatibility with the Postgres schema, as needed by `portdb`.
(Otherwise you could do this all in one go with SQLite!)
**Invariants:**
1. `old_column` is written to by Synapse (but no longer read by Synapse!).
2. `new_column` is read by Synapse and written to by Synapse. Moreover, all rows have a non-`NULL` value in this field, as guaranteed by a schema constraint.
**Notes:**
1. We can't drop `old_column` yet, or even stop writing to it, because that would break a rollback to the previous version of Synapse.
2. Application code can now rely on `new_column` being populated. The remaining steps are only motivated by the wish to clean up old columns.
#### Synapse version `N + 4`
```python
SCHEMA_VERSION = S + 4
SCHEMA_COMPAT_VERSION = S + 3 # we can't roll back to a time before new_column was entirely non-NULL
```
**Invariants:**
1. `old_column` exists but is not written to or read from by Synapse.
2. `new_column` is read by Synapse and written to by Synapse. Moreover, all rows have a non-`NULL` value in this field, as guaranteed by a schema constraint.
**Notes:**
1. We can't drop `old_column` yet because that would break a rollback to the previous version of Synapse. \
**TODO:** It may be possible to relax this and drop the column straight away as long as the previous version of Synapse detected a rollback occurred and stopped attempting to write to the column. This could possibly be done by checking whether the database's schema compatibility version was `S + 3`.
#### Synapse version `N + 5`
```python
SCHEMA_VERSION = S + 5
SCHEMA_COMPAT_VERSION = S + 4 # we can't roll back to a time before old_column was no longer being touched
```
**Changes:**
1.
```sql
ALTER TABLE mytable DROP COLUMN old_column;
```

View file

@ -8,8 +8,7 @@ and allow server and room admins to configure how long messages should
be kept in a homeserver's database before being purged from it.
**Please note that, as this feature isn't part of the Matrix
specification yet, this implementation is to be considered as
experimental. There are known bugs which may cause database corruption.
Proceed with caution.**
experimental.**
A message retention policy is mainly defined by its `max_lifetime`
parameter, which defines how long a message can be kept around after

View file

@ -1026,11 +1026,8 @@ which are older than the room's maximum retention period. Synapse will also
filter events received over federation so that events that should have been
purged are ignored and not stored again.
The message retention policies feature is disabled by default. Please be advised
that enabling this feature carries some risk. There are known bugs with the implementation
which can cause database corruption. Setting retention to delete older history
is less risky than deleting newer history but in general caution is advised when enabling this
experimental feature. You can read more about this feature [here](../../message_retention_policies.md).
The message retention policies feature is disabled by default. You can read more
about this feature [here](../../message_retention_policies.md).
This setting has the following sub-options:
* `default_policy`: Default retention policy. If set, Synapse will apply it to rooms that lack the

View file

@ -1,49 +1,133 @@
User Directory API Implementation
=================================
# User Directory API Implementation
The user directory is currently maintained based on the 'visible' users
on this particular server - i.e. ones which your account shares a room with, or
who are present in a publicly viewable room present on the server.
The user directory is maintained based on users that are 'visible' to the homeserver -
i.e. ones which are local to the server and ones which any local user shares a
room with.
The directory info is stored in various tables, which can (typically after
DB corruption) get stale or out of sync. If this happens, for now the
The directory info is stored in various tables, which can sometimes get out of
sync (although this is considered a bug). If this happens, for now the
solution to fix it is to use the [admin API](usage/administration/admin_api/background_updates.md#run)
and execute the job `regenerate_directory`. This should then start a background task to
flush the current tables and regenerate the directory.
flush the current tables and regenerate the directory. Depending on the size
of your homeserver (number of users and rooms) this can take a while.
Data model
----------
## Data model
There are five relevant tables that collectively form the "user directory".
Three of them track a master list of all the users we could search for.
The last two (collectively called the "search tables") track who can
see who.
Three of them track a list of all known users. The last two (collectively called
the "search tables") track which users are visible to each other.
From all of these tables we exclude three types of local user:
- support users
- appservice users
- deactivated users
* `user_directory`. This contains the user_id, display name and avatar we'll
return when you search the directory.
- Because there's only one directory entry per user, it's important that we only
ever put publicly visible names here. Otherwise we might leak a private
- support users
- appservice users
- deactivated users
A description of each table follows:
* `user_directory`. This contains the user ID, display name and avatar of each user.
- Because there is only one directory entry per user, it is important that it
only contain publicly visible information. Otherwise, this will leak the
nickname or avatar used in a private room.
- Indexed on rooms. Indexed on users.
* `user_directory_search`. To be joined to `user_directory`. It contains an extra
column that enables full text search based on user ids and display names.
Different schemas for SQLite and Postgres with different code paths to match.
column that enables full text search based on user IDs and display names.
Different schemas for SQLite and Postgres are used.
- Indexed on the full text search data. Indexed on users.
* `user_directory_stream_pos`. When the initial background update to populate
the directory is complete, we record a stream position here. This indicates
that synapse should now listen for room changes and incrementally update
the directory where necessary.
the directory where necessary. (See [stream positions](development/synapse_architecture/streams.html).)
* `users_in_public_rooms`. Contains associations between users and the public rooms they're in.
Used to determine which users are in public rooms and should be publicly visible in the directory.
* `users_in_public_rooms`. Contains associations between users and the public
rooms they're in. Used to determine which users are in public rooms and should
be publicly visible in the directory. Both local and remote users are tracked.
* `users_who_share_private_rooms`. Rows are triples `(L, M, room id)` where `L`
is a local user and `M` is a local or remote user. `L` and `M` should be
different, but this isn't enforced by a constraint.
Note that if two local users share a room then there will be two entries:
`(user1, user2, !room_id)` and `(user2, user1, !room_id)`.
## Configuration options
The exact way user search works can be tweaked via some server-level
[configuration options](usage/configuration/config_documentation.md#user_directory).
The information is not repeated here, but the options are mentioned below.
## Search algorithm
If `search_all_users` is `false`, then results are limited to users who:
1. Are found in the `users_in_public_rooms` table, or
2. Are found in the `users_who_share_private_rooms` table where `L` is the requesting
user and `M` is the search result.
Otherwise, if `search_all_users` is `true`, no such limits are placed and all
users known to the server (matching the search query) will be returned.
By default, locked users are not returned. If `show_locked_users` is `true` then
no filtering on the locked status of a user is done.
The user-provided search term is lowercased and normalized using [NFKC](https://en.wikipedia.org/wiki/Unicode_equivalence#Normalization);
this treats the string as case-insensitive, canonicalizes different forms of the
same text, and maps some "roughly equivalent" characters together. (A short
sketch of this step, together with the fallback word-splitting, follows the
list below.)
The search term is then split into words:
* If [ICU](https://en.wikipedia.org/wiki/International_Components_for_Unicode) is
available, then the system's [default locale](https://unicode-org.github.io/icu/userguide/locale/#default-locales)
will be used to break the search term into words. (See the
[installation instructions](setup/installation.md) for how to install ICU.)
* If unavailable, then runs of ASCII characters, numbers, underscores, and hyphens
are considered words.
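A rough sketch of the normalization and of the non-ICU fallback split, in Python (the function name is hypothetical; this is not Synapse's actual code):
```python
import re
import unicodedata

def parse_words(search_term: str) -> list[str]:
    # Lowercase, then apply NFKC normalization to canonicalize
    # different forms of the same text and map "roughly equivalent"
    # characters together.
    term = unicodedata.normalize("NFKC", search_term.lower())
    # Non-ICU fallback: runs of ASCII letters, numbers, underscores
    # and hyphens count as words; everything else is a separator.
    return re.findall(r"[a-z0-9_\-]+", term)

# Fullwidth characters are folded to their ASCII equivalents by NFKC:
print(parse_words("Ａｌｉｃｅ Smith-Jones"))  # ['alice', 'smith-jones']
```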
The queries for PostgreSQL and SQLite are detailed below, but their overall goal
is to find matching users, preferring users who are "real" (e.g. not bots,
not deactivated). It is assumed that real users will have a display name and
avatar set.
### PostgreSQL
The above words are then transformed into two queries (a sketch follows this list):
1. "exact" which matches the parsed words exactly (using [`to_tsquery`](https://www.postgresql.org/docs/current/textsearch-controls.html#TEXTSEARCH-PARSING-QUERIES));
2. "prefix" which matches the parsed words as prefixes (using `to_tsquery`).
Results are composed of all rows in the `user_directory_search` table whose information
matches one (or both) of these queries. Results are ordered by calculating a weighted
score for each result; higher scores are returned first (a sketch follows below):
* 4x if a user ID exists.
* 1.2x if the user has a display name set.
* 1.2x if the user has an avatar set.
* 0x-3x by the full text search results using the [`ts_rank_cd` function](https://www.postgresql.org/docs/current/textsearch-controls.html#TEXTSEARCH-RANKING)
against the "exact" search query; this has four variables with the following weightings:
* `D`: 0.1 for the user ID's domain
* `C`: 0.1 for unused
* `B`: 0.9 for the user's display name (or an empty string if it is not set)
* `A`: 0.1 for the user ID's localpart
* 0x-1x by the full text search results using the `ts_rank_cd` function against the
"prefix" search query. (Using the same weightings as above.)
* If `prefer_local_users` is `true`, then 2x if the user is local to the homeserver.
Note that `ts_rank_cd` returns a weight between 0 and 1. The initial weighting of
all results is 1.
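Read literally, the weighting above could be computed as in the following Python sketch. How the two rank contributions combine is an assumption on our part (taken here as `3 * exact + prefix`), so treat this as illustrative rather than the exact formula:
```python
def score(
    has_user_id: bool,
    has_display_name: bool,
    has_avatar: bool,
    exact_rank: float,   # ts_rank_cd against the "exact" query, in [0, 1]
    prefix_rank: float,  # ts_rank_cd against the "prefix" query, in [0, 1]
    is_local: bool,
    prefer_local_users: bool,
) -> float:
    s = 1.0  # the initial weighting of all results
    if has_user_id:
        s *= 4.0
    if has_display_name:
        s *= 1.2
    if has_avatar:
        s *= 1.2
    # 0x-3x from the "exact" query plus 0x-1x from the "prefix" query
    # (the additive combination of the two rank terms is assumed).
    s *= 3.0 * exact_rank + prefix_rank
    if prefer_local_users and is_local:
        s *= 2.0
    return s
```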
### SQLite
Results are composed of all rows in the `user_directory_search` table whose
information matches the query. Results are ordered by the following information,
with each subsequent column used as a tiebreaker (a sketch follows the list):
1. By the [`rank`](https://www.sqlite.org/windowfunctions.html#built_in_window_functions)
of the full text search results using the [`matchinfo` function](https://www.sqlite.org/fts3.html#matchinfo). Higher
ranks are returned first.
2. If `prefer_local_users` is `true`, then users local to the homeserver are
returned first.
3. Users with a display name set are returned first.
4. Users with an avatar set are returned first.
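As a rough Python illustration of these tiebreakers (the row fields are hypothetical), sorting descending on a tuple key captures the same ordering:
```python
from dataclasses import dataclass

@dataclass
class Row:
    # Hypothetical result-row fields, for illustration only.
    user_id: str
    rank: float            # full text search rank from matchinfo
    is_local: bool
    has_display_name: bool
    has_avatar: bool

def order_results(rows: list[Row], prefer_local_users: bool) -> list[Row]:
    # Later tuple elements only matter when earlier ones tie; sorting
    # in reverse puts higher ranks and True flags first.
    return sorted(
        rows,
        key=lambda r: (
            r.rank,
            r.is_local if prefer_local_users else False,
            r.has_display_name,
            r.has_avatar,
        ),
        reverse=True,
    )
```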

413
poetry.lock generated
View file

@ -11,6 +11,20 @@ files = [
{file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"},
]
[[package]]
name = "annotated-types"
version = "0.5.0"
description = "Reusable constraint types to use with typing.Annotated"
optional = false
python-versions = ">=3.7"
files = [
{file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"},
{file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"},
]
[package.dependencies]
typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}
[[package]]
name = "astroid"
version = "2.15.0"
@ -457,34 +471,34 @@ files = [
[[package]]
name = "cryptography"
version = "41.0.3"
version = "41.0.4"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = ">=3.7"
files = [
{file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507"},
{file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922"},
{file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81"},
{file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd"},
{file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47"},
{file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116"},
{file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c"},
{file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae"},
{file = "cryptography-41.0.3-cp37-abi3-win32.whl", hash = "sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306"},
{file = "cryptography-41.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574"},
{file = "cryptography-41.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087"},
{file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858"},
{file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906"},
{file = "cryptography-41.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e"},
{file = "cryptography-41.0.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd"},
{file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207"},
{file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84"},
{file = "cryptography-41.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7"},
{file = "cryptography-41.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d"},
{file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de"},
{file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1"},
{file = "cryptography-41.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4"},
{file = "cryptography-41.0.3.tar.gz", hash = "sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"},
{file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"},
{file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"},
{file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"},
{file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"},
{file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"},
{file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"},
{file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"},
{file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"},
{file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"},
{file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"},
{file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"},
{file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"},
{file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"},
{file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"},
{file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"},
{file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"},
{file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"},
{file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"},
{file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"},
{file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"},
{file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"},
{file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"},
{file = "cryptography-41.0.4.tar.gz", hash = "sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"},
]
[package.dependencies]
@ -586,18 +600,21 @@ smmap = ">=3.0.1,<6"
[[package]]
name = "gitpython"
version = "3.1.35"
version = "3.1.37"
description = "GitPython is a Python library used to interact with Git repositories"
optional = false
python-versions = ">=3.7"
files = [
{file = "GitPython-3.1.35-py3-none-any.whl", hash = "sha256:c19b4292d7a1d3c0f653858db273ff8a6614100d1eb1528b014ec97286193c09"},
{file = "GitPython-3.1.35.tar.gz", hash = "sha256:9cbefbd1789a5fe9bcf621bb34d3f441f3a90c8461d377f84eda73e721d9b06b"},
{file = "GitPython-3.1.37-py3-none-any.whl", hash = "sha256:5f4c4187de49616d710a77e98ddf17b4782060a1788df441846bddefbb89ab33"},
{file = "GitPython-3.1.37.tar.gz", hash = "sha256:f9b9ddc0761c125d5780eab2d64be4873fc6817c2899cbcb34b02344bdc7bc54"},
]
[package.dependencies]
gitdb = ">=4.0.1,<5"
[package.extras]
test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-sugar"]
[[package]]
name = "hiredis"
version = "2.2.3"
@ -1373,74 +1390,67 @@ files = [
[[package]]
name = "msgpack"
version = "1.0.5"
version = "1.0.7"
description = "MessagePack serializer"
optional = false
python-versions = "*"
python-versions = ">=3.8"
files = [
{file = "msgpack-1.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:525228efd79bb831cf6830a732e2e80bc1b05436b086d4264814b4b2955b2fa9"},
{file = "msgpack-1.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4f8d8b3bf1ff2672567d6b5c725a1b347fe838b912772aa8ae2bf70338d5a198"},
{file = "msgpack-1.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdc793c50be3f01106245a61b739328f7dccc2c648b501e237f0699fe1395b81"},
{file = "msgpack-1.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cb47c21a8a65b165ce29f2bec852790cbc04936f502966768e4aae9fa763cb7"},
{file = "msgpack-1.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e42b9594cc3bf4d838d67d6ed62b9e59e201862a25e9a157019e171fbe672dd3"},
{file = "msgpack-1.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b56a24893105dc52c1253649b60f475f36b3aa0fc66115bffafb624d7cb30b"},
{file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1967f6129fc50a43bfe0951c35acbb729be89a55d849fab7686004da85103f1c"},
{file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20a97bf595a232c3ee6d57ddaadd5453d174a52594bf9c21d10407e2a2d9b3bd"},
{file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d25dd59bbbbb996eacf7be6b4ad082ed7eacc4e8f3d2df1ba43822da9bfa122a"},
{file = "msgpack-1.0.5-cp310-cp310-win32.whl", hash = "sha256:382b2c77589331f2cb80b67cc058c00f225e19827dbc818d700f61513ab47bea"},
{file = "msgpack-1.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:4867aa2df9e2a5fa5f76d7d5565d25ec76e84c106b55509e78c1ede0f152659a"},
{file = "msgpack-1.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9f5ae84c5c8a857ec44dc180a8b0cc08238e021f57abdf51a8182e915e6299f0"},
{file = "msgpack-1.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e6ca5d5699bcd89ae605c150aee83b5321f2115695e741b99618f4856c50898"},
{file = "msgpack-1.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5494ea30d517a3576749cad32fa27f7585c65f5f38309c88c6d137877fa28a5a"},
{file = "msgpack-1.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ab2f3331cb1b54165976a9d976cb251a83183631c88076613c6c780f0d6e45a"},
{file = "msgpack-1.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28592e20bbb1620848256ebc105fc420436af59515793ed27d5c77a217477705"},
{file = "msgpack-1.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe5c63197c55bce6385d9aee16c4d0641684628f63ace85f73571e65ad1c1e8d"},
{file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed40e926fa2f297e8a653c954b732f125ef97bdd4c889f243182299de27e2aa9"},
{file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b2de4c1c0538dcb7010902a2b97f4e00fc4ddf2c8cda9749af0e594d3b7fa3d7"},
{file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bf22a83f973b50f9d38e55c6aade04c41ddda19b00c4ebc558930d78eecc64ed"},
{file = "msgpack-1.0.5-cp311-cp311-win32.whl", hash = "sha256:c396e2cc213d12ce017b686e0f53497f94f8ba2b24799c25d913d46c08ec422c"},
{file = "msgpack-1.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c4c68d87497f66f96d50142a2b73b97972130d93677ce930718f68828b382e2"},
{file = "msgpack-1.0.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a2b031c2e9b9af485d5e3c4520f4220d74f4d222a5b8dc8c1a3ab9448ca79c57"},
{file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f837b93669ce4336e24d08286c38761132bc7ab29782727f8557e1eb21b2080"},
{file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1d46dfe3832660f53b13b925d4e0fa1432b00f5f7210eb3ad3bb9a13c6204a6"},
{file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:366c9a7b9057e1547f4ad51d8facad8b406bab69c7d72c0eb6f529cf76d4b85f"},
{file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4c075728a1095efd0634a7dccb06204919a2f67d1893b6aa8e00497258bf926c"},
{file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:f933bbda5a3ee63b8834179096923b094b76f0c7a73c1cfe8f07ad608c58844b"},
{file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:36961b0568c36027c76e2ae3ca1132e35123dcec0706c4b7992683cc26c1320c"},
{file = "msgpack-1.0.5-cp36-cp36m-win32.whl", hash = "sha256:b5ef2f015b95f912c2fcab19c36814963b5463f1fb9049846994b007962743e9"},
{file = "msgpack-1.0.5-cp36-cp36m-win_amd64.whl", hash = "sha256:288e32b47e67f7b171f86b030e527e302c91bd3f40fd9033483f2cacc37f327a"},
{file = "msgpack-1.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:137850656634abddfb88236008339fdaba3178f4751b28f270d2ebe77a563b6c"},
{file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c05a4a96585525916b109bb85f8cb6511db1c6f5b9d9cbcbc940dc6b4be944b"},
{file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56a62ec00b636583e5cb6ad313bbed36bb7ead5fa3a3e38938503142c72cba4f"},
{file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef8108f8dedf204bb7b42994abf93882da1159728a2d4c5e82012edd92c9da9f"},
{file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1835c84d65f46900920b3708f5ba829fb19b1096c1800ad60bae8418652a951d"},
{file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e57916ef1bd0fee4f21c4600e9d1da352d8816b52a599c46460e93a6e9f17086"},
{file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:17358523b85973e5f242ad74aa4712b7ee560715562554aa2134d96e7aa4cbbf"},
{file = "msgpack-1.0.5-cp37-cp37m-win32.whl", hash = "sha256:cb5aaa8c17760909ec6cb15e744c3ebc2ca8918e727216e79607b7bbce9c8f77"},
{file = "msgpack-1.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:ab31e908d8424d55601ad7075e471b7d0140d4d3dd3272daf39c5c19d936bd82"},
{file = "msgpack-1.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b72d0698f86e8d9ddf9442bdedec15b71df3598199ba33322d9711a19f08145c"},
{file = "msgpack-1.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:379026812e49258016dd84ad79ac8446922234d498058ae1d415f04b522d5b2d"},
{file = "msgpack-1.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:332360ff25469c346a1c5e47cbe2a725517919892eda5cfaffe6046656f0b7bb"},
{file = "msgpack-1.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:476a8fe8fae289fdf273d6d2a6cb6e35b5a58541693e8f9f019bfe990a51e4ba"},
{file = "msgpack-1.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9985b214f33311df47e274eb788a5893a761d025e2b92c723ba4c63936b69b1"},
{file = "msgpack-1.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48296af57cdb1d885843afd73c4656be5c76c0c6328db3440c9601a98f303d87"},
{file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:addab7e2e1fcc04bd08e4eb631c2a90960c340e40dfc4a5e24d2ff0d5a3b3edb"},
{file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:916723458c25dfb77ff07f4c66aed34e47503b2eb3188b3adbec8d8aa6e00f48"},
{file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:821c7e677cc6acf0fd3f7ac664c98803827ae6de594a9f99563e48c5a2f27eb0"},
{file = "msgpack-1.0.5-cp38-cp38-win32.whl", hash = "sha256:1c0f7c47f0087ffda62961d425e4407961a7ffd2aa004c81b9c07d9269512f6e"},
{file = "msgpack-1.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:bae7de2026cbfe3782c8b78b0db9cbfc5455e079f1937cb0ab8d133496ac55e1"},
{file = "msgpack-1.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:20c784e66b613c7f16f632e7b5e8a1651aa5702463d61394671ba07b2fc9e025"},
{file = "msgpack-1.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:266fa4202c0eb94d26822d9bfd7af25d1e2c088927fe8de9033d929dd5ba24c5"},
{file = "msgpack-1.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18334484eafc2b1aa47a6d42427da7fa8f2ab3d60b674120bce7a895a0a85bdd"},
{file = "msgpack-1.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57e1f3528bd95cc44684beda696f74d3aaa8a5e58c816214b9046512240ef437"},
{file = "msgpack-1.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:586d0d636f9a628ddc6a17bfd45aa5b5efaf1606d2b60fa5d87b8986326e933f"},
{file = "msgpack-1.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a740fa0e4087a734455f0fc3abf5e746004c9da72fbd541e9b113013c8dc3282"},
{file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3055b0455e45810820db1f29d900bf39466df96ddca11dfa6d074fa47054376d"},
{file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a61215eac016f391129a013c9e46f3ab308db5f5ec9f25811e811f96962599a8"},
{file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:362d9655cd369b08fda06b6657a303eb7172d5279997abe094512e919cf74b11"},
{file = "msgpack-1.0.5-cp39-cp39-win32.whl", hash = "sha256:ac9dd47af78cae935901a9a500104e2dea2e253207c924cc95de149606dc43cc"},
{file = "msgpack-1.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:06f5174b5f8ed0ed919da0e62cbd4ffde676a374aba4020034da05fab67b9164"},
{file = "msgpack-1.0.5.tar.gz", hash = "sha256:c075544284eadc5cddc70f4757331d99dcbc16b2bbd4849d15f8aae4cf36d31c"},
{file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:04ad6069c86e531682f9e1e71b71c1c3937d6014a7c3e9edd2aa81ad58842862"},
{file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cca1b62fe70d761a282496b96a5e51c44c213e410a964bdffe0928e611368329"},
{file = "msgpack-1.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e50ebce52f41370707f1e21a59514e3375e3edd6e1832f5e5235237db933c98b"},
{file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b4f35de6a304b5533c238bee86b670b75b03d31b7797929caa7a624b5dda6"},
{file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28efb066cde83c479dfe5a48141a53bc7e5f13f785b92ddde336c716663039ee"},
{file = "msgpack-1.0.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cb14ce54d9b857be9591ac364cb08dc2d6a5c4318c1182cb1d02274029d590d"},
{file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b573a43ef7c368ba4ea06050a957c2a7550f729c31f11dd616d2ac4aba99888d"},
{file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ccf9a39706b604d884d2cb1e27fe973bc55f2890c52f38df742bc1d79ab9f5e1"},
{file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cb70766519500281815dfd7a87d3a178acf7ce95390544b8c90587d76b227681"},
{file = "msgpack-1.0.7-cp310-cp310-win32.whl", hash = "sha256:b610ff0f24e9f11c9ae653c67ff8cc03c075131401b3e5ef4b82570d1728f8a9"},
{file = "msgpack-1.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:a40821a89dc373d6427e2b44b572efc36a2778d3f543299e2f24eb1a5de65415"},
{file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:576eb384292b139821c41995523654ad82d1916da6a60cff129c715a6223ea84"},
{file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:730076207cb816138cf1af7f7237b208340a2c5e749707457d70705715c93b93"},
{file = "msgpack-1.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:85765fdf4b27eb5086f05ac0491090fc76f4f2b28e09d9350c31aac25a5aaff8"},
{file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3476fae43db72bd11f29a5147ae2f3cb22e2f1a91d575ef130d2bf49afd21c46"},
{file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d4c80667de2e36970ebf74f42d1088cc9ee7ef5f4e8c35eee1b40eafd33ca5b"},
{file = "msgpack-1.0.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b0bf0effb196ed76b7ad883848143427a73c355ae8e569fa538365064188b8e"},
{file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f9a7c509542db4eceed3dcf21ee5267ab565a83555c9b88a8109dcecc4709002"},
{file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:84b0daf226913133f899ea9b30618722d45feffa67e4fe867b0b5ae83a34060c"},
{file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ec79ff6159dffcc30853b2ad612ed572af86c92b5168aa3fc01a67b0fa40665e"},
{file = "msgpack-1.0.7-cp311-cp311-win32.whl", hash = "sha256:3e7bf4442b310ff154b7bb9d81eb2c016b7d597e364f97d72b1acc3817a0fdc1"},
{file = "msgpack-1.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:3f0c8c6dfa6605ab8ff0611995ee30d4f9fcff89966cf562733b4008a3d60d82"},
{file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f0936e08e0003f66bfd97e74ee530427707297b0d0361247e9b4f59ab78ddc8b"},
{file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98bbd754a422a0b123c66a4c341de0474cad4a5c10c164ceed6ea090f3563db4"},
{file = "msgpack-1.0.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b291f0ee7961a597cbbcc77709374087fa2a9afe7bdb6a40dbbd9b127e79afee"},
{file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebbbba226f0a108a7366bf4b59bf0f30a12fd5e75100c630267d94d7f0ad20e5"},
{file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e2d69948e4132813b8d1131f29f9101bc2c915f26089a6d632001a5c1349672"},
{file = "msgpack-1.0.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdf38ba2d393c7911ae989c3bbba510ebbcdf4ecbdbfec36272abe350c454075"},
{file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:993584fc821c58d5993521bfdcd31a4adf025c7d745bbd4d12ccfecf695af5ba"},
{file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:52700dc63a4676669b341ba33520f4d6e43d3ca58d422e22ba66d1736b0a6e4c"},
{file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e45ae4927759289c30ccba8d9fdce62bb414977ba158286b5ddaf8df2cddb5c5"},
{file = "msgpack-1.0.7-cp312-cp312-win32.whl", hash = "sha256:27dcd6f46a21c18fa5e5deed92a43d4554e3df8d8ca5a47bf0615d6a5f39dbc9"},
{file = "msgpack-1.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:7687e22a31e976a0e7fc99c2f4d11ca45eff652a81eb8c8085e9609298916dcf"},
{file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5b6ccc0c85916998d788b295765ea0e9cb9aac7e4a8ed71d12e7d8ac31c23c95"},
{file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:235a31ec7db685f5c82233bddf9858748b89b8119bf4538d514536c485c15fe0"},
{file = "msgpack-1.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab3db8bab4b7e635c1c97270d7a4b2a90c070b33cbc00c99ef3f9be03d3e1f7"},
{file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bfdd914e55e0d2c9e1526de210f6fe8ffe9705f2b1dfcc4aecc92a4cb4b533d"},
{file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36e17c4592231a7dbd2ed09027823ab295d2791b3b1efb2aee874b10548b7524"},
{file = "msgpack-1.0.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38949d30b11ae5f95c3c91917ee7a6b239f5ec276f271f28638dec9156f82cfc"},
{file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ff1d0899f104f3921d94579a5638847f783c9b04f2d5f229392ca77fba5b82fc"},
{file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dc43f1ec66eb8440567186ae2f8c447d91e0372d793dfe8c222aec857b81a8cf"},
{file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dd632777ff3beaaf629f1ab4396caf7ba0bdd075d948a69460d13d44357aca4c"},
{file = "msgpack-1.0.7-cp38-cp38-win32.whl", hash = "sha256:4e71bc4416de195d6e9b4ee93ad3f2f6b2ce11d042b4d7a7ee00bbe0358bd0c2"},
{file = "msgpack-1.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:8f5b234f567cf76ee489502ceb7165c2a5cecec081db2b37e35332b537f8157c"},
{file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfef2bb6ef068827bbd021017a107194956918ab43ce4d6dc945ffa13efbc25f"},
{file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:484ae3240666ad34cfa31eea7b8c6cd2f1fdaae21d73ce2974211df099a95d81"},
{file = "msgpack-1.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3967e4ad1aa9da62fd53e346ed17d7b2e922cba5ab93bdd46febcac39be636fc"},
{file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dd178c4c80706546702c59529ffc005681bd6dc2ea234c450661b205445a34d"},
{file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ffbc252eb0d229aeb2f9ad051200668fc3a9aaa8994e49f0cb2ffe2b7867e7"},
{file = "msgpack-1.0.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:822ea70dc4018c7e6223f13affd1c5c30c0f5c12ac1f96cd8e9949acddb48a61"},
{file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:384d779f0d6f1b110eae74cb0659d9aa6ff35aaf547b3955abf2ab4c901c4819"},
{file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f64e376cd20d3f030190e8c32e1c64582eba56ac6dc7d5b0b49a9d44021b52fd"},
{file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ed82f5a7af3697b1c4786053736f24a0efd0a1b8a130d4c7bfee4b9ded0f08f"},
{file = "msgpack-1.0.7-cp39-cp39-win32.whl", hash = "sha256:f26a07a6e877c76a88e3cecac8531908d980d3d5067ff69213653649ec0f60ad"},
{file = "msgpack-1.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:1dc93e8e4653bdb5910aed79f11e165c85732067614f180f70534f056da97db3"},
{file = "msgpack-1.0.7.tar.gz", hash = "sha256:572efc93db7a4d27e404501975ca6d2d9775705c2d922390d878fcf768d92c87"},
]
[[package]]
@ -1607,13 +1617,13 @@ files = [
[[package]]
name = "phonenumbers"
version = "8.13.19"
version = "8.13.22"
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
optional = false
python-versions = "*"
files = [
{file = "phonenumbers-8.13.19-py2.py3-none-any.whl", hash = "sha256:ba542f20f6dc83be8f127f240f9b5b7e7c1dec42aceff1879400d4dc0c781d81"},
{file = "phonenumbers-8.13.19.tar.gz", hash = "sha256:38180247697240ccedd74dec4bfbdbc22bb108b9c5f991f270ca3e41395e6f96"},
{file = "phonenumbers-8.13.22-py2.py3-none-any.whl", hash = "sha256:85ceeba9e67984ba98182c77e8e4c70093d38c0c6a0cb2bd392e0694ddaeb1f6"},
{file = "phonenumbers-8.13.22.tar.gz", hash = "sha256:001664c90f59b8954766c2db85adafc8dbc96177efeb49607ca4e64a7acaf569"},
]
[[package]]
@ -1739,22 +1749,22 @@ twisted = ["twisted"]
[[package]]
name = "psycopg2"
version = "2.9.7"
version = "2.9.8"
description = "psycopg2 - Python-PostgreSQL Database Adapter"
optional = true
python-versions = ">=3.6"
files = [
{file = "psycopg2-2.9.7-cp310-cp310-win32.whl", hash = "sha256:1a6a2d609bce44f78af4556bea0c62a5e7f05c23e5ea9c599e07678995609084"},
{file = "psycopg2-2.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:b22ed9c66da2589a664e0f1ca2465c29b75aaab36fa209d4fb916025fb9119e5"},
{file = "psycopg2-2.9.7-cp311-cp311-win32.whl", hash = "sha256:44d93a0109dfdf22fe399b419bcd7fa589d86895d3931b01fb321d74dadc68f1"},
{file = "psycopg2-2.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:91e81a8333a0037babfc9fe6d11e997a9d4dac0f38c43074886b0d9dead94fe9"},
{file = "psycopg2-2.9.7-cp37-cp37m-win32.whl", hash = "sha256:d1210fcf99aae6f728812d1d2240afc1dc44b9e6cba526a06fb8134f969957c2"},
{file = "psycopg2-2.9.7-cp37-cp37m-win_amd64.whl", hash = "sha256:e9b04cbef584310a1ac0f0d55bb623ca3244c87c51187645432e342de9ae81a8"},
{file = "psycopg2-2.9.7-cp38-cp38-win32.whl", hash = "sha256:d5c5297e2fbc8068d4255f1e606bfc9291f06f91ec31b2a0d4c536210ac5c0a2"},
{file = "psycopg2-2.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:8275abf628c6dc7ec834ea63f6f3846bf33518907a2b9b693d41fd063767a866"},
{file = "psycopg2-2.9.7-cp39-cp39-win32.whl", hash = "sha256:c7949770cafbd2f12cecc97dea410c514368908a103acf519f2a346134caa4d5"},
{file = "psycopg2-2.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:b6bd7d9d3a7a63faae6edf365f0ed0e9b0a1aaf1da3ca146e6b043fb3eb5d723"},
{file = "psycopg2-2.9.7.tar.gz", hash = "sha256:f00cc35bd7119f1fed17b85bd1007855194dde2cbd8de01ab8ebb17487440ad8"},
{file = "psycopg2-2.9.8-cp310-cp310-win32.whl", hash = "sha256:2f8594f92bbb5d8b59ffec04e2686c416401e2d4297de1193f8e75235937e71d"},
{file = "psycopg2-2.9.8-cp310-cp310-win_amd64.whl", hash = "sha256:f9ecbf504c4eaff90139d5c9b95d47275f2b2651e14eba56392b4041fbf4c2b3"},
{file = "psycopg2-2.9.8-cp311-cp311-win32.whl", hash = "sha256:65f81e72136d8b9ac8abf5206938d60f50da424149a43b6073f1546063c0565e"},
{file = "psycopg2-2.9.8-cp311-cp311-win_amd64.whl", hash = "sha256:f7e62095d749359b7854143843f27edd7dccfcd3e1d833b880562aa5702d92b0"},
{file = "psycopg2-2.9.8-cp37-cp37m-win32.whl", hash = "sha256:81b21424023a290a40884c7f8b0093ba6465b59bd785c18f757e76945f65594c"},
{file = "psycopg2-2.9.8-cp37-cp37m-win_amd64.whl", hash = "sha256:67c2f32f3aba79afb15799575e77ee2db6b46b8acf943c21d34d02d4e1041d50"},
{file = "psycopg2-2.9.8-cp38-cp38-win32.whl", hash = "sha256:287a64ef168ef7fb9f382964705ff664b342bfff47e7242bf0a04ef203269dd5"},
{file = "psycopg2-2.9.8-cp38-cp38-win_amd64.whl", hash = "sha256:dcde3cad4920e29e74bf4e76c072649764914facb2069e6b7fa1ddbebcd49e9f"},
{file = "psycopg2-2.9.8-cp39-cp39-win32.whl", hash = "sha256:d4ad050ea50a16731d219c3a85e8f2debf49415a070f0b8331ccc96c81700d9b"},
{file = "psycopg2-2.9.8-cp39-cp39-win_amd64.whl", hash = "sha256:d39bb3959788b2c9d7bf5ff762e29f436172b241cd7b47529baac77746fd7918"},
{file = "psycopg2-2.9.8.tar.gz", hash = "sha256:3da6488042a53b50933244085f3f91803f1b7271f970f3e5536efa69314f6a49"},
]
[[package]]
@ -1822,55 +1832,140 @@ files = [
[[package]]
name = "pydantic"
version = "1.10.11"
description = "Data validation and settings management using python type hints"
version = "2.4.2"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.7"
files = [
{file = "pydantic-1.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff44c5e89315b15ff1f7fdaf9853770b810936d6b01a7bcecaa227d2f8fe444f"},
{file = "pydantic-1.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c098d4ab5e2d5b3984d3cb2527e2d6099d3de85630c8934efcfdc348a9760e"},
{file = "pydantic-1.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16928fdc9cb273c6af00d9d5045434c39afba5f42325fb990add2c241402d151"},
{file = "pydantic-1.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0588788a9a85f3e5e9ebca14211a496409cb3deca5b6971ff37c556d581854e7"},
{file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9baf78b31da2dc3d3f346ef18e58ec5f12f5aaa17ac517e2ffd026a92a87588"},
{file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:373c0840f5c2b5b1ccadd9286782852b901055998136287828731868027a724f"},
{file = "pydantic-1.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:c3339a46bbe6013ef7bdd2844679bfe500347ac5742cd4019a88312aa58a9847"},
{file = "pydantic-1.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08a6c32e1c3809fbc49debb96bf833164f3438b3696abf0fbeceb417d123e6eb"},
{file = "pydantic-1.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a451ccab49971af043ec4e0d207cbc8cbe53dbf148ef9f19599024076fe9c25b"},
{file = "pydantic-1.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02d24f7b2b365fed586ed73582c20f353a4c50e4be9ba2c57ab96f8091ddae"},
{file = "pydantic-1.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f34739a89260dfa420aa3cbd069fbcc794b25bbe5c0a214f8fb29e363484b66"},
{file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e297897eb4bebde985f72a46a7552a7556a3dd11e7f76acda0c1093e3dbcf216"},
{file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d185819a7a059550ecb85d5134e7d40f2565f3dd94cfd870132c5f91a89cf58c"},
{file = "pydantic-1.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:4400015f15c9b464c9db2d5d951b6a780102cfa5870f2c036d37c23b56f7fc1b"},
{file = "pydantic-1.10.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2417de68290434461a266271fc57274a138510dca19982336639484c73a07af6"},
{file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:331c031ba1554b974c98679bd0780d89670d6fd6f53f5d70b10bdc9addee1713"},
{file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8268a735a14c308923e8958363e3a3404f6834bb98c11f5ab43251a4e410170c"},
{file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:44e51ba599c3ef227e168424e220cd3e544288c57829520dc90ea9cb190c3248"},
{file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d7781f1d13b19700b7949c5a639c764a077cbbdd4322ed505b449d3ca8edcb36"},
{file = "pydantic-1.10.11-cp37-cp37m-win_amd64.whl", hash = "sha256:7522a7666157aa22b812ce14c827574ddccc94f361237ca6ea8bb0d5c38f1629"},
{file = "pydantic-1.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc64eab9b19cd794a380179ac0e6752335e9555d214cfcb755820333c0784cb3"},
{file = "pydantic-1.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8dc77064471780262b6a68fe67e013298d130414d5aaf9b562c33987dbd2cf4f"},
{file = "pydantic-1.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe429898f2c9dd209bd0632a606bddc06f8bce081bbd03d1c775a45886e2c1cb"},
{file = "pydantic-1.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:192c608ad002a748e4a0bed2ddbcd98f9b56df50a7c24d9a931a8c5dd053bd3d"},
{file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ef55392ec4bb5721f4ded1096241e4b7151ba6d50a50a80a2526c854f42e6a2f"},
{file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e0bb6efe86281623abbeeb0be64eab740c865388ee934cd3e6a358784aca6e"},
{file = "pydantic-1.10.11-cp38-cp38-win_amd64.whl", hash = "sha256:265a60da42f9f27e0b1014eab8acd3e53bd0bad5c5b4884e98a55f8f596b2c19"},
{file = "pydantic-1.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:469adf96c8e2c2bbfa655fc7735a2a82f4c543d9fee97bd113a7fb509bf5e622"},
{file = "pydantic-1.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6cbfbd010b14c8a905a7b10f9fe090068d1744d46f9e0c021db28daeb8b6de1"},
{file = "pydantic-1.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abade85268cc92dff86d6effcd917893130f0ff516f3d637f50dadc22ae93999"},
{file = "pydantic-1.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9738b0f2e6c70f44ee0de53f2089d6002b10c33264abee07bdb5c7f03038303"},
{file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:787cf23e5a0cde753f2eabac1b2e73ae3844eb873fd1f5bdbff3048d8dbb7604"},
{file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:174899023337b9fc685ac8adaa7b047050616136ccd30e9070627c1aaab53a13"},
{file = "pydantic-1.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:1954f8778489a04b245a1e7b8b22a9d3ea8ef49337285693cf6959e4b757535e"},
{file = "pydantic-1.10.11-py3-none-any.whl", hash = "sha256:008c5e266c8aada206d0627a011504e14268a62091450210eda7c07fabe6963e"},
{file = "pydantic-1.10.11.tar.gz", hash = "sha256:f66d479cf7eb331372c470614be6511eae96f1f120344c25f3f9bb59fb1b5528"},
{file = "pydantic-2.4.2-py3-none-any.whl", hash = "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"},
{file = "pydantic-2.4.2.tar.gz", hash = "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7"},
]
[package.dependencies]
typing-extensions = ">=4.2.0"
annotated-types = ">=0.4.0"
pydantic-core = "2.10.1"
typing-extensions = ">=4.6.1"
[package.extras]
dotenv = ["python-dotenv (>=0.10.4)"]
email = ["email-validator (>=1.0.3)"]
email = ["email-validator (>=2.0.0)"]
[[package]]
name = "pydantic-core"
version = "2.10.1"
description = ""
optional = false
python-versions = ">=3.7"
files = [
{file = "pydantic_core-2.10.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63"},
{file = "pydantic_core-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096"},
{file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a"},
{file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175"},
{file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7"},
{file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893"},
{file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e"},
{file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e"},
{file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e"},
{file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6"},
{file = "pydantic_core-2.10.1-cp310-none-win32.whl", hash = "sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b"},
{file = "pydantic_core-2.10.1-cp310-none-win_amd64.whl", hash = "sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0"},
{file = "pydantic_core-2.10.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea"},
{file = "pydantic_core-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4"},
{file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c"},
{file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b"},
{file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f"},
{file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a"},
{file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8"},
{file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4"},
{file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607"},
{file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f"},
{file = "pydantic_core-2.10.1-cp311-none-win32.whl", hash = "sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6"},
{file = "pydantic_core-2.10.1-cp311-none-win_amd64.whl", hash = "sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27"},
{file = "pydantic_core-2.10.1-cp311-none-win_arm64.whl", hash = "sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325"},
{file = "pydantic_core-2.10.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921"},
{file = "pydantic_core-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118"},
{file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab"},
{file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff"},
{file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7"},
{file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c"},
{file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901"},
{file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d"},
{file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f"},
{file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c"},
{file = "pydantic_core-2.10.1-cp312-none-win32.whl", hash = "sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f"},
{file = "pydantic_core-2.10.1-cp312-none-win_amd64.whl", hash = "sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430"},
{file = "pydantic_core-2.10.1-cp312-none-win_arm64.whl", hash = "sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94"},
{file = "pydantic_core-2.10.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9badf8d45171d92387410b04639d73811b785b5161ecadabf056ea14d62d4ede"},
{file = "pydantic_core-2.10.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:ebedb45b9feb7258fac0a268a3f6bec0a2ea4d9558f3d6f813f02ff3a6dc6698"},
{file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfe1090245c078720d250d19cb05d67e21a9cd7c257698ef139bc41cf6c27b4f"},
{file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e357571bb0efd65fd55f18db0a2fb0ed89d0bb1d41d906b138f088933ae618bb"},
{file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b3dcd587b69bbf54fc04ca157c2323b8911033e827fffaecf0cafa5a892a0904"},
{file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c120c9ce3b163b985a3b966bb701114beb1da4b0468b9b236fc754783d85aa3"},
{file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15d6bca84ffc966cc9976b09a18cf9543ed4d4ecbd97e7086f9ce9327ea48891"},
{file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cabb9710f09d5d2e9e2748c3e3e20d991a4c5f96ed8f1132518f54ab2967221"},
{file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:82f55187a5bebae7d81d35b1e9aaea5e169d44819789837cdd4720d768c55d15"},
{file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1d40f55222b233e98e3921df7811c27567f0e1a4411b93d4c5c0f4ce131bc42f"},
{file = "pydantic_core-2.10.1-cp37-none-win32.whl", hash = "sha256:14e09ff0b8fe6e46b93d36a878f6e4a3a98ba5303c76bb8e716f4878a3bee92c"},
{file = "pydantic_core-2.10.1-cp37-none-win_amd64.whl", hash = "sha256:1396e81b83516b9d5c9e26a924fa69164156c148c717131f54f586485ac3c15e"},
{file = "pydantic_core-2.10.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc"},
{file = "pydantic_core-2.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a"},
{file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd"},
{file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468"},
{file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6"},
{file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58"},
{file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302"},
{file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e"},
{file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561"},
{file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de"},
{file = "pydantic_core-2.10.1-cp38-none-win32.whl", hash = "sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee"},
{file = "pydantic_core-2.10.1-cp38-none-win_amd64.whl", hash = "sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e"},
{file = "pydantic_core-2.10.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970"},
{file = "pydantic_core-2.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b"},
{file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875"},
{file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0"},
{file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531"},
{file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c"},
{file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a"},
{file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429"},
{file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7"},
{file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595"},
{file = "pydantic_core-2.10.1-cp39-none-win32.whl", hash = "sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a"},
{file = "pydantic_core-2.10.1-cp39-none-win_amd64.whl", hash = "sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada"},
{file = "pydantic_core-2.10.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a"},
{file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f"},
{file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e"},
{file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55"},
{file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514"},
{file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2"},
{file = "pydantic_core-2.10.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776"},
{file = "pydantic_core-2.10.1.tar.gz", hash = "sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82"},
]
[package.dependencies]
typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pygithub"
@ -2388,13 +2483,13 @@ doc = ["Sphinx", "sphinx-rtd-theme"]
[[package]]
name = "sentry-sdk"
version = "1.30.0"
version = "1.31.0"
description = "Python client for Sentry (https://sentry.io)"
optional = true
python-versions = "*"
files = [
{file = "sentry-sdk-1.30.0.tar.gz", hash = "sha256:7dc873b87e1faf4d00614afd1058bfa1522942f33daef8a59f90de8ed75cd10c"},
{file = "sentry_sdk-1.30.0-py2.py3-none-any.whl", hash = "sha256:2e53ad63f96bb9da6570ba2e755c267e529edcf58580a2c0d2a11ef26e1e678b"},
{file = "sentry-sdk-1.31.0.tar.gz", hash = "sha256:6de2e88304873484207fed836388e422aeff000609b104c802749fd89d56ba5b"},
{file = "sentry_sdk-1.31.0-py2.py3-none-any.whl", hash = "sha256:64a7141005fb775b9db298a30de93e3b83e0ddd1232dc6f36eb38aebc1553291"},
]
[package.dependencies]
@ -2404,10 +2499,12 @@ urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""}
[package.extras]
aiohttp = ["aiohttp (>=3.5)"]
arq = ["arq (>=0.23)"]
asyncpg = ["asyncpg (>=0.23)"]
beam = ["apache-beam (>=2.12)"]
bottle = ["bottle (>=0.12.13)"]
celery = ["celery (>=3)"]
chalice = ["chalice (>=1.16.0)"]
clickhouse-driver = ["clickhouse-driver (>=0.2.0)"]
django = ["django (>=1.8)"]
falcon = ["falcon (>=1.4)"]
fastapi = ["fastapi (>=0.79.0)"]
@ -2973,13 +3070,13 @@ files = [
[[package]]
name = "types-netaddr"
version = "0.8.0.9"
version = "0.9.0.1"
description = "Typing stubs for netaddr"
optional = false
python-versions = "*"
files = [
{file = "types-netaddr-0.8.0.9.tar.gz", hash = "sha256:68900c267fd31627c1721c5c52b32a257657ac2777457dca49b6b096ba2faf74"},
{file = "types_netaddr-0.8.0.9-py3-none-any.whl", hash = "sha256:63e871f064cd59473cec1177f372526f0fa3d565050247d5305bdc325be5c3f6"},
{file = "types-netaddr-0.9.0.1.tar.gz", hash = "sha256:e04638435abad3e3b13a4a6b1b07f36619a47597fd5c10f330474196c058dfb3"},
{file = "types_netaddr-0.9.0.1-py3-none-any.whl", hash = "sha256:81b98c959d14de96eb53507ac606e8876c91413d273554a59fd42b34e3811fe0"},
]
[[package]]
@ -3006,13 +3103,13 @@ files = [
[[package]]
name = "types-psycopg2"
version = "2.9.21.11"
version = "2.9.21.14"
description = "Typing stubs for psycopg2"
optional = false
python-versions = "*"
files = [
{file = "types-psycopg2-2.9.21.11.tar.gz", hash = "sha256:d5077eacf90e61db8c0b8eea2fdc9d4a97d7aaa16865fb4bd7034a7571520b4d"},
{file = "types_psycopg2-2.9.21.11-py3-none-any.whl", hash = "sha256:7a323d7744bc8a882fb5a6f63448e903fc70d3dc0d6da9ec1f9c6c4dc10a7102"},
{file = "types-psycopg2-2.9.21.14.tar.gz", hash = "sha256:bf73a0ac4da4e278c89bf1b01fc596d5a5ac7a356cfe6ac0249f47b9e259f868"},
{file = "types_psycopg2-2.9.21.14-py3-none-any.whl", hash = "sha256:cd9c5350631f3bc6184ec8d48f2ed31d4ea660f89d0fffe78239450782f383c5"},
]
[[package]]
@ -3347,4 +3444,4 @@ user-search = ["pyicu"]
[metadata]
lock-version = "2.0"
python-versions = "^3.8.0"
content-hash = "104f108b3c966be05e17cf9975b4061942b354fe9a57cbf7372371fd56b1bf24"
content-hash = "364c309486e9d93d4da8a1a3784d5ecd7d2a9734cf84dcd4a991f2cd54f0b5b5"

@ -92,10 +92,11 @@ skip_gitignore = true
[tool.maturin]
manifest-path = "rust/Cargo.toml"
module-name = "synapse.synapse_rust"
[tool.poetry]
name = "matrix-synapse"
version = "1.93.0rc1"
version = "1.93.0"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@ -209,11 +210,11 @@ cryptography = ">=3.4.7"
# ijson 3.1.4 fixes a bug with "." in property names
ijson = ">=3.1.4"
matrix-common = "^1.3.0"
# We need packaging.requirements.Requirement, added in 16.1.
packaging = ">=16.1"
# This is the most recent version of Pydantic available on common distros.
# We are currently incompatible with >=2.0.0 (https://github.com/matrix-org/synapse/issues/15858)
pydantic = "^1.7.4"
# We need packaging.version.Version(...).major, added in 20.0.
packaging = ">=20.0"
# We support pydantic v1 and pydantic v2 via the pydantic.v1 compat module.
# See https://github.com/matrix-org/synapse/issues/15858
pydantic = ">=1.7.4, <3"
# This is for building the rust components during "poetry install", which
# currently ignores the `build-system.requires` directive (c.f.
@ -321,6 +322,8 @@ all = [
isort = ">=5.10.1"
black = ">=22.7.0"
ruff = "0.0.290"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"
# Typechecking
lxml-stubs = ">=0.4.0"
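
The two dependency comments above encode concrete, checkable requirements: `packaging >= 20.0` is needed because the new `synapse/_pydantic_compat.py` (added later in this diff) calls `packaging.version.Version(...).major`, and the relaxed `pydantic = ">=1.7.4, <3"` bound works because import sites go through the `pydantic.v1` compat module. A minimal sketch of the `Version(...).major` usage, assuming only the public `packaging` API:

from packaging.version import Version

# Version(...).major was added in packaging 20.0, hence the ">=20.0" bound.
assert Version("2.4.2").major == 2    # the pydantic v2 pin from poetry.lock above
assert Version("1.10.11").major == 1  # the previous pydantic v1 pin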

@ -15,6 +15,8 @@ name = "synapse"
# tests/benchmarks.
crate-type = ["lib", "cdylib"]
# This is deprecated, see tool.maturin in pyproject.toml.
# It is left here for compatibility with maturin < 0.15.
[package.metadata.maturin]
# This is where we tell maturin where to place the built library.
name = "synapse.synapse_rust"

@ -197,6 +197,7 @@ fn bench_eval_message(b: &mut Bencher) {
false,
false,
false,
false,
);
b.iter(|| eval.run(&rules, Some("bob"), Some("person")));

rust/src/acl/mod.rs (new file)
@ -0,0 +1,102 @@
// Copyright 2023 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! An implementation of Matrix server ACL rules.
use std::net::Ipv4Addr;
use std::str::FromStr;
use anyhow::Error;
use pyo3::prelude::*;
use regex::Regex;
use crate::push::utils::{glob_to_regex, GlobMatchType};
/// Called when registering modules with python.
pub fn register_module(py: Python<'_>, m: &PyModule) -> PyResult<()> {
let child_module = PyModule::new(py, "acl")?;
child_module.add_class::<ServerAclEvaluator>()?;
m.add_submodule(child_module)?;
// We need to manually add the module to sys.modules to make `from
// synapse.synapse_rust import acl` work.
py.import("sys")?
.getattr("modules")?
.set_item("synapse.synapse_rust.acl", child_module)?;
Ok(())
}
#[derive(Debug, Clone)]
#[pyclass(frozen)]
pub struct ServerAclEvaluator {
allow_ip_literals: bool,
allow: Vec<Regex>,
deny: Vec<Regex>,
}
#[pymethods]
impl ServerAclEvaluator {
#[new]
pub fn py_new(
allow_ip_literals: bool,
allow: Vec<&str>,
deny: Vec<&str>,
) -> Result<Self, Error> {
let allow = allow
.iter()
.map(|s| glob_to_regex(s, GlobMatchType::Whole))
.collect::<Result<_, _>>()?;
let deny = deny
.iter()
.map(|s| glob_to_regex(s, GlobMatchType::Whole))
.collect::<Result<_, _>>()?;
Ok(ServerAclEvaluator {
allow_ip_literals,
allow,
deny,
})
}
pub fn server_matches_acl_event(&self, server_name: &str) -> bool {
// first of all, check if literal IPs are blocked, and if so, whether the
// server name is a literal IP
if !self.allow_ip_literals {
// check for ipv6 literals. These start with '['.
if server_name.starts_with('[') {
return false;
}
// check for ipv4 literals. We can just lift the routine from std::net.
if Ipv4Addr::from_str(server_name).is_ok() {
return false;
}
}
// next, check the deny list
if self.deny.iter().any(|e| e.is_match(server_name)) {
return false;
}
// then the allow list.
if self.allow.iter().any(|e| e.is_match(server_name)) {
return true;
}
// everything else should be rejected.
false
}
}
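
From Python, the evaluator is constructed once per ACL event and queried per server name. A hand-written usage sketch (the import path matches the `sys.modules` registration above; the ACL values are illustrative, not taken from the source):

from synapse.synapse_rust.acl import ServerAclEvaluator

# allow_ip_literals=False, allow=["*"], deny=["evil.example.com"] —
# the shape of a typical m.room.server_acl event's content.
evaluator = ServerAclEvaluator(False, ["*"], ["evil.example.com"])

assert evaluator.server_matches_acl_event("good.example.com")       # on the allow list
assert not evaluator.server_matches_acl_event("evil.example.com")   # deny is checked before allow
assert not evaluator.server_matches_acl_event("1.2.3.4")            # IPv4 literal rejected
assert not evaluator.server_matches_acl_event("[2001:db8::1]")      # IPv6 literal rejected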

@ -2,6 +2,7 @@ use lazy_static::lazy_static;
use pyo3::prelude::*;
use pyo3_log::ResetHandle;
pub mod acl;
pub mod push;
lazy_static! {
@ -38,6 +39,7 @@ fn synapse_rust(py: Python<'_>, m: &PyModule) -> PyResult<()> {
m.add_function(wrap_pyfunction!(get_rust_file_digest, m)?)?;
m.add_function(wrap_pyfunction!(reset_logging_config, m)?)?;
acl::register_module(py, m)?;
push::register_module(py, m)?;
Ok(())

@ -63,6 +63,19 @@ pub const BASE_PREPEND_OVERRIDE_RULES: &[PushRule] = &[PushRule {
}];
pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
PushRule {
rule_id: Cow::Borrowed("global/override/.org.matrix.msc4028.encrypted_event"),
priority_class: 5,
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.encrypted"),
},
))]),
actions: Cow::Borrowed(&[Action::Notify]),
default: true,
default_enabled: false,
},
PushRule {
rule_id: Cow::Borrowed("global/override/.m.rule.suppress_notices"),
priority_class: 5,

@ -564,7 +564,7 @@ fn test_requires_room_version_supports_condition() {
};
let rules = PushRules::new(vec![custom_rule]);
result = evaluator.run(
&FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true),
&FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false),
None,
None,
);

@ -527,6 +527,7 @@ pub struct FilteredPushRules {
msc1767_enabled: bool,
msc3381_polls_enabled: bool,
msc3664_enabled: bool,
msc4028_push_encrypted_events: bool,
}
#[pymethods]
@ -538,6 +539,7 @@ impl FilteredPushRules {
msc1767_enabled: bool,
msc3381_polls_enabled: bool,
msc3664_enabled: bool,
msc4028_push_encrypted_events: bool,
) -> Self {
Self {
push_rules,
@ -545,6 +547,7 @@ impl FilteredPushRules {
msc1767_enabled,
msc3381_polls_enabled,
msc3664_enabled,
msc4028_push_encrypted_events,
}
}
@ -581,6 +584,12 @@ impl FilteredPushRules {
return false;
}
if !self.msc4028_push_encrypted_events
&& rule.rule_id == "global/override/.org.matrix.msc4028.encrypted_event"
{
return false;
}
true
})
.map(|r| {

@ -36,11 +36,41 @@ import textwrap
import traceback
import unittest.mock
from contextlib import contextmanager
from typing import Any, Callable, Dict, Generator, List, Set, Type, TypeVar
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
Generator,
List,
Set,
Type,
TypeVar,
)
from parameterized import parameterized
from pydantic import BaseModel as PydanticBaseModel, conbytes, confloat, conint, constr
from pydantic.typing import get_args
from synapse._pydantic_compat import HAS_PYDANTIC_V2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import (
BaseModel as PydanticBaseModel,
conbytes,
confloat,
conint,
constr,
)
from pydantic.v1.typing import get_args
else:
from pydantic import (
BaseModel as PydanticBaseModel,
conbytes,
confloat,
conint,
constr,
)
from pydantic.typing import get_args
from typing_extensions import ParamSpec
logger = logging.getLogger(__name__)
@ -251,7 +281,10 @@ class TestConstrainedTypesPatch(unittest.TestCase):
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic import constr
try:
from pydantic.v1 import constr
except ImportError:
from pydantic import constr
constr()
"""
)
@ -269,7 +302,10 @@ class TestConstrainedTypesPatch(unittest.TestCase):
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic import *
try:
from pydantic.v1 import *
except ImportError:
from pydantic import *
constr()
"""
)
@ -278,7 +314,10 @@ class TestConstrainedTypesPatch(unittest.TestCase):
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic.types import constr
try:
from pydantic.v1.types import constr
except ImportError:
from pydantic.types import constr
constr()
"""
)
@ -287,8 +326,11 @@ class TestConstrainedTypesPatch(unittest.TestCase):
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
import pydantic.types
pydantic.types.constr()
try:
from pydantic.v1 import types as pydantic_types
except ImportError:
from pydantic import types as pydantic_types
pydantic_types.constr()
"""
)
@ -296,7 +338,10 @@ class TestConstrainedTypesPatch(unittest.TestCase):
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic import constr
try:
from pydantic.v1 import constr
except ImportError:
from pydantic import constr
constr(min_length=10)
"""
)
@ -305,7 +350,10 @@ class TestConstrainedTypesPatch(unittest.TestCase):
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic import constr
try:
from pydantic.v1 import constr
except ImportError:
from pydantic import constr
constr(strict=False)
"""
)
@ -314,7 +362,10 @@ class TestConstrainedTypesPatch(unittest.TestCase):
with monkeypatch_pydantic():
run_test_snippet(
"""
from pydantic import constr
try:
from pydantic.v1 import constr
except ImportError:
from pydantic import constr
constr(strict=True)
"""
)
@ -323,7 +374,10 @@ class TestConstrainedTypesPatch(unittest.TestCase):
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic import constr
try:
from pydantic.v1 import constr
except ImportError:
from pydantic import constr
x: constr()
"""
)
@ -332,7 +386,10 @@ class TestConstrainedTypesPatch(unittest.TestCase):
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic import BaseModel, conint
try:
from pydantic.v1 import BaseModel, conint
except ImportError:
from pydantic import BaseModel, conint
class C:
x: conint()
"""
@ -361,7 +418,10 @@ class TestFieldTypeInspection(unittest.TestCase):
run_test_snippet(
f"""
from typing import *
from pydantic import *
try:
from pydantic.v1 import *
except ImportError:
from pydantic import *
class C(BaseModel):
f: {annotation}
"""
@ -388,7 +448,10 @@ class TestFieldTypeInspection(unittest.TestCase):
run_test_snippet(
f"""
from typing import *
from pydantic import *
try:
from pydantic.v1 import *
except ImportError:
from pydantic import *
class C(BaseModel):
f: {annotation}
"""
@ -398,7 +461,10 @@ class TestFieldTypeInspection(unittest.TestCase):
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic.main import BaseModel
try:
from pydantic.v1.main import BaseModel
except ImportError:
from pydantic.main import BaseModel
class C(BaseModel):
f: str
"""

@ -0,0 +1,21 @@
# Copyright 2023 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List
class ServerAclEvaluator:
def __init__(
self, allow_ip_literals: bool, allow: List[str], deny: List[str]
) -> None: ...
def server_matches_acl_event(self, server_name: str) -> bool: ...

@ -46,6 +46,7 @@ class FilteredPushRules:
msc1767_enabled: bool,
msc3381_polls_enabled: bool,
msc3664_enabled: bool,
msc4028_push_encrypted_events: bool,
): ...
def rules(self) -> Collection[Tuple[PushRule, bool]]: ...

@ -0,0 +1,26 @@
# Copyright 2023 Maxwell G <maxwell@gtmx.me>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from packaging.version import Version
try:
from pydantic import __version__ as pydantic_version
except ImportError:
import importlib.metadata
pydantic_version = importlib.metadata.version("pydantic")
HAS_PYDANTIC_V2: bool = Version(pydantic_version).major == 2
__all__ = ("HAS_PYDANTIC_V2",)
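
Call sites pair this flag with `TYPE_CHECKING` so that mypy — which now runs against pydantic v2 per the dev-dependency change earlier in this diff — always resolves the `pydantic.v1` names, while a v1 install still imports plain `pydantic` at runtime. A condensed sketch of the pattern used throughout the test changes above (the model itself is hypothetical):

from typing import TYPE_CHECKING

from synapse._pydantic_compat import HAS_PYDANTIC_V2

if TYPE_CHECKING or HAS_PYDANTIC_V2:
    from pydantic.v1 import BaseModel, StrictStr
else:
    from pydantic import BaseModel, StrictStr


class ExampleBody(BaseModel):  # hypothetical model, for illustration only
    user_id: StrictStr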

@ -37,7 +37,7 @@ from synapse.api.constants import EduTypes, EventContentFields
from synapse.api.errors import SynapseError
from synapse.api.presence import UserPresenceState
from synapse.events import EventBase, relation_from_event
from synapse.types import JsonDict, RoomID, UserID
from synapse.types import JsonDict, JsonMapping, RoomID, UserID
if TYPE_CHECKING:
from synapse.server import HomeServer
@ -191,7 +191,7 @@ FilterEvent = TypeVar("FilterEvent", EventBase, UserPresenceState, JsonDict)
class FilterCollection:
def __init__(self, hs: "HomeServer", filter_json: JsonDict):
def __init__(self, hs: "HomeServer", filter_json: JsonMapping):
self._filter_json = filter_json
room_filter_json = self._filter_json.get("room", {})
@ -219,7 +219,7 @@ class FilterCollection:
def __repr__(self) -> str:
return "<FilterCollection %s>" % (json.dumps(self._filter_json),)
def get_filter_json(self) -> JsonDict:
def get_filter_json(self) -> JsonMapping:
return self._filter_json
def timeline_limit(self) -> int:
@ -313,7 +313,7 @@ class FilterCollection:
class Filter:
def __init__(self, hs: "HomeServer", filter_json: JsonDict):
def __init__(self, hs: "HomeServer", filter_json: JsonMapping):
self._hs = hs
self._store = hs.get_datastores().main
self.filter_json = filter_json
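
Widening these parameters from `JsonDict` to `JsonMapping` is a read-only/variance change: `Mapping` promises the callee will not mutate the payload, so callers may pass immutable structures (such as frozen dicts) as well as plain dicts. A sketch of the distinction, assuming the aliases are defined as in `synapse.types`:

from typing import Any, Dict, Mapping

JsonDict = Dict[str, Any]        # mutable: callers must hand over a real dict
JsonMapping = Mapping[str, Any]  # read-only view: any mapping is acceptable

def describe(filter_json: JsonMapping) -> str:
    # Reading is fine; `filter_json["room"] = {}` would be a type error.
    return ", ".join(sorted(filter_json))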

@ -23,7 +23,7 @@ from netaddr import IPSet
from synapse.api.constants import EventTypes
from synapse.events import EventBase
from synapse.types import DeviceListUpdates, JsonDict, UserID
from synapse.types import DeviceListUpdates, JsonDict, JsonMapping, UserID
from synapse.util.caches.descriptors import _CacheContext, cached
if TYPE_CHECKING:
@ -379,8 +379,8 @@ class AppServiceTransaction:
service: ApplicationService,
id: int,
events: Sequence[EventBase],
ephemeral: List[JsonDict],
to_device_messages: List[JsonDict],
ephemeral: List[JsonMapping],
to_device_messages: List[JsonMapping],
one_time_keys_count: TransactionOneTimeKeysCount,
unused_fallback_keys: TransactionUnusedFallbackKeys,
device_list_summary: DeviceListUpdates,

View file

@ -41,7 +41,7 @@ from synapse.events import EventBase
from synapse.events.utils import SerializeEventConfig, serialize_event
from synapse.http.client import SimpleHttpClient, is_unknown_endpoint
from synapse.logging import opentracing
from synapse.types import DeviceListUpdates, JsonDict, ThirdPartyInstanceID
from synapse.types import DeviceListUpdates, JsonDict, JsonMapping, ThirdPartyInstanceID
from synapse.util.caches.response_cache import ResponseCache
if TYPE_CHECKING:
@ -306,8 +306,8 @@ class ApplicationServiceApi(SimpleHttpClient):
self,
service: "ApplicationService",
events: Sequence[EventBase],
ephemeral: List[JsonDict],
to_device_messages: List[JsonDict],
ephemeral: List[JsonMapping],
to_device_messages: List[JsonMapping],
one_time_keys_count: TransactionOneTimeKeysCount,
unused_fallback_keys: TransactionUnusedFallbackKeys,
device_list_summary: DeviceListUpdates,

View file

@ -73,7 +73,7 @@ from synapse.events import EventBase
from synapse.logging.context import run_in_background
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage.databases.main import DataStore
from synapse.types import DeviceListUpdates, JsonDict
from synapse.types import DeviceListUpdates, JsonMapping
from synapse.util import Clock
if TYPE_CHECKING:
@ -121,8 +121,8 @@ class ApplicationServiceScheduler:
self,
appservice: ApplicationService,
events: Optional[Collection[EventBase]] = None,
ephemeral: Optional[Collection[JsonDict]] = None,
to_device_messages: Optional[Collection[JsonDict]] = None,
ephemeral: Optional[Collection[JsonMapping]] = None,
to_device_messages: Optional[Collection[JsonMapping]] = None,
device_list_summary: Optional[DeviceListUpdates] = None,
) -> None:
"""
@ -180,9 +180,9 @@ class _ServiceQueuer:
# dict of {service_id: [events]}
self.queued_events: Dict[str, List[EventBase]] = {}
# dict of {service_id: [events]}
self.queued_ephemeral: Dict[str, List[JsonDict]] = {}
self.queued_ephemeral: Dict[str, List[JsonMapping]] = {}
# dict of {service_id: [to_device_message_json]}
self.queued_to_device_messages: Dict[str, List[JsonDict]] = {}
self.queued_to_device_messages: Dict[str, List[JsonMapping]] = {}
# dict of {service_id: [device_list_summary]}
self.queued_device_list_summaries: Dict[str, List[DeviceListUpdates]] = {}
@ -293,8 +293,8 @@ class _ServiceQueuer:
self,
service: ApplicationService,
events: Iterable[EventBase],
ephemerals: Iterable[JsonDict],
to_device_messages: Iterable[JsonDict],
ephemerals: Iterable[JsonMapping],
to_device_messages: Iterable[JsonMapping],
) -> Tuple[TransactionOneTimeKeysCount, TransactionUnusedFallbackKeys]:
"""
Given a list of the events, ephemeral messages and to-device messages,
@ -364,8 +364,8 @@ class _TransactionController:
self,
service: ApplicationService,
events: Sequence[EventBase],
ephemeral: Optional[List[JsonDict]] = None,
to_device_messages: Optional[List[JsonDict]] = None,
ephemeral: Optional[List[JsonMapping]] = None,
to_device_messages: Optional[List[JsonMapping]] = None,
one_time_keys_count: Optional[TransactionOneTimeKeysCount] = None,
unused_fallback_keys: Optional[TransactionUnusedFallbackKeys] = None,
device_list_summary: Optional[DeviceListUpdates] = None,

View file

@ -11,10 +11,16 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, Type, TypeVar
from typing import TYPE_CHECKING, Any, Dict, Type, TypeVar
import jsonschema
from pydantic import BaseModel, ValidationError, parse_obj_as
from synapse._pydantic_compat import HAS_PYDANTIC_V2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import BaseModel, ValidationError, parse_obj_as
else:
from pydantic import BaseModel, ValidationError, parse_obj_as
from synapse.config._base import ConfigError
from synapse.types import JsonDict, StrSequence

View file

@ -415,3 +415,7 @@ class ExperimentalConfig(Config):
LimitExceededError.include_retry_after_header = experimental.get(
"msc4041_enabled", False
)
self.msc4028_push_encrypted_events = experimental.get(
"msc4028_push_encrypted_events", False
)

View file

@ -15,10 +15,16 @@
import argparse
import logging
from typing import Any, Dict, List, Optional, Union
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
import attr
from pydantic import BaseModel, Extra, StrictBool, StrictInt, StrictStr
from synapse._pydantic_compat import HAS_PYDANTIC_V2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import BaseModel, Extra, StrictBool, StrictInt, StrictStr
else:
from pydantic import BaseModel, Extra, StrictBool, StrictInt, StrictStr
from synapse.config._base import (
Config,

View file

@ -12,10 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import collections.abc
from typing import List, Type, Union, cast
from typing import TYPE_CHECKING, List, Type, Union, cast
import jsonschema
from pydantic import Field, StrictBool, StrictStr
from synapse._pydantic_compat import HAS_PYDANTIC_V2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import Field, StrictBool, StrictStr
else:
from pydantic import Field, StrictBool, StrictStr
from synapse.api.constants import (
MAX_ALIAS_LENGTH,
@ -33,9 +39,9 @@ from synapse.events.utils import (
CANONICALJSON_MIN_INT,
validate_canonicaljson,
)
from synapse.federation.federation_server import server_matches_acl_event
from synapse.http.servlet import validate_json_object
from synapse.rest.models import RequestBodyModel
from synapse.storage.controllers.state import server_acl_evaluator_from_event
from synapse.types import EventID, JsonDict, RoomID, StrCollection, UserID
@ -100,7 +106,10 @@ class EventValidator:
self._validate_retention(event)
elif event.type == EventTypes.ServerACL:
if not server_matches_acl_event(config.server.server_name, event):
server_acl_evaluator = server_acl_evaluator_from_event(event)
if not server_acl_evaluator.server_matches_acl_event(
config.server.server_name
):
raise SynapseError(
400, "Can't create an ACL event that denies the local server"
)

View file

@ -64,7 +64,7 @@ from synapse.federation.transport.client import SendJoinResponse
from synapse.http.client import is_unknown_endpoint
from synapse.http.types import QueryParams
from synapse.logging.opentracing import SynapseTags, log_kv, set_tag, tag_args, trace
from synapse.types import JsonDict, UserID, get_domain_from_id
from synapse.types import JsonDict, StrCollection, UserID, get_domain_from_id
from synapse.util.async_helpers import concurrently_execute
from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.retryutils import NotRetryingDestination
@ -1704,7 +1704,7 @@ class FederationClient(FederationBase):
async def timestamp_to_event(
self,
*,
destinations: List[str],
destinations: StrCollection,
room_id: str,
timestamp: int,
direction: Direction,

View file

@ -29,10 +29,8 @@ from typing import (
Union,
)
from matrix_common.regex import glob_to_regex
from prometheus_client import Counter, Gauge, Histogram
from twisted.internet.abstract import isIPAddress
from twisted.python import failure
from synapse.api.constants import (
@ -1324,75 +1322,13 @@ class FederationServer(FederationBase):
Raises:
AuthError if the server does not match the ACL
"""
acl_event = await self._storage_controllers.state.get_current_state_event(
room_id, EventTypes.ServerACL, ""
server_acl_evaluator = (
await self._storage_controllers.state.get_server_acl_for_room(room_id)
)
if not acl_event or server_matches_acl_event(server_name, acl_event):
return
raise AuthError(code=403, msg="Server is banned from room")
def server_matches_acl_event(server_name: str, acl_event: EventBase) -> bool:
"""Check if the given server is allowed by the ACL event
Args:
server_name: name of server, without any port part
acl_event: m.room.server_acl event
Returns:
True if this server is allowed by the ACLs
"""
logger.debug("Checking %s against acl %s", server_name, acl_event.content)
# first of all, check if literal IPs are blocked, and if so, whether the
# server name is a literal IP
allow_ip_literals = acl_event.content.get("allow_ip_literals", True)
if not isinstance(allow_ip_literals, bool):
logger.warning("Ignoring non-bool allow_ip_literals flag")
allow_ip_literals = True
if not allow_ip_literals:
# check for ipv6 literals. These start with '['.
if server_name[0] == "[":
return False
# check for ipv4 literals. We can just lift the routine from twisted.
if isIPAddress(server_name):
return False
# next, check the deny list
deny = acl_event.content.get("deny", [])
if not isinstance(deny, (list, tuple)):
logger.warning("Ignoring non-list deny ACL %s", deny)
deny = []
for e in deny:
if _acl_entry_matches(server_name, e):
# logger.info("%s matched deny rule %s", server_name, e)
return False
# then the allow list.
allow = acl_event.content.get("allow", [])
if not isinstance(allow, (list, tuple)):
logger.warning("Ignoring non-list allow ACL %s", allow)
allow = []
for e in allow:
if _acl_entry_matches(server_name, e):
# logger.info("%s matched allow rule %s", server_name, e)
return True
# everything else should be rejected.
# logger.info("%s fell through", server_name)
return False
def _acl_entry_matches(server_name: str, acl_entry: Any) -> bool:
if not isinstance(acl_entry, str):
logger.warning(
"Ignoring non-str ACL entry '%s' (is %s)", acl_entry, type(acl_entry)
)
return False
regex = glob_to_regex(acl_entry)
return bool(regex.match(server_name))
if server_acl_evaluator and not server_acl_evaluator.server_matches_acl_event(
server_name
):
raise AuthError(code=403, msg="Server is banned from room")
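Editor's note: a compressed sketch of the new control flow, with a stub standing in for the cached Rust evaluator; the stub and the exception are illustrative (Synapse raises AuthError with code 403):

from typing import Optional

class _StubAclEvaluator:
    def __init__(self, banned: str) -> None:
        self._banned = banned

    def server_matches_acl_event(self, server_name: str) -> bool:
        return server_name != self._banned

async def check_server_matches_acl(
    evaluator: Optional[_StubAclEvaluator], server_name: str
) -> None:
    # No ACL event in the room means every server is allowed.
    if evaluator and not evaluator.server_matches_acl_event(server_name):
        raise PermissionError("Server is banned from room")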
class FederationHandlerRegistry:

View file

@ -46,6 +46,7 @@ from synapse.storage.databases.main.directory import RoomAliasMapping
from synapse.types import (
DeviceListUpdates,
JsonDict,
JsonMapping,
RoomAlias,
RoomStreamToken,
StreamKeyType,
@ -397,7 +398,7 @@ class ApplicationServicesHandler:
async def _handle_typing(
self, service: ApplicationService, new_token: int
) -> List[JsonDict]:
) -> List[JsonMapping]:
"""
Return the typing events since the given stream token that the given application
service should receive.
@ -432,7 +433,7 @@ class ApplicationServicesHandler:
async def _handle_receipts(
self, service: ApplicationService, new_token: int
) -> List[JsonDict]:
) -> List[JsonMapping]:
"""
Return the latest read receipts that the given application service should receive.
@ -471,7 +472,7 @@ class ApplicationServicesHandler:
service: ApplicationService,
users: Collection[Union[str, UserID]],
new_token: Optional[int],
) -> List[JsonDict]:
) -> List[JsonMapping]:
"""
Return the latest presence updates that the given application service should receive.
@ -491,7 +492,7 @@ class ApplicationServicesHandler:
A list of json dictionaries containing data derived from the presence events
that should be sent to the given application service.
"""
events: List[JsonDict] = []
events: List[JsonMapping] = []
presence_source = self.event_sources.sources.presence
from_key = await self.store.get_type_stream_id_for_appservice(
service, "presence"

View file

@ -14,7 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Mapping, Optional, Tuple
from typing import TYPE_CHECKING, Dict, Iterable, List, Mapping, Optional, Tuple
import attr
from canonicaljson import encode_canonical_json
@ -31,6 +31,7 @@ from synapse.logging.context import make_deferred_yieldable, run_in_background
from synapse.logging.opentracing import log_kv, set_tag, tag_args, trace
from synapse.types import (
JsonDict,
JsonMapping,
UserID,
get_domain_from_id,
get_verify_key_from_cross_signing_key,
@ -272,11 +273,7 @@ class E2eKeysHandler:
delay_cancellation=True,
)
ret = {"device_keys": results, "failures": failures}
ret.update(cross_signing_keys)
return ret
return {"device_keys": results, "failures": failures, **cross_signing_keys}
@trace
async def _query_devices_for_destination(
@ -408,7 +405,7 @@ class E2eKeysHandler:
@cancellable
async def get_cross_signing_keys_from_cache(
self, query: Iterable[str], from_user_id: Optional[str]
) -> Dict[str, Dict[str, dict]]:
) -> Dict[str, Dict[str, JsonMapping]]:
"""Get cross-signing keys for users from the database
Args:
@ -551,16 +548,13 @@ class E2eKeysHandler:
self.config.federation.allow_device_name_lookup_over_federation
),
)
ret = {"device_keys": res}
# add in the cross-signing keys
cross_signing_keys = await self.get_cross_signing_keys_from_cache(
device_keys_query, None
)
ret.update(cross_signing_keys)
return ret
return {"device_keys": res, **cross_signing_keys}
async def claim_local_one_time_keys(
self,
@ -1127,7 +1121,7 @@ class E2eKeysHandler:
user_id: str,
master_key_id: str,
signed_master_key: JsonDict,
stored_master_key: JsonDict,
stored_master_key: JsonMapping,
devices: Dict[str, Dict[str, JsonDict]],
) -> List["SignatureListItem"]:
"""Check signatures of a user's master key made by their devices.
@ -1278,7 +1272,7 @@ class E2eKeysHandler:
async def _get_e2e_cross_signing_verify_key(
self, user_id: str, key_type: str, from_user_id: Optional[str] = None
) -> Tuple[JsonDict, str, VerifyKey]:
) -> Tuple[JsonMapping, str, VerifyKey]:
"""Fetch locally or remotely query for a cross-signing public key.
First, attempt to fetch the cross-signing public key from storage.
@ -1333,7 +1327,7 @@ class E2eKeysHandler:
self,
user: UserID,
desired_key_type: str,
) -> Optional[Tuple[Dict[str, Any], str, VerifyKey]]:
) -> Optional[Tuple[JsonMapping, str, VerifyKey]]:
"""Queries cross-signing keys for a remote user and saves them to the database
Only the key specified by `key_type` will be returned, while all retrieved keys
@ -1474,7 +1468,7 @@ def _check_device_signature(
user_id: str,
verify_key: VerifyKey,
signed_device: JsonDict,
stored_device: JsonDict,
stored_device: JsonMapping,
) -> None:
"""Check that a signature on a device or cross-signing key is correct and
matches the copy of the device/key that we have stored. Throws an

View file

@ -1538,7 +1538,7 @@ class FederationEventHandler:
logger.exception("Failed to resync device for %s", sender)
async def backfill_event_id(
self, destinations: List[str], room_id: str, event_id: str
self, destinations: StrCollection, room_id: str, event_id: str
) -> PulledPduInfo:
"""Backfill a single event and persist it as a non-outlier which means
we also pull in all of the state and auth events necessary for it.
@ -2342,6 +2342,12 @@ class FederationEventHandler:
# TODO retrieve the previous state, and exclude join -> join transitions
self._notifier.notify_user_joined_room(event.event_id, event.room_id)
# If this is a server ACL event, clear the cache in the storage controller.
if event.type == EventTypes.ServerACL:
self._state_storage_controller.get_server_acl_for_room.invalidate(
(event.room_id,)
)
def _sanity_check_event(self, ev: EventBase) -> None:
"""
Do some early sanity checks of a received event
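Editor's note: the invalidation added in this hunk (and the matching hunks in the message handler and replication data handler later in this diff) drops the per-room ACL cache entry whenever a new m.room.server_acl event lands. A toy model of that pattern, with names that are illustrative rather than Synapse's cache internals:

from typing import Dict, Optional, Tuple

class RoomAclCache:
    def __init__(self) -> None:
        self._entries: Dict[str, str] = {}

    def set(self, room_id: str, evaluator: str) -> None:
        self._entries[room_id] = evaluator

    def get(self, room_id: str) -> Optional[str]:
        return self._entries.get(room_id)

    def invalidate(self, key: Tuple[str]) -> None:
        # Mirrors get_server_acl_for_room.invalidate((room_id,)): the cache
        # key is a 1-tuple of the cached method's arguments.
        self._entries.pop(key[0], None)

cache = RoomAclCache()
cache.set("!room:example.org", "acl-evaluator-v1")
cache.invalidate(("!room:example.org",))
assert cache.get("!room:example.org") is None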

View file

@ -32,6 +32,7 @@ from synapse.storage.roommember import RoomsForUser
from synapse.streams.config import PaginationConfig
from synapse.types import (
JsonDict,
JsonMapping,
Requester,
RoomStreamToken,
StreamKeyType,
@ -454,7 +455,7 @@ class InitialSyncHandler:
for s in states
]
async def get_receipts() -> List[JsonDict]:
async def get_receipts() -> List[JsonMapping]:
receipts = await self.store.get_linearized_receipts_for_room(
room_id, to_key=now_token.receipt_key
)

View file

@ -1730,6 +1730,11 @@ class EventCreationHandler:
event.event_id, event.room_id
)
if event.type == EventTypes.ServerACL:
self._storage_controllers.state.get_server_acl_for_room.invalidate(
(event.room_id,)
)
await self._maybe_kick_guest_users(event, context)
if event.type == EventTypes.CanonicalAlias:

View file

@ -401,9 +401,9 @@ class BasePresenceHandler(abc.ABC):
states,
)
for destination, host_states in hosts_to_states.items():
for destinations, host_states in hosts_to_states:
await self._federation.send_presence_to_destinations(
host_states, [destination]
host_states, destinations
)
async def send_full_presence_to_users(self, user_ids: StrCollection) -> None:
@ -1000,9 +1000,9 @@ class PresenceHandler(BasePresenceHandler):
list(to_federation_ping.values()),
)
for destination, states in hosts_to_states.items():
for destinations, states in hosts_to_states:
await self._federation_queue.send_presence_to_destinations(
states, [destination]
states, destinations
)
@wrap_as_background_process("handle_presence_timeouts")
@ -2276,7 +2276,7 @@ async def get_interested_remotes(
store: DataStore,
presence_router: PresenceRouter,
states: List[UserPresenceState],
) -> Dict[str, Set[UserPresenceState]]:
) -> List[Tuple[StrCollection, Collection[UserPresenceState]]]:
"""Given a list of presence states figure out which remote servers
should be sent which.
@ -2290,23 +2290,26 @@ async def get_interested_remotes(
Returns:
A list of 2-tuples of (destinations, presence states): each collection of states should be sent to every destination in the paired collection.
"""
hosts_and_states: Dict[str, Set[UserPresenceState]] = {}
hosts_and_states: List[Tuple[StrCollection, Collection[UserPresenceState]]] = []
# First we look up the rooms each user is in (as well as any explicit
# subscriptions), then for each distinct room we look up the remote
# hosts in those rooms.
room_ids_to_states, users_to_states = await get_interested_parties(
store, presence_router, states
)
for state in states:
room_ids = await store.get_rooms_for_user(state.user_id)
hosts: Set[str] = set()
for room_id in room_ids:
room_hosts = await store.get_current_hosts_in_room(room_id)
hosts.update(room_hosts)
hosts_and_states.append((hosts, [state]))
for room_id, states in room_ids_to_states.items():
hosts = await store.get_current_hosts_in_room(room_id)
for host in hosts:
hosts_and_states.setdefault(host, set()).update(states)
# Ask a presence routing module for any additional parties if one
# is loaded.
router_users_to_states = await presence_router.get_users_for_states(states)
for user_id, states in users_to_states.items():
for user_id, user_states in router_users_to_states.items():
host = get_domain_from_id(user_id)
hosts_and_states.setdefault(host, set()).update(states)
hosts_and_states.append(([host], user_states))
return hosts_and_states
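Editor's note: a sketch of the shape change above — callers now iterate a list of (destinations, states) pairs instead of a destination-keyed dict. The data here is made up:

from typing import Collection, List, Tuple

# Stand-in for List[Tuple[StrCollection, Collection[UserPresenceState]]].
hosts_and_states: List[Tuple[Collection[str], Collection[str]]] = [
    ({"a.example.org", "b.example.org"}, ("presence for @alice:x",)),
    (["c.example.org"], ("presence for @bob:y",)),
]

for destinations, states in hosts_and_states:
    # Old shape: for destination, states in hosts_to_states.items():
    #     send_presence_to_destinations(states, [destination])
    print(f"sending {len(states)} state(s) to {sorted(destinations)}")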

View file

@ -19,6 +19,7 @@ from synapse.appservice import ApplicationService
from synapse.streams import EventSource
from synapse.types import (
JsonDict,
JsonMapping,
ReadReceipt,
StreamKeyType,
UserID,
@ -204,15 +205,15 @@ class ReceiptsHandler:
await self.federation_sender.send_read_receipt(receipt)
class ReceiptEventSource(EventSource[int, JsonDict]):
class ReceiptEventSource(EventSource[int, JsonMapping]):
def __init__(self, hs: "HomeServer"):
self.store = hs.get_datastores().main
self.config = hs.config
@staticmethod
def filter_out_private_receipts(
rooms: Sequence[JsonDict], user_id: str
) -> List[JsonDict]:
rooms: Sequence[JsonMapping], user_id: str
) -> List[JsonMapping]:
"""
Filters a list of serialized receipts (as returned by /sync and /initialSync)
and removes private read receipts of other users.
@ -229,7 +230,7 @@ class ReceiptEventSource(EventSource[int, JsonDict]):
The same as rooms, but filtered.
"""
result = []
result: List[JsonMapping] = []
# Iterate through each room's receipt content.
for room in rooms:
@ -282,7 +283,7 @@ class ReceiptEventSource(EventSource[int, JsonDict]):
room_ids: Iterable[str],
is_guest: bool,
explicit_room_id: Optional[str] = None,
) -> Tuple[List[JsonDict], int]:
) -> Tuple[List[JsonMapping], int]:
from_key = int(from_key)
to_key = self.get_current_key()
@ -301,7 +302,7 @@ class ReceiptEventSource(EventSource[int, JsonDict]):
async def get_new_events_as(
self, from_key: int, to_key: int, service: ApplicationService
) -> Tuple[List[JsonDict], int]:
) -> Tuple[List[JsonMapping], int]:
"""Returns a set of new read receipt events that an appservice
may be interested in.

View file

@ -13,7 +13,17 @@
# limitations under the License.
import enum
import logging
from typing import TYPE_CHECKING, Collection, Dict, FrozenSet, Iterable, List, Optional
from typing import (
TYPE_CHECKING,
Collection,
Dict,
FrozenSet,
Iterable,
List,
Mapping,
Optional,
Sequence,
)
import attr
@ -245,7 +255,7 @@ class RelationsHandler:
async def get_references_for_events(
self, event_ids: Collection[str], ignored_users: FrozenSet[str] = frozenset()
) -> Dict[str, List[_RelatedEvent]]:
) -> Mapping[str, Sequence[_RelatedEvent]]:
"""Get a list of references to the given events.
Args:

View file

@ -235,7 +235,7 @@ class SyncResult:
archived: List[ArchivedSyncResult]
to_device: List[JsonDict]
device_lists: DeviceListUpdates
device_one_time_keys_count: JsonDict
device_one_time_keys_count: JsonMapping
device_unused_fallback_key_types: List[str]
def __bool__(self) -> bool:
@ -1558,7 +1558,7 @@ class SyncHandler:
logger.debug("Fetching OTK data")
device_id = sync_config.device_id
one_time_keys_count: JsonDict = {}
one_time_keys_count: JsonMapping = {}
unused_fallback_key_types: List[str] = []
if device_id:
# TODO: We should have a way to let clients differentiate between the states of:

View file

@ -26,7 +26,14 @@ from synapse.metrics.background_process_metrics import (
)
from synapse.replication.tcp.streams import TypingStream
from synapse.streams import EventSource
from synapse.types import JsonDict, Requester, StrCollection, StreamKeyType, UserID
from synapse.types import (
JsonDict,
JsonMapping,
Requester,
StrCollection,
StreamKeyType,
UserID,
)
from synapse.util.caches.stream_change_cache import StreamChangeCache
from synapse.util.metrics import Measure
from synapse.util.retryutils import filter_destinations_by_retry_limiter
@ -487,7 +494,7 @@ class TypingWriterHandler(FollowerTypingHandler):
raise Exception("Typing writer instance got typing info over replication")
class TypingNotificationEventSource(EventSource[int, JsonDict]):
class TypingNotificationEventSource(EventSource[int, JsonMapping]):
def __init__(self, hs: "HomeServer"):
self._main_store = hs.get_datastores().main
self.clock = hs.get_clock()
@ -497,7 +504,7 @@ class TypingNotificationEventSource(EventSource[int, JsonDict]):
#
self.get_typing_handler = hs.get_typing_handler
def _make_event_for(self, room_id: str) -> JsonDict:
def _make_event_for(self, room_id: str) -> JsonMapping:
typing = self.get_typing_handler()._room_typing[room_id]
return {
"type": EduTypes.TYPING,
@ -507,7 +514,7 @@ class TypingNotificationEventSource(EventSource[int, JsonDict]):
async def get_new_events_as(
self, from_key: int, service: ApplicationService
) -> Tuple[List[JsonDict], int]:
) -> Tuple[List[JsonMapping], int]:
"""Returns a set of new typing events that an appservice
may be interested in.
@ -551,7 +558,7 @@ class TypingNotificationEventSource(EventSource[int, JsonDict]):
room_ids: Iterable[str],
is_guest: bool,
explicit_room_id: Optional[str] = None,
) -> Tuple[List[JsonDict], int]:
) -> Tuple[List[JsonMapping], int]:
with Measure(self.clock, "typing.get_new_events"):
from_key = int(from_key)
handler = self.get_typing_handler()

View file

@ -28,8 +28,15 @@ from typing import (
overload,
)
from pydantic import BaseModel, MissingError, PydanticValueError, ValidationError
from pydantic.error_wrappers import ErrorWrapper
from synapse._pydantic_compat import HAS_PYDANTIC_V2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import BaseModel, MissingError, PydanticValueError, ValidationError
from pydantic.v1.error_wrappers import ErrorWrapper
else:
from pydantic import BaseModel, MissingError, PydanticValueError, ValidationError
from pydantic.error_wrappers import ErrorWrapper
from typing_extensions import Literal
from twisted.web.server import Request

View file

@ -50,6 +50,39 @@ TEXT_CONTENT_TYPES = [
"text/xml",
]
# A list of all content types that are "safe" to be rendered inline in a browser.
INLINE_CONTENT_TYPES = [
"text/css",
"text/plain",
"text/csv",
"application/json",
"application/ld+json",
# We allow some media files deemed as safe, which comes from the matrix-react-sdk.
# https://github.com/matrix-org/matrix-react-sdk/blob/a70fcfd0bcf7f8c85986da18001ea11597989a7c/src/utils/blobs.ts#L51
# SVGs are *intentionally* omitted.
"image/jpeg",
"image/gif",
"image/png",
"image/apng",
"image/webp",
"image/avif",
"video/mp4",
"video/webm",
"video/ogg",
"video/quicktime",
"audio/mp4",
"audio/webm",
"audio/aac",
"audio/mpeg",
"audio/ogg",
"audio/wave",
"audio/wav",
"audio/x-wav",
"audio/x-pn-wav",
"audio/flac",
"audio/x-flac",
]
def parse_media_id(request: Request) -> Tuple[str, str, Optional[str]]:
"""Parses the server name, media ID and optional file name from the request URI
@ -153,8 +186,13 @@ def add_file_headers(
request.setHeader(b"Content-Type", content_type.encode("UTF-8"))
# Use a Content-Disposition of attachment to force download of media.
disposition = "attachment"
# A strict subset of content types is allowed to be inlined so that they may
# be viewed directly in a browser. Other file types are forced to be downloads.
if media_type.lower() in INLINE_CONTENT_TYPES:
disposition = "inline"
else:
disposition = "attachment"
if upload_name:
# RFC6266 section 4.1 [1] defines both `filename` and `filename*`.
#
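Editor's note: a condensed sketch of the disposition decision above, with the allow-list trimmed to a few entries (the full list is in the hunk; SVG is deliberately absent):

INLINE_TYPES = {"text/plain", "application/json", "image/png", "video/mp4"}

def pick_disposition(media_type: str) -> str:
    # Only a strict subset of content types may render inline in a browser;
    # everything else is forced to download via Content-Disposition.
    return "inline" if media_type.lower() in INLINE_TYPES else "attachment"

assert pick_disposition("IMAGE/PNG") == "inline"
assert pick_disposition("image/svg+xml") == "attachment"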

View file

@ -131,7 +131,7 @@ class BulkPushRuleEvaluator:
async def _get_rules_for_event(
self,
event: EventBase,
) -> Dict[str, FilteredPushRules]:
) -> Mapping[str, FilteredPushRules]:
"""Get the push rules for all users who may need to be notified about
the event.

View file

@ -205,6 +205,12 @@ class ReplicationDataHandler:
self.notifier.notify_user_joined_room(
row.data.event_id, row.data.room_id
)
# If this is a server ACL event, clear the cache in the storage controller.
if row.data.type == EventTypes.ServerACL:
self._state_storage_controller.get_server_acl_for_room.invalidate(
(row.data.room_id,)
)
elif stream_name == UnPartialStatedRoomStream.NAME:
for row in rows:
assert isinstance(row, UnPartialStatedRoomStreamRow)
@ -333,7 +339,7 @@ class ReplicationDataHandler:
try:
await make_deferred_yieldable(deferred)
except defer.TimeoutError:
logger.error(
logger.warning(
"Timed out waiting for repl stream %r to reach %s (%s)"
"; currently at: %s",
stream_name,

View file

@ -16,7 +16,6 @@
# limitations under the License.
import logging
import platform
from http import HTTPStatus
from typing import TYPE_CHECKING, Optional, Tuple
@ -107,10 +106,7 @@ class VersionServlet(RestServlet):
PATTERNS = admin_patterns("/server_version$")
def __init__(self, hs: "HomeServer"):
self.res = {
"server_version": SYNAPSE_VERSION,
"python_version": platform.python_version(),
}
self.res = {"server_version": SYNAPSE_VERSION}
def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
return HTTPStatus.OK, self.res

View file

@ -18,7 +18,12 @@ import random
from typing import TYPE_CHECKING, List, Optional, Tuple
from urllib.parse import urlparse
from pydantic import StrictBool, StrictStr, constr
from synapse._pydantic_compat import HAS_PYDANTIC_V2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import StrictBool, StrictStr, constr
else:
from pydantic import StrictBool, StrictStr, constr
from typing_extensions import Literal
from twisted.web.server import Request

View file

@ -17,7 +17,12 @@ import logging
from http import HTTPStatus
from typing import TYPE_CHECKING, List, Optional, Tuple
from pydantic import Extra, StrictStr
from synapse._pydantic_compat import HAS_PYDANTIC_V2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import Extra, StrictStr
else:
from pydantic import Extra, StrictStr
from synapse.api import errors
from synapse.api.errors import NotFoundError, SynapseError, UnrecognizedRequestError

View file

@ -15,7 +15,13 @@
import logging
from typing import TYPE_CHECKING, List, Optional, Tuple
from pydantic import StrictStr
from synapse._pydantic_compat import HAS_PYDANTIC_V2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import StrictStr
else:
from pydantic import StrictStr
from typing_extensions import Literal
from twisted.web.server import Request

View file

@ -19,7 +19,7 @@ from synapse.api.errors import AuthError, NotFoundError, StoreError, SynapseErro
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet, parse_json_object_from_request
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict, UserID
from synapse.types import JsonDict, JsonMapping, UserID
from ._base import client_patterns, set_timeline_upper_limit
@ -41,7 +41,7 @@ class GetFilterRestServlet(RestServlet):
async def on_GET(
self, request: SynapseRequest, user_id: str, filter_id: str
) -> Tuple[int, JsonDict]:
) -> Tuple[int, JsonMapping]:
target_user = UserID.from_string(user_id)
requester = await self.auth.get_user_by_req(request)

View file

@ -13,7 +13,12 @@
# limitations under the License.
from typing import TYPE_CHECKING, Dict, Optional
from pydantic import Extra, StrictInt, StrictStr, constr, validator
from synapse._pydantic_compat import HAS_PYDANTIC_V2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import Extra, StrictInt, StrictStr, constr, validator
else:
from pydantic import Extra, StrictInt, StrictStr, constr, validator
from synapse.rest.models import RequestBodyModel
from synapse.util.threepids import validate_email

View file

@ -16,7 +16,13 @@ import logging
import re
from typing import TYPE_CHECKING, Dict, Mapping, Optional, Set, Tuple
from pydantic import Extra, StrictInt, StrictStr
from synapse._pydantic_compat import HAS_PYDANTIC_V2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import Extra, StrictInt, StrictStr
else:
from pydantic import StrictInt, StrictStr, Extra
from signedjson.sign import sign_json
from twisted.web.server import Request

View file

@ -1,4 +1,24 @@
from pydantic import BaseModel, Extra
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from synapse._pydantic_compat import HAS_PYDANTIC_V2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import BaseModel, Extra
else:
from pydantic import BaseModel, Extra
class RequestBodyModel(BaseModel):

View file

@ -23,7 +23,6 @@ from typing import (
Generator,
Iterable,
List,
Mapping,
Optional,
Sequence,
Set,
@ -269,7 +268,7 @@ async def _get_power_level_for_sender(
async def _get_auth_chain_difference(
room_id: str,
state_sets: Sequence[Mapping[Any, str]],
state_sets: Sequence[StateMap[str]],
unpersisted_events: Dict[str, EventBase],
state_res_store: StateResolutionStore,
) -> Set[str]:
@ -405,7 +404,7 @@ def _seperate(
# mypy doesn't understand that discarding None above means that conflicted
# state is StateMap[Set[str]], not StateMap[Set[Optional[Str]]].
return unconflicted_state, conflicted_state # type: ignore
return unconflicted_state, conflicted_state # type: ignore[return-value]
def _is_power_event(event: EventBase) -> bool:

View file

@ -31,8 +31,8 @@ from typing import (
)
import attr
from pydantic import BaseModel
from synapse._pydantic_compat import HAS_PYDANTIC_V2
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage.engines import PostgresEngine
from synapse.storage.types import Connection, Cursor
@ -41,6 +41,11 @@ from synapse.util import Clock, json_encoder
from . import engines
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import BaseModel
else:
from pydantic import BaseModel
if TYPE_CHECKING:
from synapse.server import HomeServer
from synapse.storage.database import DatabasePool, LoggingTransaction

View file

@ -37,6 +37,7 @@ from synapse.storage.util.partial_state_events_tracker import (
PartialCurrentStateTracker,
PartialStateEventsTracker,
)
from synapse.synapse_rust.acl import ServerAclEvaluator
from synapse.types import MutableStateMap, StateMap, get_domain_from_id
from synapse.types.state import StateFilter
from synapse.util.async_helpers import Linearizer
@ -501,6 +502,31 @@ class StateStorageController:
return event.content.get("alias")
@cached()
async def get_server_acl_for_room(
self, room_id: str
) -> Optional[ServerAclEvaluator]:
"""Get the server ACL evaluator for room, if any
This does up-front parsing of the content to ignore bad data and pre-compile
regular expressions.
Args:
room_id: The room ID
Returns:
The server ACL evaluator, if any
"""
acl_event = await self.get_current_state_event(
room_id, EventTypes.ServerACL, ""
)
if not acl_event:
return None
return server_acl_evaluator_from_event(acl_event)
@trace
@tag_args
async def get_current_state_deltas(
@ -582,7 +608,7 @@ class StateStorageController:
@trace
@tag_args
async def get_current_hosts_in_room_ordered(self, room_id: str) -> List[str]:
async def get_current_hosts_in_room_ordered(self, room_id: str) -> Tuple[str, ...]:
"""Get current hosts in room based on current state.
Blocks until we have full state for the given room. This only happens for rooms
@ -760,3 +786,36 @@ class StateStorageController:
cache.state_group = object()
return frozenset(cache.hosts_to_joined_users)
def server_acl_evaluator_from_event(acl_event: EventBase) -> "ServerAclEvaluator":
"""
Create a ServerAclEvaluator from a m.room.server_acl event's content.
This does up-front parsing of the content to ignore bad data. It then creates
the ServerAclEvaluator which will pre-compile regular expressions from the globs.
"""
# first of all, parse if literal IPs are blocked.
allow_ip_literals = acl_event.content.get("allow_ip_literals", True)
if not isinstance(allow_ip_literals, bool):
logger.warning("Ignoring non-bool allow_ip_literals flag")
allow_ip_literals = True
# next, parse the deny list by ignoring any non-strings.
deny = acl_event.content.get("deny", [])
if not isinstance(deny, (list, tuple)):
logger.warning("Ignoring non-list deny ACL %s", deny)
deny = []
else:
deny = [s for s in deny if isinstance(s, str)]
# then the allow list.
allow = acl_event.content.get("allow", [])
if not isinstance(allow, (list, tuple)):
logger.warning("Ignoring non-list allow ACL %s", allow)
allow = []
else:
allow = [s for s in allow if isinstance(s, str)]
return ServerAclEvaluator(allow_ip_literals, allow, deny)
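Editor's note: a pure-Python rehearsal of the defensive parsing above, using a plain dict in place of the event and a tuple in place of the Rust evaluator; illustrative only:

from typing import Any, List, Mapping, Tuple

def parse_acl_content(content: Mapping[str, Any]) -> Tuple[bool, List[str], List[str]]:
    allow_ip_literals = content.get("allow_ip_literals", True)
    if not isinstance(allow_ip_literals, bool):
        allow_ip_literals = True  # bad data is ignored, as in the real function

    def strings(key: str) -> List[str]:
        value = content.get(key, [])
        if not isinstance(value, (list, tuple)):
            return []  # non-list ACLs are ignored outright
        return [s for s in value if isinstance(s, str)]  # drop non-string entries

    return allow_ip_literals, strings("allow"), strings("deny")

assert parse_acl_content({"allow": ["*", 1], "deny": "oops"}) == (True, ["*"], [])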

View file

@ -361,19 +361,7 @@ class LoggingTransaction:
@property
def description(
self,
) -> Optional[
Sequence[
Tuple[
str,
Optional[Any],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[int],
]
]
]:
) -> Optional[Sequence[Any]]:
return self.txn.description
def execute_batch(self, sql: str, args: Iterable[Iterable[Any]]) -> None:

View file

@ -45,7 +45,7 @@ from synapse.storage.databases.main.events_worker import EventsWorkerStore
from synapse.storage.databases.main.roommember import RoomMemberWorkerStore
from synapse.storage.types import Cursor
from synapse.storage.util.sequence import build_sequence_generator
from synapse.types import DeviceListUpdates, JsonDict
from synapse.types import DeviceListUpdates, JsonMapping
from synapse.util import json_encoder
from synapse.util.caches.descriptors import _CacheContext, cached
@ -268,8 +268,8 @@ class ApplicationServiceTransactionWorkerStore(
self,
service: ApplicationService,
events: Sequence[EventBase],
ephemeral: List[JsonDict],
to_device_messages: List[JsonDict],
ephemeral: List[JsonMapping],
to_device_messages: List[JsonMapping],
one_time_keys_count: TransactionOneTimeKeysCount,
unused_fallback_keys: TransactionUnusedFallbackKeys,
device_list_summary: DeviceListUpdates,

View file

@ -55,7 +55,12 @@ from synapse.storage.util.id_generators import (
AbstractStreamIdGenerator,
StreamIdGenerator,
)
from synapse.types import JsonDict, StrCollection, get_verify_key_from_cross_signing_key
from synapse.types import (
JsonDict,
JsonMapping,
StrCollection,
get_verify_key_from_cross_signing_key,
)
from synapse.util import json_decoder, json_encoder
from synapse.util.caches.descriptors import cached, cachedList
from synapse.util.caches.lrucache import LruCache
@ -746,7 +751,7 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
@cancellable
async def get_user_devices_from_cache(
self, user_ids: Set[str], user_and_device_ids: List[Tuple[str, str]]
) -> Tuple[Set[str], Dict[str, Mapping[str, JsonDict]]]:
) -> Tuple[Set[str], Dict[str, Mapping[str, JsonMapping]]]:
"""Get the devices (and keys if any) for remote users from the cache.
Args:
@ -766,13 +771,13 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
user_ids_not_in_cache = unique_user_ids - user_ids_in_cache
# First fetch all the users which all devices are to be returned.
results: Dict[str, Mapping[str, JsonDict]] = {}
results: Dict[str, Mapping[str, JsonMapping]] = {}
for user_id in user_ids:
if user_id in user_ids_in_cache:
results[user_id] = await self.get_cached_devices_for_user(user_id)
# Then fetch all device-specific requests, but skip users we've already
# fetched all devices for.
device_specific_results: Dict[str, Dict[str, JsonDict]] = {}
device_specific_results: Dict[str, Dict[str, JsonMapping]] = {}
for user_id, device_id in user_and_device_ids:
if user_id in user_ids_in_cache and user_id not in user_ids:
device = await self._get_cached_user_device(user_id, device_id)
@ -801,7 +806,9 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
return user_ids_in_cache
@cached(num_args=2, tree=True)
async def _get_cached_user_device(self, user_id: str, device_id: str) -> JsonDict:
async def _get_cached_user_device(
self, user_id: str, device_id: str
) -> JsonMapping:
content = await self.db_pool.simple_select_one_onecol(
table="device_lists_remote_cache",
keyvalues={"user_id": user_id, "device_id": device_id},
@ -811,7 +818,9 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
return db_to_json(content)
@cached()
async def get_cached_devices_for_user(self, user_id: str) -> Mapping[str, JsonDict]:
async def get_cached_devices_for_user(
self, user_id: str
) -> Mapping[str, JsonMapping]:
devices = await self.db_pool.simple_select_list(
table="device_lists_remote_cache",
keyvalues={"user_id": user_id},
@ -1042,7 +1051,7 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
)
async def get_device_list_last_stream_id_for_remotes(
self, user_ids: Iterable[str]
) -> Dict[str, Optional[str]]:
) -> Mapping[str, Optional[str]]:
rows = await self.db_pool.simple_select_many_batch(
table="device_lists_remote_extremeties",
column="user_id",

View file

@ -52,7 +52,7 @@ from synapse.storage.database import (
from synapse.storage.databases.main.cache import CacheInvalidationWorkerStore
from synapse.storage.engines import PostgresEngine
from synapse.storage.util.id_generators import StreamIdGenerator
from synapse.types import JsonDict
from synapse.types import JsonDict, JsonMapping
from synapse.util import json_decoder, json_encoder
from synapse.util.caches.descriptors import cached, cachedList
from synapse.util.cancellation import cancellable
@ -125,7 +125,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
async def get_e2e_device_keys_for_federation_query(
self, user_id: str
) -> Tuple[int, List[JsonDict]]:
) -> Tuple[int, Sequence[JsonMapping]]:
"""Get all devices (with any device keys) for a user
Returns:
@ -174,7 +174,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
@cached(iterable=True)
async def _get_e2e_device_keys_for_federation_query_inner(
self, user_id: str
) -> List[JsonDict]:
) -> Sequence[JsonMapping]:
"""Get all devices (with any device keys) for a user"""
devices = await self.get_e2e_device_keys_and_signatures([(user_id, None)])
@ -578,7 +578,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
@cached(max_entries=10000)
async def count_e2e_one_time_keys(
self, user_id: str, device_id: str
) -> Dict[str, int]:
) -> Mapping[str, int]:
"""Count the number of one time keys the server has for a device
Returns:
A mapping from algorithm to number of keys for that algorithm.
@ -812,7 +812,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
async def get_e2e_cross_signing_key(
self, user_id: str, key_type: str, from_user_id: Optional[str] = None
) -> Optional[JsonDict]:
) -> Optional[JsonMapping]:
"""Returns a user's cross-signing key.
Args:
@ -833,7 +833,9 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
return user_keys.get(key_type)
@cached(num_args=1)
def _get_bare_e2e_cross_signing_keys(self, user_id: str) -> Mapping[str, JsonDict]:
def _get_bare_e2e_cross_signing_keys(
self, user_id: str
) -> Mapping[str, JsonMapping]:
"""Dummy function. Only used to make a cache for
_get_bare_e2e_cross_signing_keys_bulk.
"""
@ -846,7 +848,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
)
async def _get_bare_e2e_cross_signing_keys_bulk(
self, user_ids: Iterable[str]
) -> Dict[str, Optional[Mapping[str, JsonDict]]]:
) -> Mapping[str, Optional[Mapping[str, JsonMapping]]]:
"""Returns the cross-signing keys for a set of users. The output of this
function should be passed to _get_e2e_cross_signing_signatures_txn if
the signatures for the calling user need to be fetched.
@ -860,15 +862,12 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
their user ID will map to None.
"""
result = await self.db_pool.runInteraction(
return await self.db_pool.runInteraction(
"get_bare_e2e_cross_signing_keys_bulk",
self._get_bare_e2e_cross_signing_keys_bulk_txn,
user_ids,
)
# The `Optional` comes from the `@cachedList` decorator.
return cast(Dict[str, Optional[Mapping[str, JsonDict]]], result)
def _get_bare_e2e_cross_signing_keys_bulk_txn(
self,
txn: LoggingTransaction,
@ -1026,7 +1025,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
@cancellable
async def get_e2e_cross_signing_keys_bulk(
self, user_ids: List[str], from_user_id: Optional[str] = None
) -> Dict[str, Optional[Mapping[str, JsonDict]]]:
) -> Mapping[str, Optional[Mapping[str, JsonMapping]]]:
"""Returns the cross-signing keys for a set of users.
Args:
@ -1043,7 +1042,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
if from_user_id:
result = cast(
Dict[str, Optional[Mapping[str, JsonDict]]],
Dict[str, Optional[Mapping[str, JsonMapping]]],
await self.db_pool.runInteraction(
"get_e2e_cross_signing_signatures",
self._get_e2e_cross_signing_signatures_txn,

View file

@ -24,6 +24,7 @@ from typing import (
Dict,
Iterable,
List,
Mapping,
MutableMapping,
Optional,
Set,
@ -1633,7 +1634,7 @@ class EventsWorkerStore(SQLBaseStore):
self,
room_id: str,
event_ids: Collection[str],
) -> Dict[str, bool]:
) -> Mapping[str, bool]:
"""Helper for have_seen_events
Returns:
@ -2329,7 +2330,7 @@ class EventsWorkerStore(SQLBaseStore):
@cachedList(cached_method_name="is_partial_state_event", list_name="event_ids")
async def get_partial_state_events(
self, event_ids: Collection[str]
) -> Dict[str, bool]:
) -> Mapping[str, bool]:
"""Checks which of the given events have partial state
Args:

View file

@ -25,7 +25,7 @@ from synapse.storage.database import (
LoggingTransaction,
)
from synapse.storage.engines import PostgresEngine
from synapse.types import JsonDict, UserID
from synapse.types import JsonDict, JsonMapping, UserID
from synapse.util.caches.descriptors import cached
if TYPE_CHECKING:
@ -145,7 +145,7 @@ class FilteringWorkerStore(SQLBaseStore):
@cached(num_args=2)
async def get_user_filter(
self, user_id: UserID, filter_id: Union[int, str]
) -> JsonDict:
) -> JsonMapping:
# filter_id is BIGINT UNSIGNED, so if it isn't a number, fail
# with a coherent error message rather than 500 M_UNKNOWN.
try:

View file

@ -16,7 +16,7 @@
import itertools
import json
import logging
from typing import Dict, Iterable, Optional, Tuple
from typing import Dict, Iterable, Mapping, Optional, Tuple
from canonicaljson import encode_canonical_json
from signedjson.key import decode_verify_key_bytes
@ -130,7 +130,7 @@ class KeyStore(CacheInvalidationWorkerStore):
)
async def get_server_keys_json(
self, server_name_and_key_ids: Iterable[Tuple[str, str]]
) -> Dict[Tuple[str, str], FetchKeyResult]:
) -> Mapping[Tuple[str, str], FetchKeyResult]:
"""
Args:
server_name_and_key_ids:
@ -200,7 +200,7 @@ class KeyStore(CacheInvalidationWorkerStore):
)
async def get_server_keys_json_for_remote(
self, server_name: str, key_ids: Iterable[str]
) -> Dict[str, Optional[FetchKeyResultForRemote]]:
) -> Mapping[str, Optional[FetchKeyResultForRemote]]:
"""Fetch the cached keys for the given server/key IDs.
If we have multiple entries for a given key ID, returns the most recent.

View file

@ -11,7 +11,17 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Tuple, cast
from typing import (
TYPE_CHECKING,
Any,
Dict,
Iterable,
List,
Mapping,
Optional,
Tuple,
cast,
)
from synapse.api.presence import PresenceState, UserPresenceState
from synapse.replication.tcp.streams import PresenceStream
@ -249,7 +259,7 @@ class PresenceStore(PresenceBackgroundUpdateStore, CacheInvalidationWorkerStore)
)
async def get_presence_for_users(
self, user_ids: Iterable[str]
) -> Dict[str, UserPresenceState]:
) -> Mapping[str, UserPresenceState]:
rows = await self.db_pool.simple_select_many_batch(
table="presence_stream",
column="user_id",

View file

@ -88,6 +88,7 @@ def _load_rules(
msc1767_enabled=experimental_config.msc1767_enabled,
msc3664_enabled=experimental_config.msc3664_enabled,
msc3381_polls_enabled=experimental_config.msc3381_polls_enabled,
msc4028_push_encrypted_events=experimental_config.msc4028_push_encrypted_events,
)
return filtered_rules
@ -216,7 +217,7 @@ class PushRulesWorkerStore(
@cachedList(cached_method_name="get_push_rules_for_user", list_name="user_ids")
async def bulk_get_push_rules(
self, user_ids: Collection[str]
) -> Dict[str, FilteredPushRules]:
) -> Mapping[str, FilteredPushRules]:
if not user_ids:
return {}

View file

@ -43,7 +43,7 @@ from synapse.storage.util.id_generators import (
MultiWriterIdGenerator,
StreamIdGenerator,
)
from synapse.types import JsonDict
from synapse.types import JsonDict, JsonMapping
from synapse.util import json_encoder
from synapse.util.caches.descriptors import cached, cachedList
from synapse.util.caches.stream_change_cache import StreamChangeCache
@ -218,7 +218,7 @@ class ReceiptsWorkerStore(SQLBaseStore):
@cached()
async def _get_receipts_for_user_with_orderings(
self, user_id: str, receipt_type: str
) -> JsonDict:
) -> JsonMapping:
"""
Fetch receipts for all rooms that the given user is joined to.
@ -258,7 +258,7 @@ class ReceiptsWorkerStore(SQLBaseStore):
async def get_linearized_receipts_for_rooms(
self, room_ids: Iterable[str], to_key: int, from_key: Optional[int] = None
) -> List[dict]:
) -> List[JsonMapping]:
"""Get receipts for multiple rooms for sending to clients.
Args:
@ -287,7 +287,7 @@ class ReceiptsWorkerStore(SQLBaseStore):
async def get_linearized_receipts_for_room(
self, room_id: str, to_key: int, from_key: Optional[int] = None
) -> Sequence[JsonDict]:
) -> Sequence[JsonMapping]:
"""Get receipts for a single room for sending to clients.
Args:
@ -310,7 +310,7 @@ class ReceiptsWorkerStore(SQLBaseStore):
@cached(tree=True)
async def _get_linearized_receipts_for_room(
self, room_id: str, to_key: int, from_key: Optional[int] = None
) -> Sequence[JsonDict]:
) -> Sequence[JsonMapping]:
"""See get_linearized_receipts_for_room"""
def f(txn: LoggingTransaction) -> List[Dict[str, Any]]:
@ -353,7 +353,7 @@ class ReceiptsWorkerStore(SQLBaseStore):
)
async def _get_linearized_receipts_for_rooms(
self, room_ids: Collection[str], to_key: int, from_key: Optional[int] = None
) -> Dict[str, Sequence[JsonDict]]:
) -> Mapping[str, Sequence[JsonMapping]]:
if not room_ids:
return {}
@ -415,7 +415,7 @@ class ReceiptsWorkerStore(SQLBaseStore):
)
async def get_linearized_receipts_for_all_rooms(
self, to_key: int, from_key: Optional[int] = None
) -> Mapping[str, JsonDict]:
) -> Mapping[str, JsonMapping]:
"""Get receipts for all rooms between two stream_ids, up
to a limit of the latest 100 read receipts.

View file

@ -465,7 +465,7 @@ class RelationsWorkerStore(SQLBaseStore):
@cachedList(cached_method_name="get_references_for_event", list_name="event_ids")
async def get_references_for_events(
self, event_ids: Collection[str]
) -> Mapping[str, Optional[List[_RelatedEvent]]]:
) -> Mapping[str, Optional[Sequence[_RelatedEvent]]]:
"""Get a list of references to the given events.
Args:
@ -519,7 +519,7 @@ class RelationsWorkerStore(SQLBaseStore):
@cachedList(cached_method_name="get_applicable_edit", list_name="event_ids")
async def get_applicable_edits(
self, event_ids: Collection[str]
) -> Dict[str, Optional[EventBase]]:
) -> Mapping[str, Optional[EventBase]]:
"""Get the most recent edit (if any) that has happened for the given
events.
@ -605,7 +605,7 @@ class RelationsWorkerStore(SQLBaseStore):
@cachedList(cached_method_name="get_thread_summary", list_name="event_ids")
async def get_thread_summaries(
self, event_ids: Collection[str]
) -> Dict[str, Optional[Tuple[int, EventBase]]]:
) -> Mapping[str, Optional[Tuple[int, EventBase]]]:
"""Get the number of threaded replies and the latest reply (if any) for the given events.
Args:
@ -779,7 +779,7 @@ class RelationsWorkerStore(SQLBaseStore):
@cachedList(cached_method_name="get_thread_participated", list_name="event_ids")
async def get_threads_participated(
self, event_ids: Collection[str], user_id: str
) -> Dict[str, bool]:
) -> Mapping[str, bool]:
"""Get whether the requesting user participated in the given threads.
This is separate from get_thread_summaries since that can be cached across
@ -931,7 +931,7 @@ class RelationsWorkerStore(SQLBaseStore):
room_id: str,
limit: int = 5,
from_token: Optional[ThreadsNextBatch] = None,
) -> Tuple[List[str], Optional[ThreadsNextBatch]]:
) -> Tuple[Sequence[str], Optional[ThreadsNextBatch]]:
"""Get a list of thread IDs, ordered by topological ordering of their
latest reply.

View file

@ -191,7 +191,7 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
)
async def get_subset_users_in_room_with_profiles(
self, room_id: str, user_ids: Collection[str]
) -> Dict[str, ProfileInfo]:
) -> Mapping[str, ProfileInfo]:
"""Get a mapping from user ID to profile information for a list of users
in a given room.
@ -676,7 +676,7 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
)
async def _get_rooms_for_users(
self, user_ids: Collection[str]
) -> Dict[str, FrozenSet[str]]:
) -> Mapping[str, FrozenSet[str]]:
"""A batched version of `get_rooms_for_user`.
Returns:
@ -881,7 +881,7 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
)
async def _get_user_ids_from_membership_event_ids(
self, event_ids: Iterable[str]
) -> Dict[str, Optional[str]]:
) -> Mapping[str, Optional[str]]:
"""For given set of member event_ids check if they point to a join
event.
@ -984,7 +984,7 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
)
@cached(iterable=True, max_entries=10000)
async def get_current_hosts_in_room_ordered(self, room_id: str) -> List[str]:
async def get_current_hosts_in_room_ordered(self, room_id: str) -> Tuple[str, ...]:
"""
Get current hosts in room based on current state.
@ -1013,12 +1013,14 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
# `get_users_in_room` rather than funky SQL.
domains = await self.get_current_hosts_in_room(room_id)
return list(domains)
return tuple(domains)
# For PostgreSQL we can use a regex to pull out the domains from the
# joined users in `current_state_events` via regex.
def get_current_hosts_in_room_ordered_txn(txn: LoggingTransaction) -> List[str]:
def get_current_hosts_in_room_ordered_txn(
txn: LoggingTransaction,
) -> Tuple[str, ...]:
# Returns a list of servers currently joined in the room sorted by
# longest in the room first (aka. with the lowest depth). The
# heuristic of sorting by servers who have been in the room the
@ -1043,7 +1045,7 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
"""
txn.execute(sql, (room_id,))
# `server_domain` will be `NULL` for malformed MXIDs with no colons.
return [d for d, in txn if d is not None]
return tuple(d for d, in txn if d is not None)
return await self.db_pool.runInteraction(
"get_current_hosts_in_room_ordered", get_current_hosts_in_room_ordered_txn
@ -1191,7 +1193,7 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
)
async def get_membership_from_event_ids(
self, member_event_ids: Iterable[str]
) -> Dict[str, Optional[EventIdMembership]]:
) -> Mapping[str, Optional[EventIdMembership]]:
"""Get user_id and membership of a set of event IDs.
Returns:

Some files were not shown because too many files have changed in this diff.