Merge branch 'develop' into madlittlemods/docker-workers-prometheus-service-discovery

Eric Eastwood 2026-01-08 12:55:18 -06:00
commit 770cb3313a
113 changed files with 1483 additions and 1313 deletions

View file

@ -75,7 +75,7 @@ jobs:
touch "${{ runner.temp }}/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: digests-${{ matrix.suffix }}
path: ${{ runner.temp }}/digests/*
@ -95,7 +95,7 @@ jobs:
- build
steps:
- name: Download digests
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
path: ${{ runner.temp }}/digests
pattern: digests-*

View file

@ -21,7 +21,7 @@ jobs:
- name: Setup mdbook
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
with:
mdbook-version: '0.4.17'
mdbook-version: '0.5.2'
- name: Setup python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
@ -39,7 +39,7 @@ jobs:
cp book/welcome_and_overview.html book/index.html
- name: Upload Artifact
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: book
path: book
@ -55,7 +55,7 @@ jobs:
- name: Setup mdbook
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
with:
mdbook-version: '0.4.17'
mdbook-version: '0.5.2'
- name: Setup htmltest
run: |
@ -64,8 +64,17 @@ jobs:
tar zxf htmltest_0.17.0_linux_amd64.tar.gz
- name: Test links with htmltest
# Build the book with `./` as the site URL (to make checks on 404.html possible)
# Then run htmltest (without checking external links since that involves the network and is slow).
run: |
# Build the book with `./` as the site URL (to make checks on 404.html possible)
MDBOOK_OUTPUT__HTML__SITE_URL="./" mdbook build
./htmltest book --skip-external
# Delete the contents of the print.html file, as it can raise false
# positives during link checking.
#
# We empty out the file, instead of deleting it, as doing so would
# just cause htmltest to complain that links to it were invalid.
# Ideally `htmltest` would have an option to ignore specific files
# instead.
echo '<!DOCTYPE HTML>' > book/print.html
./htmltest book --conf docs/.htmltest.yml

View file

@ -58,7 +58,7 @@ jobs:
- name: Setup mdbook
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
with:
mdbook-version: '0.4.17'
mdbook-version: '0.5.2'
- name: Set version of docs
run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js

View file

@ -173,7 +173,7 @@ jobs:
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
- name: Upload SyTest logs
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
if: ${{ always() }}
with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})

View file

@ -66,7 +66,7 @@ jobs:
install: true
- name: Set up docker layer caching
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
@ -101,7 +101,7 @@ jobs:
echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT"
- name: Upload debs as artifacts
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
path: debs/*
@ -152,7 +152,7 @@ jobs:
# musl: (TODO: investigate).
CIBW_TEST_SKIP: pp3*-* *musl*
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: Wheel-${{ matrix.os }}
path: ./wheelhouse/*.whl
@ -173,7 +173,7 @@ jobs:
- name: Build sdist
run: python -m build --sdist
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: Sdist
path: dist/*.tar.gz
@ -189,7 +189,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download all workflow run artifacts
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
- name: Build a tarball for the debs
# We need to merge all the debs uploads into one folder, then compress
# that.

View file

@ -174,7 +174,7 @@ jobs:
# Cribbed from
# https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
- name: Restore/persist mypy's cache
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: |
.mypy_cache
@ -561,7 +561,7 @@ jobs:
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
- name: Upload SyTest logs
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
if: ${{ always() }}
with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
@ -658,7 +658,7 @@ jobs:
PGPASSWORD: postgres
PGDATABASE: postgres
- name: "Upload schema differences"
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
with:
name: Schema dumps

View file

@ -147,7 +147,7 @@ jobs:
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
- name: Upload SyTest logs
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
if: ${{ always() }}
with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})

View file

@ -1,3 +1,80 @@
# Synapse 1.145.0rc3 (2026-01-07)
No significant changes since 1.145.0rc2.
This RC strips out unnecessary files from the wheels that were added when fixing the source distribution packaging in the previous RC.
# Synapse 1.145.0rc2 (2026-01-07)
No significant changes since 1.145.0rc1.
This RC fixes the source distribution packaging for uploading to PyPI.
# Synapse 1.145.0rc1 (2026-01-06)
## End of Life of Ubuntu 25.04 Plucky Puffin
Ubuntu 25.04 (Plucky Puffin) will be end of life on Jan 17, 2026. Synapse will stop building packages for Ubuntu 25.04 shortly thereafter.
## Updates to Locked Dependencies No Longer Included in Changelog
The "Updates to locked dependencies" section has been removed from the changelog due to lack of use and the maintenance burden. ([\#19254](https://github.com/element-hq/synapse/issues/19254))
## Features
- Add `memberships` endpoint to the admin API. This is useful for forensics and T&S purposes. ([\#19260](https://github.com/element-hq/synapse/issues/19260))
- Server admins can bypass the quarantine media check when downloading media by setting the `admin_unsafely_bypass_quarantine` query parameter to `true` on Client-Server API media download requests; see the sketch after this list. ([\#19275](https://github.com/element-hq/synapse/issues/19275))
- Implemented pagination for the [MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666) mutual rooms endpoint. Contributed by @tulir @ Beeper. ([\#19279](https://github.com/element-hq/synapse/issues/19279))
- Admin API: add worker support to `GET /_synapse/admin/v2/users/<user_id>`. ([\#19281](https://github.com/element-hq/synapse/issues/19281))
- Improve proxy support for the `federation_client.py` dev script. Contributed by Denis Kasak (@dkasak). ([\#19300](https://github.com/element-hq/synapse/issues/19300))
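As a hedged sketch of the quarantine-bypass parameter above (the path is the standard Client-Server API media download endpoint; the placeholders are illustrative):

```http
GET /_matrix/client/v1/media/download/{serverName}/{mediaId}?admin_unsafely_bypass_quarantine=true
```

The request must be authenticated as a server admin; without the parameter, quarantined media remains withheld.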
## Bugfixes
- Fix sliding sync performance slowdown for long-lived connections. ([\#19206](https://github.com/element-hq/synapse/issues/19206))
- Fix a bug where Mastodon posts (and possibly other embeds) have the wrong description for URL previews. ([\#19231](https://github.com/element-hq/synapse/issues/19231))
- Fix bug where `Duration` was logged incorrectly. ([\#19267](https://github.com/element-hq/synapse/issues/19267))
- Fix bug introduced in 1.143.0 that broke support for versions of `zope-interface` older than 6.2. ([\#19274](https://github.com/element-hq/synapse/issues/19274))
- Transform events with client metadata before serialising them in the `/event` response. ([\#19340](https://github.com/element-hq/synapse/issues/19340))
## Updates to the Docker image
- Add a way to expose metrics from the Docker image (`SYNAPSE_ENABLE_METRICS`). ([\#19324](https://github.com/element-hq/synapse/issues/19324))
## Improved Documentation
- Document the importance of `public_baseurl` when configuring OpenID Connect authentication. ([\#19270](https://github.com/element-hq/synapse/issues/19270))
## Deprecations and Removals
- Ubuntu 25.04 (Plucky Puffin) will be end of life on Jan 17, 2026. Synapse will stop building packages for Ubuntu 25.04 shortly thereafter.
- Remove the "Updates to locked dependencies" section from the changelog due to lack of use and the maintenance burden. ([\#19254](https://github.com/element-hq/synapse/issues/19254))
## Internal Changes
- Group together dependabot update PRs to reduce the review load. ([\#18402](https://github.com/element-hq/synapse/issues/18402))
- Fix `HomeServer.shutdown()` failing if the homeserver hasn't been setup yet. ([\#19187](https://github.com/element-hq/synapse/issues/19187))
- Respond with useful error codes when `Content-Length` headers are invalid. ([\#19212](https://github.com/element-hq/synapse/issues/19212))
- Fix `HomeServer.shutdown()` failing if the homeserver failed to `start`. ([\#19232](https://github.com/element-hq/synapse/issues/19232))
- Switch the build backend from `poetry-core` to `maturin`. ([\#19234](https://github.com/element-hq/synapse/issues/19234))
- Raise the limit for concurrently-open non-security @dependabot PRs from 5 to 10. ([\#19253](https://github.com/element-hq/synapse/issues/19253))
- Require 14 days to pass before pulling in general dependency updates to help mitigate upstream supply chain attacks. ([\#19258](https://github.com/element-hq/synapse/issues/19258))
- Drop the broken netlify documentation workflow until a new one is implemented. ([\#19262](https://github.com/element-hq/synapse/issues/19262))
- Don't include debug logs in `Clock` unless explicitly enabled. ([\#19278](https://github.com/element-hq/synapse/issues/19278))
- Use `uv` to test olddeps to ensure all transitive dependencies use minimum versions. ([\#19289](https://github.com/element-hq/synapse/issues/19289))
- Add a config option to rate limit searches in the user directory. ([\#19291](https://github.com/element-hq/synapse/issues/19291))
- Log the original bind exception when encountering `Failed to listen on 0.0.0.0, continuing because listening on [::]`. ([\#19297](https://github.com/element-hq/synapse/issues/19297))
- Unpin the version of Rust we use to build Synapse wheels (was 1.82.0) now that macOS support has been dropped. ([\#19302](https://github.com/element-hq/synapse/issues/19302))
- Make it more clear how `shared_extra_conf` is combined in our Docker configuration scripts. ([\#19323](https://github.com/element-hq/synapse/issues/19323))
- Update CI to stream Complement progress and format logs in a separate step after all tests are done. ([\#19326](https://github.com/element-hq/synapse/issues/19326))
- Format `.github/workflows/tests.yml`. ([\#19327](https://github.com/element-hq/synapse/issues/19327))
# Synapse 1.144.0 (2025-12-09)
## Deprecation of macOS Python wheels

Cargo.lock (generated)

@ -1024,9 +1024,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "reqwest"
version = "0.12.24"
version = "0.12.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
checksum = "3b4c14b2d9afca6a60277086b0cc6a6ae0b568f6f7916c943a8cdc79f8be240f"
dependencies = [
"base64",
"bytes",
@ -1468,9 +1468,9 @@ dependencies = [
[[package]]
name = "tower-http"
version = "0.6.6"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
dependencies = [
"bitflags",
"bytes",

View file

@ -4,7 +4,6 @@
title = "Synapse"
authors = ["The Matrix.org Foundation C.I.C."]
language = "en"
multilingual = false
# The directory that documentation files are stored in
src = "docs"
@ -31,13 +30,10 @@ site-url = "/synapse/"
# Additional HTML, JS, CSS that's injected into each page of the book.
# More information available in docs/website_files/README.md
additional-css = [
"docs/website_files/table-of-contents.css",
"docs/website_files/remove-nav-buttons.css",
"docs/website_files/indent-section-headers.css",
"docs/website_files/version-picker.css",
]
additional-js = [
"docs/website_files/table-of-contents.js",
"docs/website_files/version-picker.js",
"docs/website_files/version.js",
]

View file

@ -1 +0,0 @@
Group together dependabot update PRs to reduce the review load.

View file

@ -1 +0,0 @@
Fix `HomeServer.shutdown()` failing if the homeserver hasn't been setup yet.

View file

@ -0,0 +1 @@
Add a new config option [`enable_local_media_storage`](https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#enable_local_media_storage) which controls whether media is additionally stored locally when using configured `media_storage_providers`. Setting this to `false` allows off-site media storage without a local cache. Contributed by Patrice Brend'amour @dr.allgood.

View file

@ -1 +0,0 @@
Fix sliding sync performance slowdown for long-lived connections.

View file

@ -1 +0,0 @@
Respond with useful error codes when `Content-Length` headers are invalid.

View file

@ -1 +0,0 @@
Fix a bug where Mastodon posts (and possibly other embeds) have the wrong description for URL previews.

View file

@ -1 +0,0 @@
Fix `HomeServer.shutdown()` failing if the homeserver failed to `start`.

View file

@ -1 +0,0 @@
Switch the build backend from `poetry-core` to `maturin`.

View file

@ -1 +0,0 @@
Raise the limit for concurrently-open non-security @dependabot PRs from 5 to 10.

View file

@ -1 +0,0 @@
Remove the "Updates to locked dependencies" section from the changelog due to lack of use and the maintenance burden.

View file

@ -1 +0,0 @@
Require 14 days to pass before pulling in general dependency updates to help mitigate upstream supply chain attacks.

View file

@ -1 +0,0 @@
Add `memberships` endpoint to the admin API. This is useful for forensics and T&S purposes.

View file

@ -1 +0,0 @@
Drop the broken netlify documentation workflow until a new one is implemented.

View file

@ -1 +0,0 @@
Fix bug where `Duration` was logged incorrectly.

View file

@ -1 +0,0 @@
Add an admin API for retrieving a paginated list of quarantined media.

View file

@ -1 +0,0 @@
Document the importance of `public_baseurl` when configuring OpenID Connect authentication.

View file

@ -0,0 +1 @@
Stabilise support for [MSC4312](https://github.com/matrix-org/matrix-spec-proposals/pull/4312)'s `m.oauth` User-Interactive Auth stage for resetting cross-signing identity with the OAuth 2.0 API. The old, unstable name (`org.matrix.cross_signing_reset`) is now deprecated and will be removed in a future release.
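As a hedged illustration (not taken from the MSC text), a User-Interactive Auth 401 response advertising the stabilised stage might look like the following; the field values are illustrative:

```json
{
  "session": "<opaque session id>",
  "flows": [{ "stages": ["m.oauth"] }],
  "params": {}
}
```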

View file

@ -1 +0,0 @@
Fix bug introduced in 1.143.0 that broke support for versions of `zope-interface` older than 6.2.

View file

@ -1 +0,0 @@
Server admins can bypass the quarantine media check when downloading media by setting the `admin_unsafely_bypass_quarantine` query parameter to `true` on Client-Server API media download requests.

View file

@ -1 +0,0 @@
Don't include debug logs in `Clock` unless explicitly enabled.

View file

@ -1 +0,0 @@
Implemented pagination for the [MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666) mutual rooms endpoint. Contributed by @tulir @ Beeper.

View file

@ -1 +0,0 @@
Admin API: add worker support to `GET /_synapse/admin/v2/users/<user_id>`.

View file

@ -1 +0,0 @@
Use `uv` to test olddeps to ensure all transitive dependencies use minimum versions.

View file

@ -1 +0,0 @@
Log the original bind exception when encountering `Failed to listen on 0.0.0.0, continuing because listening on [::]`.

View file

@ -1 +0,0 @@
Improve proxy support for the `federation_client.py` dev script. Contributed by Denis Kasak (@dkasak).

View file

@ -1 +0,0 @@
Unpin the version of Rust we use to build Synapse wheels (was 1.82.0) now that macOS support has been dropped.

changelog.d/19310.misc (new file)

@ -0,0 +1 @@
Add an internal `cancel_task` API to the task scheduler.

View file

@ -1 +0,0 @@
Make it more clear how `shared_extra_conf` is combined in our Docker configuration scripts.

View file

@ -1 +0,0 @@
Add a way to expose metrics from the Docker image (`SYNAPSE_ENABLE_METRICS`).

View file

@ -1 +0,0 @@
Update CI to stream Complement progress and format logs in a separate step after all tests are done.

View file

@ -1 +0,0 @@
Format `.github/workflows/tests.yml`.

changelog.d/19335.bugfix (new file)

@ -0,0 +1 @@
Fixed parallel calls to `/_matrix/media/v1/create` being ratelimited for appservices even if `rate_limited: false` was set in the registration. Contributed by @tulir @ Beeper.

changelog.d/19341.doc (new file)

@ -0,0 +1 @@
Remove docs on legacy metric names (no longer in the codebase since 2022-12-06).

changelog.d/19345.misc (new file)

@ -0,0 +1 @@
Replace usage of deprecated `assertEquals` with `assertEqual` in unit test code.

changelog.d/19348.misc (new file)

@ -0,0 +1 @@
Drop support for Ubuntu 25.04 'Plucky Puffin', add support for Ubuntu 25.10 'Questing Quokka'.

changelog.d/19351.misc (new file)

@ -0,0 +1 @@
Revert "Add an Admin API endpoint for listing quarantined media (#19268)".

changelog.d/19353.bugfix (new file)

@ -0,0 +1 @@
Fix a bug introduced in 1.61.0 where a user's membership in a room was accidentally ignored when considering access to historical state events in rooms with the "shared" history visibility. Contributed by Lukas Tautz.

changelog.d/19356.misc (new file)

@ -0,0 +1 @@
Bump `mdbook` from 0.4.17 to 0.5.2 and remove our custom table-of-contents plugin in favour of the new default functionality.

debian/changelog (vendored)

@ -1,3 +1,21 @@
matrix-synapse-py3 (1.145.0~rc3) stable; urgency=medium
* New Synapse release 1.145.0rc3.
-- Synapse Packaging team <packages@matrix.org> Wed, 07 Jan 2026 15:32:07 -0700
matrix-synapse-py3 (1.145.0~rc2) stable; urgency=medium
* New Synapse release 1.145.0rc2.
-- Synapse Packaging team <packages@matrix.org> Wed, 07 Jan 2026 10:10:07 -0700
matrix-synapse-py3 (1.145.0~rc1) stable; urgency=medium
* New Synapse release 1.145.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 06 Jan 2026 09:29:39 -0700
matrix-synapse-py3 (1.144.0) stable; urgency=medium
* New Synapse release 1.144.0.

View file

@ -145,6 +145,12 @@ for port in 8080 8081 8082; do
rc_delayed_event_mgmt:
per_second: 1000
burst_count: 1000
rc_room_creation:
per_second: 1000
burst_count: 1000
rc_user_directory:
per_second: 1000
burst_count: 1000
RC
)
echo "${ratelimiting}" >> "$port.config"

View file

@ -102,6 +102,10 @@ rc_room_creation:
per_second: 9999
burst_count: 9999
rc_user_directory:
per_second: 9999
burst_count: 9999
federation_rr_transactions_per_room_per_second: 9999
allow_device_name_lookup_over_federation: true

docs/.htmltest.yml (new file)

@ -0,0 +1,5 @@
# Configuration for htmltest, which we run in CI to check that links aren't broken in the built documentation.
# See all config options: https://github.com/wjdp/htmltest#wrench-configuration
# Don't check external links, as that requires network access and is slow.
CheckExternal: false
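With this file in place, the CI link check can be reproduced locally by building the book and running `./htmltest book --conf docs/.htmltest.yml`, as the docs workflow does.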

View file

@ -73,33 +73,6 @@ Response:
}
```
## Listing all quarantined media
This API returns a list of all quarantined media on the server. It is paginated, and can be scoped to either local or
remote media. Note that the pagination values are also scoped to the request parameters - changing them but keeping the
same pagination values will result in unexpected results.
Request:
```http
GET /_synapse/admin/v1/media/quarantined?from=0&limit=100&kind=local
```
`from` and `limit` are optional parameters, and default to `0` and `100` respectively. They are the row index and number
of rows to return - they are not timestamps.
`kind` *MUST* either be `local` or `remote`.
The API returns a JSON body containing MXC URIs for the quarantined media, like the following:
```json
{
"media": [
"mxc://localhost/xwvutsrqponmlkjihgfedcba",
"mxc://localhost/abcdefghijklmnopqrstuvwx"
]
}
```
# Quarantine media
Quarantining media means that it is marked as inaccessible by users. It applies

View file

@ -36,9 +36,10 @@ It returns a JSON body like the following:
- "scheduled" - Task is scheduled but not active
- "active" - Task is active and probably running, and if not will be run on next scheduler loop run
- "complete" - Task has completed successfully
- "cancelled" - Task has been cancelled
- "failed" - Task is over and either returned a failed status, or had an exception
* `max_timestamp`: int - Is optional. Returns only the scheduled tasks with a timestamp inferior to the specified one.
* `max_timestamp`: int - Optional. Returns only the scheduled tasks with a timestamp (in milliseconds since the Unix epoch) earlier than the specified one.
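For instance, a filtered query might look like the following (a sketch: the path is the scheduled tasks admin API endpoint, and the query values are illustrative):

```http
GET /_synapse/admin/v1/scheduled_tasks?job_status=active&max_timestamp=1736300000000
```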
**Response**

View file

@ -145,171 +145,3 @@ Example Prometheus target for Synapse with workers:
Labels (`instance`, `job`, `index`) can be defined as anything; they are used to group graphs in Grafana.
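A target definition with such labels might look like the following sketch (the port and label values are illustrative and depend on your worker layout):

```yaml
- targets: ["my.server.here:9101"]
  labels:
    instance: "my.server"
    job: "master"
    index: 1
```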
## Renaming of metrics & deprecation of old names in 1.2
Synapse 1.2 updates the Prometheus metrics to match the naming
convention of the upstream `prometheus_client`. The old names are
considered deprecated and will be removed in a future version of
Synapse.
**The old names will be disabled by default in Synapse v1.71.0 and removed
altogether in Synapse v1.73.0.**
| New Name | Old Name |
| ---------------------------------------------------------------------------- | ---------------------------------------------------------------------- |
| python_gc_objects_collected_total | python_gc_objects_collected |
| python_gc_objects_uncollectable_total | python_gc_objects_uncollectable |
| python_gc_collections_total | python_gc_collections |
| process_cpu_seconds_total | process_cpu_seconds |
| synapse_federation_client_sent_transactions_total | synapse_federation_client_sent_transactions |
| synapse_federation_client_events_processed_total | synapse_federation_client_events_processed |
| synapse_event_processing_loop_count_total | synapse_event_processing_loop_count |
| synapse_event_processing_loop_room_count_total | synapse_event_processing_loop_room_count |
| synapse_util_caches_cache_hits | synapse_util_caches_cache:hits |
| synapse_util_caches_cache_size | synapse_util_caches_cache:size |
| synapse_util_caches_cache_evicted_size | synapse_util_caches_cache:evicted_size |
| synapse_util_caches_cache | synapse_util_caches_cache:total |
| synapse_util_caches_response_cache_size | synapse_util_caches_response_cache:size |
| synapse_util_caches_response_cache_hits | synapse_util_caches_response_cache:hits |
| synapse_util_caches_response_cache_evicted_size | synapse_util_caches_response_cache:evicted_size |
| synapse_util_metrics_block_count_total | synapse_util_metrics_block_count |
| synapse_util_metrics_block_time_seconds_total | synapse_util_metrics_block_time_seconds |
| synapse_util_metrics_block_ru_utime_seconds_total | synapse_util_metrics_block_ru_utime_seconds |
| synapse_util_metrics_block_ru_stime_seconds_total | synapse_util_metrics_block_ru_stime_seconds |
| synapse_util_metrics_block_db_txn_count_total | synapse_util_metrics_block_db_txn_count |
| synapse_util_metrics_block_db_txn_duration_seconds_total | synapse_util_metrics_block_db_txn_duration_seconds |
| synapse_util_metrics_block_db_sched_duration_seconds_total | synapse_util_metrics_block_db_sched_duration_seconds |
| synapse_background_process_start_count_total | synapse_background_process_start_count |
| synapse_background_process_ru_utime_seconds_total | synapse_background_process_ru_utime_seconds |
| synapse_background_process_ru_stime_seconds_total | synapse_background_process_ru_stime_seconds |
| synapse_background_process_db_txn_count_total | synapse_background_process_db_txn_count |
| synapse_background_process_db_txn_duration_seconds_total | synapse_background_process_db_txn_duration_seconds |
| synapse_background_process_db_sched_duration_seconds_total | synapse_background_process_db_sched_duration_seconds |
| synapse_storage_events_persisted_events_total | synapse_storage_events_persisted_events |
| synapse_storage_events_persisted_events_sep_total | synapse_storage_events_persisted_events_sep |
| synapse_storage_events_state_delta_total | synapse_storage_events_state_delta |
| synapse_storage_events_state_delta_single_event_total | synapse_storage_events_state_delta_single_event |
| synapse_storage_events_state_delta_reuse_delta_total | synapse_storage_events_state_delta_reuse_delta |
| synapse_federation_server_received_pdus_total | synapse_federation_server_received_pdus |
| synapse_federation_server_received_edus_total | synapse_federation_server_received_edus |
| synapse_handler_presence_notified_presence_total | synapse_handler_presence_notified_presence |
| synapse_handler_presence_federation_presence_out_total | synapse_handler_presence_federation_presence_out |
| synapse_handler_presence_presence_updates_total | synapse_handler_presence_presence_updates |
| synapse_handler_presence_timers_fired_total | synapse_handler_presence_timers_fired |
| synapse_handler_presence_federation_presence_total | synapse_handler_presence_federation_presence |
| synapse_handler_presence_bump_active_time_total | synapse_handler_presence_bump_active_time |
| synapse_federation_client_sent_edus_total | synapse_federation_client_sent_edus |
| synapse_federation_client_sent_pdu_destinations_count_total | synapse_federation_client_sent_pdu_destinations:count |
| synapse_federation_client_sent_pdu_destinations_total | synapse_federation_client_sent_pdu_destinations:total |
| synapse_handlers_appservice_events_processed_total | synapse_handlers_appservice_events_processed |
| synapse_notifier_notified_events_total | synapse_notifier_notified_events |
| synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter_total | synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter |
| synapse_push_bulk_push_rule_evaluator_push_rules_state_size_counter_total | synapse_push_bulk_push_rule_evaluator_push_rules_state_size_counter |
| synapse_http_httppusher_http_pushes_processed_total | synapse_http_httppusher_http_pushes_processed |
| synapse_http_httppusher_http_pushes_failed_total | synapse_http_httppusher_http_pushes_failed |
| synapse_http_httppusher_badge_updates_processed_total | synapse_http_httppusher_badge_updates_processed |
| synapse_http_httppusher_badge_updates_failed_total | synapse_http_httppusher_badge_updates_failed |
| synapse_admin_mau_current | synapse_admin_mau:current |
| synapse_admin_mau_max | synapse_admin_mau:max |
| synapse_admin_mau_registered_reserved_users | synapse_admin_mau:registered_reserved_users |
Removal of deprecated metrics & time based counters becoming histograms in 0.31.0
---------------------------------------------------------------------------------
The duplicated metrics deprecated in Synapse 0.27.0 have been removed.
All time duration-based metrics have been changed to be seconds. This
affects:
| msec -> sec metrics |
| -------------------------------------- |
| python_gc_time |
| python_twisted_reactor_tick_time |
| synapse_storage_query_time |
| synapse_storage_schedule_time |
| synapse_storage_transaction_time |
Several metrics have been changed to be histograms, which sort entries
into buckets and allow better analysis. The following metrics are now
histograms:
| Altered metrics |
| ------------------------------------------------ |
| python_gc_time |
| python_twisted_reactor_pending_calls |
| python_twisted_reactor_tick_time |
| synapse_http_server_response_time_seconds |
| synapse_storage_query_time |
| synapse_storage_schedule_time |
| synapse_storage_transaction_time |
Block and response metrics renamed for 0.27.0
---------------------------------------------
Synapse 0.27.0 begins the process of rationalising the duplicate
`*:count` metrics reported for the resource tracking for code blocks and
HTTP requests.
At the same time, the corresponding `*:total` metrics are being renamed,
as the `:total` suffix no longer makes sense in the absence of a
corresponding `:count` metric.
To enable a graceful migration path, this release just adds new names
for the metrics being renamed. A future release will remove the old
ones.
The following table shows the new metrics, and the old metrics which
they are replacing.
| New name | Old name |
| ------------------------------------------------------------- | ---------------------------------------------------------- |
| synapse_util_metrics_block_count | synapse_util_metrics_block_timer:count |
| synapse_util_metrics_block_count | synapse_util_metrics_block_ru_utime:count |
| synapse_util_metrics_block_count | synapse_util_metrics_block_ru_stime:count |
| synapse_util_metrics_block_count | synapse_util_metrics_block_db_txn_count:count |
| synapse_util_metrics_block_count | synapse_util_metrics_block_db_txn_duration:count |
| synapse_util_metrics_block_time_seconds | synapse_util_metrics_block_timer:total |
| synapse_util_metrics_block_ru_utime_seconds | synapse_util_metrics_block_ru_utime:total |
| synapse_util_metrics_block_ru_stime_seconds | synapse_util_metrics_block_ru_stime:total |
| synapse_util_metrics_block_db_txn_count | synapse_util_metrics_block_db_txn_count:total |
| synapse_util_metrics_block_db_txn_duration_seconds | synapse_util_metrics_block_db_txn_duration:total |
| synapse_http_server_response_count | synapse_http_server_requests |
| synapse_http_server_response_count | synapse_http_server_response_time:count |
| synapse_http_server_response_count | synapse_http_server_response_ru_utime:count |
| synapse_http_server_response_count | synapse_http_server_response_ru_stime:count |
| synapse_http_server_response_count | synapse_http_server_response_db_txn_count:count |
| synapse_http_server_response_count | synapse_http_server_response_db_txn_duration:count |
| synapse_http_server_response_time_seconds | synapse_http_server_response_time:total |
| synapse_http_server_response_ru_utime_seconds | synapse_http_server_response_ru_utime:total |
| synapse_http_server_response_ru_stime_seconds | synapse_http_server_response_ru_stime:total |
| synapse_http_server_response_db_txn_count | synapse_http_server_response_db_txn_count:total |
| synapse_http_server_response_db_txn_duration_seconds | synapse_http_server_response_db_txn_duration:total |
Standard Metric Names
---------------------
As of synapse version 0.18.2, the format of the process-wide metrics has
been changed to fit prometheus standard naming conventions. Additionally
the units have been changed to seconds, from milliseconds.
| New name | Old name |
| ---------------------------------------- | --------------------------------- |
| process_cpu_user_seconds_total | process_resource_utime / 1000 |
| process_cpu_system_seconds_total | process_resource_stime / 1000 |
| process_open_fds (no \'type\' label) | process_fds |
The python-specific counts of garbage collector performance have been
renamed.
| New name | Old name |
| -------------------------------- | -------------------------- |
| python_gc_time | reactor_gc_time |
| python_gc_unreachable_total | reactor_gc_unreachable |
| python_gc_counts | reactor_gc_counts |
The twisted-specific reactor metrics have been renamed.
| New name | Old name |
| -------------------------------------- | ----------------------- |
| python_twisted_reactor_pending_calls | reactor_pending_calls |
| python_twisted_reactor_tick_time | reactor_tick_time |

View file

@ -117,6 +117,14 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).
# Upgrading to v1.146.0
## Drop support for Ubuntu 25.04 Plucky Puffin, and add support for 25.10 Questing Quokka
Ubuntu 25.04 Plucky Puffin [is end-of-life as of 17 Jan
2026](https://endoflife.date/ubuntu). This release drops support for Ubuntu
25.04, and in its place adds support for Ubuntu 25.10 Questing Quokka.
# Upgrading to v1.144.0
## Worker support for unstable MSC4140 `/restart` endpoint
@ -828,7 +836,7 @@ the names of Prometheus metrics.
If you want to test your changes before legacy names are disabled by default,
you may specify `enable_legacy_metrics: false` in your homeserver configuration.
A list of affected metrics is available on the [Metrics How-to page](https://element-hq.github.io/synapse/v1.69/metrics-howto.html?highlight=metrics%20deprecated#renaming-of-metrics--deprecation-of-old-names-in-12).
A list of affected metrics is available on the [Metrics How-to page](https://element-hq.github.io/synapse/v1.69/metrics-howto.html#renaming-of-metrics--deprecation-of-old-names-in-12).
## Deprecation of the `generate_short_term_login_token` module API method
@ -2423,7 +2431,7 @@ back to v1.3.1, subject to the following:
Some counter metrics have been renamed, with the old names deprecated.
See [the metrics
documentation](metrics-howto.md#renaming-of-metrics--deprecation-of-old-names-in-12)
documentation](https://element-hq.github.io/synapse/v1.69/metrics-howto.html#renaming-of-metrics--deprecation-of-old-names-in-12)
for details.
# Upgrading to v1.1.0

View file

@ -2041,6 +2041,25 @@ rc_room_creation:
burst_count: 5.0
```
---
### `rc_user_directory`
*(object)* This option allows admins to ratelimit searches in the user directory.
_Added in Synapse 1.145.0._
This setting has the following sub-options:
* `per_second` (number): Maximum number of requests a client can send per second.
* `burst_count` (number): Maximum number of requests a client can send before being throttled.
Default configuration:
```yaml
rc_user_directory:
per_second: 0.016
burst_count: 200.0
```
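Example configuration (an illustrative override; choose values to suit your deployment):

```yaml
rc_user_directory:
  per_second: 10
  burst_count: 50
```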
---
### `federation_rr_transactions_per_room_per_second`
*(integer)* Sets outgoing federation transaction frequency for sending read-receipts, per-room.
@ -2092,6 +2111,16 @@ Example configuration:
enable_media_repo: false
```
---
### `enable_local_media_storage`
*(boolean)* Enable the local on-disk media storage provider. When disabled, media is stored only in configured `media_storage_providers` and temporary files are used for processing.
**Warning:** If this option is set to `false` and no `media_storage_providers` are configured, all media requests will return 404 errors as there will be no storage backend available. Defaults to `true`.
Example configuration:
```yaml
enable_local_media_storage: false
```
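Since disabling local storage only makes sense alongside an off-site provider, a combined sketch might look like the following (the module path and options are illustrative and depend on the provider in use):

```yaml
enable_local_media_storage: false
media_storage_providers:
  - module: s3_storage_provider.S3StorageProviderBackend
    store_local: true
    store_remote: true
    store_synchronous: true
    config:
      bucket: my-media-bucket
```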
---
### `media_store_path`
*(string)* Directory where uploaded images and attachments are stored. Defaults to `"media_store"`.

View file

@ -9,27 +9,18 @@ point to additional JS/CSS in this directory that are added on each page load. I
addition, the `theme` directory contains files that overwrite their counterparts in
each of the default themes included with mdbook.
Currently we use these files to generate a floating Table of Contents panel. The code for
which was partially taken from
[JorelAli/mdBook-pagetoc](https://github.com/JorelAli/mdBook-pagetoc/)
before being modified such that it scrolls with the content of the page. This is handled
by the `table-of-contents.js/css` files. The table of contents panel only appears on pages
that have more than one header, as well as only appearing on desktop-sized monitors.
Currently we use these files to make a few modifications:
We remove the navigation arrows which typically appear on the left and right side of the
screen on desktop as they interfere with the table of contents. This is handled by
the `remove-nav-buttons.css` file.
* We stylise the chapter titles in the left sidebar by indenting them
slightly so that they are more visually distinguishable from the section headers
(the bold titles). This is done through the `indent-section-headers.css` file.
Finally, we also stylise the chapter titles in the left sidebar by indenting them
slightly so that they are more visually distinguishable from the section headers
(the bold titles). This is done through the `indent-section-headers.css` file.
In addition to these modifications, we have added a version picker to the documentation.
Users can switch between documentations for different versions of Synapse.
This functionality was implemented through the `version-picker.js` and
`version-picker.css` files.
* We add a version picker pertaining to the different documentation versions
shipped with each version of Synapse. This functionality was implemented through
the `version-picker.js` and `version-picker.css` files, and is currently the only
requirement for the custom `theme/`.
More information can be found in mdbook's official documentation for
[injecting page JS/CSS](https://rust-lang.github.io/mdBook/format/config.html)
and
[customising the default themes](https://rust-lang.github.io/mdBook/format/theme/index.html).
[customising the default themes](https://rust-lang.github.io/mdBook/format/theme/index.html).

View file

@ -1,8 +0,0 @@
/* Remove the prev, next chapter buttons as they interfere with the
* table of contents.
* Note that the table of contents only appears on desktop, thus we
* only remove the desktop (wide) chapter buttons.
*/
.nav-wide-wrapper {
display: none
}

View file

@ -1,47 +0,0 @@
:root {
--pagetoc-width: 250px;
}
@media only screen and (max-width:1439px) {
.sidetoc {
display: none;
}
}
@media only screen and (min-width:1440px) {
main {
position: relative;
margin-left: 100px !important;
margin-right: var(--pagetoc-width) !important;
}
.sidetoc {
margin-left: auto;
margin-right: auto;
left: calc(100% + (var(--content-max-width))/4 - 140px);
position: absolute;
text-align: right;
}
.pagetoc {
position: fixed;
width: var(--pagetoc-width);
overflow: auto;
right: 20px;
height: calc(100% - var(--menu-bar-height));
}
.pagetoc a {
color: var(--fg) !important;
display: block;
padding: 5px 15px 5px 10px;
text-align: left;
text-decoration: none;
}
.pagetoc a:hover,
.pagetoc a.active {
background: var(--sidebar-bg) !important;
color: var(--sidebar-fg) !important;
}
.pagetoc .active {
background: var(--sidebar-bg);
color: var(--sidebar-fg);
}
}

View file

@ -1,148 +0,0 @@
const getPageToc = () => document.getElementsByClassName('pagetoc')[0];
const pageToc = getPageToc();
const pageTocChildren = [...pageToc.children];
const headers = [...document.getElementsByClassName('header')];
// Select highlighted item in ToC when clicking an item
pageTocChildren.forEach(child => {
child.addEventHandler('click', () => {
pageTocChildren.forEach(child => {
child.classList.remove('active');
});
child.classList.add('active');
});
});
/**
* Test whether a node is in the viewport
*/
function isInViewport(node) {
const rect = node.getBoundingClientRect();
return rect.top >= 0 && rect.left >= 0 && rect.bottom <= (window.innerHeight || document.documentElement.clientHeight) && rect.right <= (window.innerWidth || document.documentElement.clientWidth);
}
/**
* Set a new ToC entry.
* Clear any previously highlighted ToC items, set the new one,
* and adjust the ToC scroll position.
*/
function setTocEntry() {
let activeEntry;
const pageTocChildren = [...getPageToc().children];
// Calculate which header is the current one at the top of screen
headers.forEach(header => {
if (window.pageYOffset >= header.offsetTop) {
activeEntry = header;
}
});
// Update selected item in ToC when scrolling
pageTocChildren.forEach(child => {
if (activeEntry.href.localeCompare(child.href) === 0) {
child.classList.add('active');
} else {
child.classList.remove('active');
}
});
let tocEntryForLocation = document.querySelector(`nav a[href="${activeEntry.href}"]`);
if (tocEntryForLocation) {
const headingForLocation = document.querySelector(activeEntry.hash);
if (headingForLocation && isInViewport(headingForLocation)) {
// Update ToC scroll
const nav = getPageToc();
const content = document.querySelector('html');
if (content.scrollTop !== 0) {
nav.scrollTo({
top: tocEntryForLocation.offsetTop - 100,
left: 0,
behavior: 'smooth',
});
} else {
nav.scrollTop = 0;
}
}
}
}
/**
* Populate sidebar on load
*/
window.addEventListener('load', () => {
// Prevent rendering the table of contents of the "print book" page, as it
// will end up being rendered into the output (in a broken-looking way)
// Get the name of the current page (i.e. 'print.html')
const pageNameExtension = window.location.pathname.split('/').pop();
// Split off the extension (as '.../print' is also a valid page name), which
// should result in 'print'
const pageName = pageNameExtension.split('.')[0];
if (pageName === "print") {
// Don't render the table of contents on this page
return;
}
// Only create table of contents if there is more than one header on the page
if (headers.length <= 1) {
return;
}
// Create an entry in the page table of contents for each header in the document
headers.forEach((header, index) => {
const link = document.createElement('a');
// Indent shows hierarchy
let indent = '0px';
switch (header.parentElement.tagName) {
case 'H1':
indent = '5px';
break;
case 'H2':
indent = '20px';
break;
case 'H3':
indent = '30px';
break;
case 'H4':
indent = '40px';
break;
case 'H5':
indent = '50px';
break;
case 'H6':
indent = '60px';
break;
default:
break;
}
let tocEntry;
if (index == 0) {
// Create a bolded title for the first element
tocEntry = document.createElement("strong");
tocEntry.innerHTML = header.text;
} else {
// All other elements are non-bold
tocEntry = document.createTextNode(header.text);
}
link.appendChild(tocEntry);
link.style.paddingLeft = indent;
link.href = header.href;
pageToc.appendChild(link);
});
setTocEntry.call();
});
// Handle active headers on scroll, if there is more than one header on the page
if (headers.length > 1) {
window.addEventListener('scroll', setTocEntry);
}

View file

@ -1,11 +1,11 @@
<!DOCTYPE HTML>
<html lang="{{ language }}" class="sidebar-visible no-js {{ default_theme }}">
<html lang="{{ language }}" class="{{ default_theme }} sidebar-visible" dir="{{ text_direction }}">
<head>
<!-- Book generated using mdBook -->
<meta charset="UTF-8">
<title>{{ title }}</title>
{{#if is_print }}
<meta name="robots" content="noindex" />
<meta name="robots" content="noindex">
{{/if}}
{{#if base_url}}
<base href="{{ base_url }}">
@ -15,60 +15,78 @@
<!-- Custom HTML head -->
{{> head}}
<meta content="text/html; charset=utf-8" http-equiv="Content-Type">
<meta name="description" content="{{ description }}">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="theme-color" content="#ffffff" />
<meta name="theme-color" content="#ffffff">
{{#if favicon_svg}}
<link rel="icon" href="{{ path_to_root }}favicon.svg">
<link rel="icon" href="{{ resource "favicon.svg" }}">
{{/if}}
{{#if favicon_png}}
<link rel="shortcut icon" href="{{ path_to_root }}favicon.png">
<link rel="shortcut icon" href="{{ resource "favicon.png" }}">
{{/if}}
<link rel="stylesheet" href="{{ path_to_root }}css/variables.css">
<link rel="stylesheet" href="{{ path_to_root }}css/general.css">
<link rel="stylesheet" href="{{ path_to_root }}css/chrome.css">
<link rel="stylesheet" href="{{ resource "css/variables.css" }}">
<link rel="stylesheet" href="{{ resource "css/general.css" }}">
<link rel="stylesheet" href="{{ resource "css/chrome.css" }}">
{{#if print_enable}}
<link rel="stylesheet" href="{{ path_to_root }}css/print.css" media="print">
<link rel="stylesheet" href="{{ resource "css/print.css" }}" media="print">
{{/if}}
<!-- Fonts -->
<link rel="stylesheet" href="{{ path_to_root }}FontAwesome/css/font-awesome.css">
{{#if copy_fonts}}
<link rel="stylesheet" href="{{ path_to_root }}fonts/fonts.css">
{{/if}}
<link rel="stylesheet" href="{{ resource "fonts/fonts.css" }}">
<!-- Highlight.js Stylesheets -->
<link rel="stylesheet" href="{{ path_to_root }}highlight.css">
<link rel="stylesheet" href="{{ path_to_root }}tomorrow-night.css">
<link rel="stylesheet" href="{{ path_to_root }}ayu-highlight.css">
<link rel="stylesheet" id="mdbook-highlight-css" href="{{ resource "highlight.css" }}">
<link rel="stylesheet" id="mdbook-tomorrow-night-css" href="{{ resource "tomorrow-night.css" }}">
<link rel="stylesheet" id="mdbook-ayu-highlight-css" href="{{ resource "ayu-highlight.css" }}">
<!-- Custom theme stylesheets -->
{{#each additional_css}}
<link rel="stylesheet" href="{{ ../path_to_root }}{{ this }}">
<link rel="stylesheet" href="{{ resource this }}">
{{/each}}
{{#if mathjax_support}}
<!-- MathJax -->
<script async type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
<script async src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
{{/if}}
<!-- Provide site root and default themes to javascript -->
<script>
const path_to_root = "{{ path_to_root }}";
const default_light_theme = "{{ default_theme }}";
const default_dark_theme = "{{ preferred_dark_theme }}";
{{#if search_js}}
window.path_to_searchindex_js = "{{ resource "searchindex.js" }}";
{{/if}}
</script>
<!-- Start loading toc.js asap -->
<script src="{{ resource "toc.js" }}"></script>
</head>
<body>
<!-- Provide site root to javascript -->
<script type="text/javascript">
var path_to_root = "{{ path_to_root }}";
var default_theme = window.matchMedia("(prefers-color-scheme: dark)").matches ? "{{ preferred_dark_theme }}" : "{{ default_theme }}";
</script>
<div id="mdbook-help-container">
<div id="mdbook-help-popup">
<h2 class="mdbook-help-title">Keyboard shortcuts</h2>
<div>
<p>Press <kbd>←</kbd> or <kbd>→</kbd> to navigate between chapters</p>
{{#if search_enabled}}
<p>Press <kbd>S</kbd> or <kbd>/</kbd> to search in the book</p>
{{/if}}
<p>Press <kbd>?</kbd> to show this help</p>
<p>Press <kbd>Esc</kbd> to hide this help</p>
</div>
</div>
</div>
<div id="mdbook-body-container">
<!-- Work around some values being stored in localStorage wrapped in quotes -->
<script type="text/javascript">
<script>
try {
var theme = localStorage.getItem('mdbook-theme');
var sidebar = localStorage.getItem('mdbook-sidebar');
let theme = localStorage.getItem('mdbook-theme');
let sidebar = localStorage.getItem('mdbook-sidebar');
if (theme.startsWith('"') && theme.endsWith('"')) {
localStorage.setItem('mdbook-theme', theme.slice(1, theme.length - 1));
}
if (sidebar.startsWith('"') && sidebar.endsWith('"')) {
localStorage.setItem('mdbook-sidebar', sidebar.slice(1, sidebar.length - 1));
}
@ -76,91 +94,107 @@
</script>
<!-- Set the theme before any content is loaded, prevents flash -->
<script type="text/javascript">
var theme;
<script>
const default_theme = window.matchMedia("(prefers-color-scheme: dark)").matches ? default_dark_theme : default_light_theme;
let theme;
try { theme = localStorage.getItem('mdbook-theme'); } catch(e) { }
if (theme === null || theme === undefined) { theme = default_theme; }
var html = document.querySelector('html');
html.classList.remove('no-js')
const html = document.documentElement;
html.classList.remove('{{ default_theme }}')
html.classList.add(theme);
html.classList.add('js');
html.classList.add("js");
</script>
<input type="checkbox" id="mdbook-sidebar-toggle-anchor" class="hidden">
<!-- Hide / unhide sidebar before it is displayed -->
<script type="text/javascript">
var html = document.querySelector('html');
var sidebar = 'hidden';
<script>
let sidebar = null;
const sidebar_toggle = document.getElementById("mdbook-sidebar-toggle-anchor");
if (document.body.clientWidth >= 1080) {
try { sidebar = localStorage.getItem('mdbook-sidebar'); } catch(e) { }
sidebar = sidebar || 'visible';
} else {
sidebar = 'hidden';
sidebar_toggle.checked = false;
}
if (sidebar === 'visible') {
sidebar_toggle.checked = true;
} else {
html.classList.remove('sidebar-visible');
}
html.classList.remove('sidebar-visible');
html.classList.add("sidebar-" + sidebar);
</script>
<nav id="sidebar" class="sidebar" aria-label="Table of contents">
<div class="sidebar-scrollbox">
{{#toc}}{{/toc}}
<nav id="mdbook-sidebar" class="sidebar" aria-label="Table of contents">
<!-- populated by js -->
<mdbook-sidebar-scrollbox class="sidebar-scrollbox"></mdbook-sidebar-scrollbox>
<noscript>
<iframe class="sidebar-iframe-outer" src="{{ path_to_root }}toc.html"></iframe>
</noscript>
<div id="mdbook-sidebar-resize-handle" class="sidebar-resize-handle">
<div class="sidebar-resize-indicator"></div>
</div>
<div id="sidebar-resize-handle" class="sidebar-resize-handle"></div>
</nav>
<div id="page-wrapper" class="page-wrapper">
<div id="mdbook-page-wrapper" class="page-wrapper">
<div class="page">
{{> header}}
<div id="menu-bar-hover-placeholder"></div>
<div id="menu-bar" class="menu-bar sticky bordered">
<div id="mdbook-menu-bar-hover-placeholder"></div>
<div id="mdbook-menu-bar" class="menu-bar sticky">
<div class="left-buttons">
<button id="sidebar-toggle" class="icon-button" type="button" title="Toggle Table of Contents" aria-label="Toggle Table of Contents" aria-controls="sidebar">
<i class="fa fa-bars"></i>
<label id="mdbook-sidebar-toggle" class="icon-button" for="mdbook-sidebar-toggle-anchor" title="Toggle Table of Contents" aria-label="Toggle Table of Contents" aria-controls="mdbook-sidebar">
{{fa "solid" "bars"}}
</label>
<button id="mdbook-theme-toggle" class="icon-button" type="button" title="Change theme" aria-label="Change theme" aria-haspopup="true" aria-expanded="false" aria-controls="mdbook-theme-list">
{{fa "solid" "paintbrush"}}
</button>
<button id="theme-toggle" class="icon-button" type="button" title="Change theme" aria-label="Change theme" aria-haspopup="true" aria-expanded="false" aria-controls="theme-list">
<i class="fa fa-paint-brush"></i>
</button>
<ul id="theme-list" class="theme-popup" aria-label="Themes" role="menu">
<li role="none"><button role="menuitem" class="theme" id="light">{{ theme_option "Light" }}</button></li>
<li role="none"><button role="menuitem" class="theme" id="rust">{{ theme_option "Rust" }}</button></li>
<li role="none"><button role="menuitem" class="theme" id="coal">{{ theme_option "Coal" }}</button></li>
<li role="none"><button role="menuitem" class="theme" id="navy">{{ theme_option "Navy" }}</button></li>
<li role="none"><button role="menuitem" class="theme" id="ayu">{{ theme_option "Ayu" }}</button></li>
<ul id="mdbook-theme-list" class="theme-popup" aria-label="Themes" role="menu">
<li role="none"><button role="menuitem" class="theme" id="mdbook-theme-default_theme">Auto</button></li>
<li role="none"><button role="menuitem" class="theme" id="mdbook-theme-light">Light</button></li>
<li role="none"><button role="menuitem" class="theme" id="mdbook-theme-rust">Rust</button></li>
<li role="none"><button role="menuitem" class="theme" id="mdbook-theme-coal">Coal</button></li>
<li role="none"><button role="menuitem" class="theme" id="mdbook-theme-navy">Navy</button></li>
<li role="none"><button role="menuitem" class="theme" id="mdbook-theme-ayu">Ayu</button></li>
</ul>
{{#if search_enabled}}
<button id="search-toggle" class="icon-button" type="button" title="Search. (Shortkey: s)" aria-label="Toggle Searchbar" aria-expanded="false" aria-keyshortcuts="S" aria-controls="searchbar">
<i class="fa fa-search"></i>
<button id="mdbook-search-toggle" class="icon-button" type="button" title="Search (`/`)" aria-label="Toggle Searchbar" aria-expanded="false" aria-keyshortcuts="/ s" aria-controls="mdbook-searchbar">
{{fa "solid" "magnifying-glass"}}
</button>
{{/if}}
<div class="version-picker">
<div class="dropdown">
<div class="select">
<span></span>
<i class="fa fa-chevron-down"></i>
</div>
<input type="hidden" name="version">
<ul class="dropdown-menu">
<!-- Versions will be added dynamically in version-picker.js -->
</ul>
</div>
<!-- BEGIN CUSTOM SYNAPSE MODIFICATIONS -->
<div class="version-picker">
<div class="dropdown">
<div class="select">
<span></span>
<i class="fa fa-chevron-down"></i>
</div>
<input type="hidden" name="version">
<ul class="dropdown-menu">
<!-- Versions will be added dynamically in version-picker.js -->
</ul>
</div>
</div>
<!-- END CUSTOM SYNAPSE MODIFICATIONS -->
<h1 class="menu-title">{{ book_title }}</h1>
<div class="right-buttons">
{{#if print_enable}}
<a href="{{ path_to_root }}print.html" title="Print this book" aria-label="Print this book">
<i id="print-button" class="fa fa-print"></i>
{{fa "solid" "print" "print-button"}}
</a>
{{/if}}
{{#if git_repository_url}}
<a href="{{git_repository_url}}" title="Git repository" aria-label="Git repository">
<i id="git-repository-button" class="fa {{git_repository_icon}}"></i>
{{fa git_repository_icon_class git_repository_icon}}
</a>
{{/if}}
{{#if git_repository_edit_url}}
<a href="{{git_repository_edit_url}}" title="Suggest an edit" aria-label="Suggest an edit">
<i id="git-edit-button" class="fa fa-edit"></i>
<a href="{{git_repository_edit_url}}" title="Suggest an edit" aria-label="Suggest an edit" rel="edit">
{{fa "solid" "pencil" "git-edit-button"}}
</a>
{{/if}}
@ -168,50 +202,58 @@
</div>
{{#if search_enabled}}
<div id="search-wrapper" class="hidden">
<form id="searchbar-outer" class="searchbar-outer">
<input type="search" id="searchbar" name="searchbar" placeholder="Search this book ..." aria-controls="searchresults-outer" aria-describedby="searchresults-header">
<div id="mdbook-search-wrapper" class="hidden">
<form id="mdbook-searchbar-outer" class="searchbar-outer">
<div class="search-wrapper">
<input type="search" id="mdbook-searchbar" name="searchbar" placeholder="Search this book ..." aria-controls="mdbook-searchresults-outer" aria-describedby="searchresults-header">
<div class="spinner-wrapper">
{{fa "solid" "spinner" "fa-spin"}}
</div>
</div>
</form>
<div id="searchresults-outer" class="searchresults-outer hidden">
<div id="searchresults-header" class="searchresults-header"></div>
<ul id="searchresults">
<div id="mdbook-searchresults-outer" class="searchresults-outer hidden">
<div id="mdbook-searchresults-header" class="searchresults-header"></div>
<ul id="mdbook-searchresults">
</ul>
</div>
</div>
{{/if}}
<!-- Apply ARIA attributes after the sidebar and the sidebar toggle button are added to the DOM -->
<script type="text/javascript">
document.getElementById('sidebar-toggle').setAttribute('aria-expanded', sidebar === 'visible');
document.getElementById('sidebar').setAttribute('aria-hidden', sidebar !== 'visible');
Array.from(document.querySelectorAll('#sidebar a')).forEach(function(link) {
<script>
document.getElementById('mdbook-sidebar-toggle').setAttribute('aria-expanded', sidebar === 'visible');
document.getElementById('mdbook-sidebar').setAttribute('aria-hidden', sidebar !== 'visible');
Array.from(document.querySelectorAll('#mdbook-sidebar a')).forEach(function(link) {
link.setAttribute('tabIndex', sidebar === 'visible' ? 0 : -1);
});
</script>
<div id="content" class="content">
<div id="mdbook-content" class="content">
<main>
<!-- Page table of contents -->
<div class="sidetoc">
<nav class="pagetoc"></nav>
</div>
{{{ content }}}
</main>
<nav class="nav-wrapper" aria-label="Page navigation">
<!-- Mobile navigation buttons -->
{{#previous}}
<a rel="prev" href="{{ path_to_root }}{{link}}" class="mobile-nav-chapters previous" title="Previous chapter" aria-label="Previous chapter" aria-keyshortcuts="Left">
<i class="fa fa-angle-left"></i>
{{#if previous}}
<a rel="prev" href="{{ path_to_root }}{{previous.link}}" class="mobile-nav-chapters previous" title="Previous chapter" aria-label="Previous chapter" aria-keyshortcuts="Left">
{{#if (eq ../text_direction "rtl")}}
{{fa "solid" "angle-right"}}
{{else}}
{{fa "solid" "angle-left"}}
{{/if}}
</a>
{{/previous}}
{{/if}}
{{#next}}
<a rel="next" href="{{ path_to_root }}{{link}}" class="mobile-nav-chapters next" title="Next chapter" aria-label="Next chapter" aria-keyshortcuts="Right">
<i class="fa fa-angle-right"></i>
{{#if next}}
<a rel="next prefetch" href="{{ path_to_root }}{{next.link}}" class="mobile-nav-chapters next" title="Next chapter" aria-label="Next chapter" aria-keyshortcuts="Right">
{{#if (eq ../text_direction "rtl")}}
{{fa "solid" "angle-left"}}
{{else}}
{{fa "solid" "angle-right"}}
{{/if}}
</a>
{{/next}}
{{/if}}
<div style="clear: both"></div>
</nav>
@@ -219,92 +261,92 @@
</div>
<nav class="nav-wide-wrapper" aria-label="Page navigation">
{{#previous}}
<a rel="prev" href="{{ path_to_root }}{{link}}" class="nav-chapters previous" title="Previous chapter" aria-label="Previous chapter" aria-keyshortcuts="Left">
<i class="fa fa-angle-left"></i>
{{#if previous}}
<a rel="prev" href="{{ path_to_root }}{{previous.link}}" class="nav-chapters previous" title="Previous chapter" aria-label="Previous chapter" aria-keyshortcuts="Left">
{{#if (eq ../text_direction "rtl")}}
{{fa "solid" "angle-right"}}
{{else}}
{{fa "solid" "angle-left"}}
{{/if}}
</a>
{{/previous}}
{{/if}}
{{#next}}
<a rel="next" href="{{ path_to_root }}{{link}}" class="nav-chapters next" title="Next chapter" aria-label="Next chapter" aria-keyshortcuts="Right">
<i class="fa fa-angle-right"></i>
{{#if next}}
<a rel="next prefetch" href="{{ path_to_root }}{{next.link}}" class="nav-chapters next" title="Next chapter" aria-label="Next chapter" aria-keyshortcuts="Right">
{{#if (eq text_direction "rtl")}}
{{fa "solid" "angle-left"}}
{{else}}
{{fa "solid" "angle-right"}}
{{/if}}
</a>
{{/next}}
{{/if}}
</nav>
</div>
{{#if livereload}}
<template id=fa-eye>{{fa "solid" "eye"}}</template>
<template id=fa-eye-slash>{{fa "solid" "eye-slash"}}</template>
<template id=fa-copy>{{fa "regular" "copy"}}</template>
<template id=fa-play>{{fa "solid" "play"}}</template>
<template id=fa-clock-rotate-left>{{fa "solid" "clock-rotate-left"}}</template>
{{#if live_reload_endpoint}}
<!-- Livereload script (if served using the cli tool) -->
<script type="text/javascript">
var socket = new WebSocket("{{{livereload}}}");
<script>
const wsProtocol = location.protocol === 'https:' ? 'wss:' : 'ws:';
const wsAddress = wsProtocol + "//" + location.host + "/" + "{{{live_reload_endpoint}}}";
const socket = new WebSocket(wsAddress);
socket.onmessage = function (event) {
if (event.data === "reload") {
socket.close();
location.reload();
}
};
window.onbeforeunload = function() {
socket.close();
}
</script>
{{/if}}
{{#if google_analytics}}
<!-- Google Analytics Tag -->
<script type="text/javascript">
var localAddrs = ["localhost", "127.0.0.1", ""];
// make sure we don't activate google analytics if the developer is
// inspecting the book locally...
if (localAddrs.indexOf(document.location.hostname) === -1) {
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','https://www.google-analytics.com/analytics.js','ga');
ga('create', '{{google_analytics}}', 'auto');
ga('send', 'pageview');
}
</script>
{{/if}}
{{#if playground_line_numbers}}
<script type="text/javascript">
<script>
window.playground_line_numbers = true;
</script>
{{/if}}
{{#if playground_copyable}}
<script type="text/javascript">
<script>
window.playground_copyable = true;
</script>
{{/if}}
{{#if playground_js}}
<script src="{{ path_to_root }}ace.js" type="text/javascript" charset="utf-8"></script>
<script src="{{ path_to_root }}editor.js" type="text/javascript" charset="utf-8"></script>
<script src="{{ path_to_root }}mode-rust.js" type="text/javascript" charset="utf-8"></script>
<script src="{{ path_to_root }}theme-dawn.js" type="text/javascript" charset="utf-8"></script>
<script src="{{ path_to_root }}theme-tomorrow_night.js" type="text/javascript" charset="utf-8"></script>
<script src="{{ resource "ace.js" }}"></script>
<script src="{{ resource "mode-rust.js" }}"></script>
<script src="{{ resource "editor.js" }}"></script>
<script src="{{ resource "theme-dawn.js" }}"></script>
<script src="{{ resource "theme-tomorrow_night.js" }}"></script>
{{/if}}
{{#if search_js}}
<script src="{{ path_to_root }}elasticlunr.min.js" type="text/javascript" charset="utf-8"></script>
<script src="{{ path_to_root }}mark.min.js" type="text/javascript" charset="utf-8"></script>
<script src="{{ path_to_root }}searcher.js" type="text/javascript" charset="utf-8"></script>
<script src="{{ resource "elasticlunr.min.js" }}"></script>
<script src="{{ resource "mark.min.js" }}"></script>
<script src="{{ resource "searcher.js" }}"></script>
{{/if}}
<script src="{{ path_to_root }}clipboard.min.js" type="text/javascript" charset="utf-8"></script>
<script src="{{ path_to_root }}highlight.js" type="text/javascript" charset="utf-8"></script>
<script src="{{ path_to_root }}book.js" type="text/javascript" charset="utf-8"></script>
<script src="{{ resource "clipboard.min.js" }}"></script>
<script src="{{ resource "highlight.js" }}"></script>
<script src="{{ resource "book.js" }}"></script>
<!-- Custom JS scripts -->
{{#each additional_js}}
<script type="text/javascript" src="{{ ../path_to_root }}{{this}}"></script>
<script src="{{ resource this}}"></script>
{{/each}}
{{#if is_print}}
{{#if mathjax_support}}
<script type="text/javascript">
<script>
window.addEventListener('load', function() {
MathJax.Hub.Register.StartupHook('End', function() {
window.setTimeout(window.print, 100);
@@ -312,7 +354,7 @@
});
</script>
{{else}}
<script type="text/javascript">
<script>
window.addEventListener('load', function() {
window.setTimeout(window.print, 100);
});
@@ -320,5 +362,21 @@
{{/if}}
{{/if}}
{{#if fragment_map}}
<script>
document.addEventListener('DOMContentLoaded', function() {
const fragmentMap =
{{{fragment_map}}}
;
const target = fragmentMap[window.location.hash];
if (target) {
let url = new URL(target, window.location.href);
window.location.replace(url.href);
}
});
</script>
{{/if}}
</div>
</body>
</html>
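The fragment_map block above emits a page-local redirect table: mdbook injects a JSON object mapping old #fragment values to new relative targets, and the inline script resolves the matching target against the current page URL before navigating. A minimal Python sketch of that lookup-and-resolve step, with a made-up map entry (the real map is generated by mdbook):

from urllib.parse import urljoin

# Hypothetical fragment map; in the template it is injected as {{{fragment_map}}}.
fragment_map = {"#old-anchor": "new_page.html#new-anchor"}

def resolve(current_url: str, fragment: str) -> str | None:
    # Mirrors `new URL(target, window.location.href)` in the script above.
    target = fragment_map.get(fragment)
    return urljoin(current_url, target) if target else None

print(resolve("https://example.com/docs/page.html", "#old-anchor"))
# -> https://example.com/docs/new_page.html#new-anchor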

95
poetry.lock generated
View file

@@ -31,7 +31,7 @@ description = "The ultimate Python library in building OAuth and OpenID Connect
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"oidc\" or extra == \"jwt\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\""
files = [
{file = "authlib-1.6.5-py2.py3-none-any.whl", hash = "sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a"},
{file = "authlib-1.6.5.tar.gz", hash = "sha256:6aaf9c79b7cc96c900f0b284061691c5d4e61221640a948fe690b556a6d6d10b"},
@@ -481,7 +481,7 @@ description = "XML bomb protection for Python stdlib modules"
optional = true
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
groups = ["main"]
markers = "extra == \"saml2\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"saml2\""
files = [
{file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
{file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
@@ -506,7 +506,7 @@ description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and l
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"saml2\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"saml2\""
files = [
{file = "elementpath-4.1.5-py3-none-any.whl", hash = "sha256:2ac1a2fb31eb22bbbf817f8cf6752f844513216263f0e3892c8e79782fe4bb55"},
{file = "elementpath-4.1.5.tar.gz", hash = "sha256:c2d6dc524b29ef751ecfc416b0627668119d8812441c555d7471da41d4bacb8d"},
@@ -556,7 +556,7 @@ description = "Python wrapper for hiredis"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"redis\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"redis\""
files = [
{file = "hiredis-3.3.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:9937d9b69321b393fbace69f55423480f098120bc55a3316e1ca3508c4dbbd6f"},
{file = "hiredis-3.3.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:50351b77f89ba6a22aff430b993653847f36b71d444509036baa0f2d79d1ebf4"},
@@ -879,7 +879,7 @@ description = "Jaeger Python OpenTracing Tracer implementation"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"opentracing\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"opentracing\""
files = [
{file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"},
]
@@ -1017,7 +1017,7 @@ description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
files = [
{file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"},
{file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"},
@@ -1119,7 +1119,7 @@ description = "Powerful and Pythonic XML processing library combining libxml2/li
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"url-preview\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"url-preview\""
files = [
{file = "lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e77dd455b9a16bbd2a5036a63ddbd479c19572af81b624e79ef422f929eef388"},
{file = "lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d444858b9f07cefff6455b983aea9a67f7462ba1f6cbe4a21e8bf6791bf2153"},
@@ -1405,7 +1405,7 @@ description = "An LDAP3 auth provider for Synapse"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
files = [
{file = "matrix-synapse-ldap3-0.3.0.tar.gz", hash = "sha256:8bb6517173164d4b9cc44f49de411d8cebdb2e705d5dd1ea1f38733c4a009e1d"},
{file = "matrix_synapse_ldap3-0.3.0-py3-none-any.whl", hash = "sha256:8b4d701f8702551e98cc1d8c20dbed532de5613584c08d0df22de376ba99159d"},
@@ -1648,7 +1648,7 @@ description = "OpenTracing API for Python. See documentation at http://opentraci
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"opentracing\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"opentracing\""
files = [
{file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"},
]
@@ -1838,7 +1838,7 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter"
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"postgres\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"postgres\""
files = [
{file = "psycopg2-2.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:103e857f46bb76908768ead4e2d0ba1d1a130e7b8ed77d3ae91e8b33481813e8"},
{file = "psycopg2-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:210daed32e18f35e3140a1ebe059ac29209dd96468f2f7559aa59f75ee82a5cb"},
@@ -1856,7 +1856,7 @@ description = ".. image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=mas
optional = true
python-versions = "*"
groups = ["main"]
markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
files = [
{file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"},
]
@@ -1872,7 +1872,7 @@ description = "A Simple library to enable psycopg2 compatability"
optional = true
python-versions = "*"
groups = ["main"]
markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
files = [
{file = "psycopg2cffi-compat-1.1.tar.gz", hash = "sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05"},
]
@@ -2154,7 +2154,7 @@ description = "A development tool to measure, monitor and analyze the memory beh
optional = true
python-versions = ">=3.6"
groups = ["main"]
markers = "extra == \"cache-memory\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"cache-memory\""
files = [
{file = "Pympler-1.0.1-py3-none-any.whl", hash = "sha256:d260dda9ae781e1eab6ea15bacb84015849833ba5555f141d2d9b7b7473b307d"},
{file = "Pympler-1.0.1.tar.gz", hash = "sha256:993f1a3599ca3f4fcd7160c7545ad06310c9e12f70174ae7ae8d4e25f6c5d3fa"},
@@ -2162,30 +2162,45 @@ files = [
[[package]]
name = "pynacl"
version = "1.5.0"
version = "1.6.2"
description = "Python binding to the Networking and Cryptography (NaCl) library"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.8"
groups = ["main", "dev"]
files = [
{file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"},
{file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"},
{file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"},
{file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"},
{file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"},
{file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"},
{file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"},
{file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"},
{file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"},
{file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"},
{file = "pynacl-1.6.2-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:622d7b07cc5c02c666795792931b50c91f3ce3c2649762efb1ef0d5684c81594"},
{file = "pynacl-1.6.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d071c6a9a4c94d79eb665db4ce5cedc537faf74f2355e4d502591d850d3913c0"},
{file = "pynacl-1.6.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe9847ca47d287af41e82be1dd5e23023d3c31a951da134121ab02e42ac218c9"},
{file = "pynacl-1.6.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:04316d1fc625d860b6c162fff704eb8426b1a8bcd3abacea11142cbd99a6b574"},
{file = "pynacl-1.6.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44081faff368d6c5553ccf55322ef2819abb40e25afaec7e740f159f74813634"},
{file = "pynacl-1.6.2-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:a9f9932d8d2811ce1a8ffa79dcbdf3970e7355b5c8eb0c1a881a57e7f7d96e88"},
{file = "pynacl-1.6.2-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:bc4a36b28dd72fb4845e5d8f9760610588a96d5a51f01d84d8c6ff9849968c14"},
{file = "pynacl-1.6.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3bffb6d0f6becacb6526f8f42adfb5efb26337056ee0831fb9a7044d1a964444"},
{file = "pynacl-1.6.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2fef529ef3ee487ad8113d287a593fa26f48ee3620d92ecc6f1d09ea38e0709b"},
{file = "pynacl-1.6.2-cp314-cp314t-win32.whl", hash = "sha256:a84bf1c20339d06dc0c85d9aea9637a24f718f375d861b2668b2f9f96fa51145"},
{file = "pynacl-1.6.2-cp314-cp314t-win_amd64.whl", hash = "sha256:320ef68a41c87547c91a8b58903c9caa641ab01e8512ce291085b5fe2fcb7590"},
{file = "pynacl-1.6.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d29bfe37e20e015a7d8b23cfc8bd6aa7909c92a1b8f41ee416bbb3e79ef182b2"},
{file = "pynacl-1.6.2-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:c949ea47e4206af7c8f604b8278093b674f7c79ed0d4719cc836902bf4517465"},
{file = "pynacl-1.6.2-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8845c0631c0be43abdd865511c41eab235e0be69c81dc66a50911594198679b0"},
{file = "pynacl-1.6.2-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:22de65bb9010a725b0dac248f353bb072969c94fa8d6b1f34b87d7953cf7bbe4"},
{file = "pynacl-1.6.2-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46065496ab748469cdd999246d17e301b2c24ae2fdf739132e580a0e94c94a87"},
{file = "pynacl-1.6.2-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a66d6fb6ae7661c58995f9c6435bda2b1e68b54b598a6a10247bfcdadac996c"},
{file = "pynacl-1.6.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:26bfcd00dcf2cf160f122186af731ae30ab120c18e8375684ec2670dccd28130"},
{file = "pynacl-1.6.2-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c8a231e36ec2cab018c4ad4358c386e36eede0319a0c41fed24f840b1dac59f6"},
{file = "pynacl-1.6.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:68be3a09455743ff9505491220b64440ced8973fe930f270c8e07ccfa25b1f9e"},
{file = "pynacl-1.6.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:8b097553b380236d51ed11356c953bf8ce36a29a3e596e934ecabe76c985a577"},
{file = "pynacl-1.6.2-cp38-abi3-win32.whl", hash = "sha256:5811c72b473b2f38f7e2a3dc4f8642e3a3e9b5e7317266e4ced1fba85cae41aa"},
{file = "pynacl-1.6.2-cp38-abi3-win_amd64.whl", hash = "sha256:62985f233210dee6548c223301b6c25440852e13d59a8b81490203c3227c5ba0"},
{file = "pynacl-1.6.2-cp38-abi3-win_arm64.whl", hash = "sha256:834a43af110f743a754448463e8fd61259cd4ab5bbedcf70f9dabad1d28a394c"},
{file = "pynacl-1.6.2.tar.gz", hash = "sha256:018494d6d696ae03c7e656e5e74cdfd8ea1326962cc401bcf018f1ed8436811c"},
]
[package.dependencies]
cffi = ">=1.4.1"
cffi = {version = ">=2.0.0", markers = "platform_python_implementation != \"PyPy\" and python_version >= \"3.9\""}
[package.extras]
docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
docs = ["sphinx (<7)", "sphinx_rtd_theme"]
tests = ["hypothesis (>=3.27.0)", "pytest (>=7.4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
[[package]]
name = "pyopenssl"
@@ -2271,7 +2286,7 @@ description = "Python implementation of SAML Version 2 Standard"
optional = true
python-versions = ">=3.9,<4.0"
groups = ["main"]
markers = "extra == \"saml2\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"saml2\""
files = [
{file = "pysaml2-7.5.0-py3-none-any.whl", hash = "sha256:bc6627cc344476a83c757f440a73fda1369f13b6fda1b4e16bca63ffbabb5318"},
{file = "pysaml2-7.5.0.tar.gz", hash = "sha256:f36871d4e5ee857c6b85532e942550d2cf90ea4ee943d75eb681044bbc4f54f7"},
@@ -2296,7 +2311,7 @@ description = "Extensions to the standard Python datetime module"
optional = true
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main"]
markers = "extra == \"saml2\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"saml2\""
files = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
@@ -2324,7 +2339,7 @@ description = "World timezone definitions, modern and historical"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"saml2\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"saml2\""
files = [
{file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"},
{file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"},
@@ -2728,7 +2743,7 @@ description = "Python client for Sentry (https://sentry.io)"
optional = true
python-versions = ">=3.6"
groups = ["main"]
markers = "extra == \"sentry\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"sentry\""
files = [
{file = "sentry_sdk-2.46.0-py2.py3-none-any.whl", hash = "sha256:4eeeb60198074dff8d066ea153fa6f241fef1668c10900ea53a4200abc8da9b1"},
{file = "sentry_sdk-2.46.0.tar.gz", hash = "sha256:91821a23460725734b7741523021601593f35731808afc0bb2ba46c27b8acd91"},
@@ -2938,7 +2953,7 @@ description = "Tornado IOLoop Backed Concurrent Futures"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"opentracing\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"opentracing\""
files = [
{file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"},
{file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"},
@@ -2954,7 +2969,7 @@ description = "Python bindings for the Apache Thrift RPC system"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"opentracing\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"opentracing\""
files = [
{file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"},
]
@@ -3027,7 +3042,7 @@ description = "Tornado is a Python web framework and asynchronous networking lib
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"opentracing\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"opentracing\""
files = [
{file = "tornado-6.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:f81067dad2e4443b015368b24e802d0083fecada4f0a4572fdb72fc06e54a9a6"},
{file = "tornado-6.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9ac1cbe1db860b3cbb251e795c701c41d343f06a96049d6274e7c77559117e41"},
@@ -3161,7 +3176,7 @@ description = "non-blocking redis client for python"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"redis\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"redis\""
files = [
{file = "txredisapi-1.4.11-py3-none-any.whl", hash = "sha256:ac64d7a9342b58edca13ef267d4fa7637c1aa63f8595e066801c1e8b56b22d0b"},
{file = "txredisapi-1.4.11.tar.gz", hash = "sha256:3eb1af99aefdefb59eb877b1dd08861efad60915e30ad5bf3d5bf6c5cedcdbc6"},
@@ -3372,14 +3387,14 @@ files = [
[[package]]
name = "urllib3"
version = "2.6.0"
version = "2.6.3"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
{file = "urllib3-2.6.0-py3-none-any.whl", hash = "sha256:c90f7a39f716c572c4e3e58509581ebd83f9b59cced005b7db7ad2d22b0db99f"},
{file = "urllib3-2.6.0.tar.gz", hash = "sha256:cb9bcef5a4b345d5da5d145dc3e30834f58e8018828cbc724d30b4cb7d4d49f1"},
{file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"},
{file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"},
]
[package.extras]
@@ -3407,7 +3422,7 @@ description = "An XML Schema validator and decoder"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"saml2\" or extra == \"all\""
markers = "extra == \"all\" or extra == \"saml2\""
files = [
{file = "xmlschema-2.4.0-py3-none-any.whl", hash = "sha256:dc87be0caaa61f42649899189aab2fd8e0d567f2cf548433ba7b79278d231a4a"},
{file = "xmlschema-2.4.0.tar.gz", hash = "sha256:d74cd0c10866ac609e1ef94a5a69b018ad16e39077bc6393408b40c6babee793"},

View file

@@ -1,6 +1,6 @@
[project]
name = "matrix-synapse"
version = "1.144.0"
version = "1.145.0rc3"
description = "Homeserver for the Matrix decentralised comms protocol"
readme = "README.rst"
authors = [
@@ -415,28 +415,30 @@ line-ending = "auto"
manifest-path = "rust/Cargo.toml"
module-name = "synapse.synapse_rust"
python-source = "."
sdist-include = [
"AUTHORS.rst",
"book.toml",
"changelog.d",
"CHANGES.md",
"CONTRIBUTING.md",
"demo",
"docs",
"INSTALL.md",
"mypy.ini",
"scripts-dev",
"synmark",
"sytest-blacklist",
"tests",
"UPGRADE.rst",
"Cargo.toml",
"Cargo.lock",
"rust/Cargo.toml",
"rust/build.rs",
"rust/src/**",
include = [
{ path = "AUTHORS.rst", format = "sdist" },
{ path = "book.toml", format = "sdist" },
{ path = "changelog.d/**/*", format = "sdist" },
{ path = "CHANGES.md", format = "sdist" },
{ path = "CONTRIBUTING.md", format = "sdist" },
{ path = "demo/**/*", format = "sdist" },
{ path = "docs/**/*", format = "sdist" },
{ path = "INSTALL.md", format = "sdist" },
{ path = "LICENSE-AGPL-3.0", format = "sdist" },
{ path = "LICENSE-COMMERCIAL", format = "sdist" },
{ path = "mypy.ini", format = "sdist" },
{ path = "scripts-dev/**/*", format = "sdist" },
{ path = "synmark/**/*", format = "sdist" },
{ path = "sytest-blacklist", format = "sdist" },
{ path = "tests/**/*", format = "sdist" },
{ path = "UPGRADE.rst", format = "sdist" },
{ path = "Cargo.toml", format = "sdist" },
{ path = "Cargo.lock", format = "sdist" },
{ path = "rust/Cargo.toml", format = "sdist" },
{ path = "rust/build.rs", format = "sdist" },
{ path = "rust/src/**", format = "sdist" },
]
sdist-exclude = ["synapse/*.so"]
exclude = ["synapse/*.so"]
[build-system]
# The upper bounds here are defensive, intended to prevent situations like

View file

@@ -1,5 +1,5 @@
$schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.144/synapse-config.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.145/synapse-config.schema.json
type: object
properties:
modules:
@@ -2274,6 +2274,16 @@ properties:
examples:
- per_second: 1.0
burst_count: 5.0
rc_user_directory:
$ref: "#/$defs/rc"
description: >-
This option allows admins to ratelimit searches in the user directory.
_Added in Synapse 1.145.0._
default:
per_second: 0.016
burst_count: 200.0
federation_rr_transactions_per_room_per_second:
type: integer
description: >-
@@ -2338,6 +2348,19 @@ properties:
default: true
examples:
- false
enable_local_media_storage:
type: boolean
description: >-
Enable the local on-disk media storage provider. When disabled, media is
stored only in configured `media_storage_providers` and temporary files are
used for processing.
**Warning:** If this option is set to `false` and no `media_storage_providers`
are configured, all media requests will return 404 errors as there will be
no storage backend available.
default: true
examples:
- false
media_store_path:
type: string
description: Directory where uploaded images and attachments are stored.
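Since enable_local_media_storage gates whether the on-disk provider is created at all, turning it off without configuring any media_storage_providers leaves no backend, and every media request 404s, as the warning above says. A hedged sketch of that selection logic (select_media_backends is a hypothetical helper; the real wiring is in MediaRepository further down this diff):

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)

def select_media_backends(
    enable_local: bool, base_path: str | None, providers: list[str]
) -> list[str]:
    # The local on-disk provider is only created when enabled...
    backends = [f"local:{base_path}"] if enable_local and base_path else []
    # ...and any configured media_storage_providers are appended after it.
    backends.extend(providers)
    if not backends:
        logger.warning(
            "Local media storage is disabled and no media_storage_providers "
            "are configured; all media requests will return 404."
        )
    return backends

print(select_media_backends(False, "/data/media_store", []))  # [] plus a warning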

View file

@@ -31,7 +31,7 @@ DISTS = (
"debian:sid", # (rolling distro, no EOL)
"ubuntu:jammy", # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04)
"ubuntu:noble", # 24.04 LTS (EOL 2029-06)
"ubuntu:plucky", # 25.04 (EOL 2026-01)
"ubuntu:questing", # 25.10 (EOL 2026-07)
"debian:trixie", # (EOL not specified yet)
)

View file

@@ -172,7 +172,7 @@ if __name__ == "__main__":
# Expect JSON data on stdin.
context, book = json.load(sys.stdin)
for section in book["sections"]:
for section in book["items"]:
if "Chapter" in section and section["Chapter"]["path"] == "upgrade.md":
section["Chapter"]["content"] = section["Chapter"]["content"].replace(
"<!-- REPLACE_WITH_SCHEMA_VERSIONS -->", calculate_version_chart()

View file

@@ -252,3 +252,9 @@ class RatelimitConfig(Config):
"rc_reports",
defaults={"per_second": 1, "burst_count": 5},
)
self.rc_user_directory = RatelimitSettings.parse(
config,
"rc_user_directory",
defaults={"per_second": 0.016, "burst_count": 200},
)
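The per_second/burst_count pair has the usual token-bucket reading: burst_count caps how many actions can be taken back-to-back, and per_second is the sustained refill rate once the burst is spent. A standalone sketch of those semantics (illustrative only; Synapse's real Ratelimiter differs in detail):

import time

class TokenBucket:
    def __init__(self, per_second: float, burst_count: float) -> None:
        self.per_second = per_second    # sustained refill rate
        self.burst_count = burst_count  # maximum bucket size (burst)
        self.tokens = burst_count
        self.last = time.monotonic()

    def allow(self) -> bool:
        now = time.monotonic()
        self.tokens = min(
            self.burst_count, self.tokens + (now - self.last) * self.per_second
        )
        self.last = now
        if self.tokens >= 1:
            self.tokens -= 1
            return True
        return False

# With the rc_user_directory defaults (per_second=0.016, burst_count=200),
# a client can issue ~200 searches immediately, then roughly one per minute.
limiter = TokenBucket(per_second=0.016, burst_count=200)
print(sum(limiter.allow() for _ in range(250)))  # -> 200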

View file

@@ -174,6 +174,11 @@ class ContentRepositoryConfig(Config):
config.get("media_store_path", "media_store")
)
# Whether to enable the local media storage provider. When disabled,
# media will only be stored in configured storage providers and temp
# files will be used for processing.
self.enable_local_media_storage = config.get("enable_local_media_storage", True)
backup_media_store_path = config.get("backup_media_store_path")
synchronous_backup_media_store = config.get(

View file

@@ -44,7 +44,7 @@ from synapse.types import (
UserInfo,
create_requester,
)
from synapse.visibility import filter_events_for_client
from synapse.visibility import filter_and_transform_events_for_client
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -251,7 +251,7 @@ class AdminHandler:
topological=last_event.depth,
)
events = await filter_events_for_client(
events = await filter_and_transform_events_for_client(
self._storage_controllers,
user_id,
events,

View file

@@ -31,7 +31,7 @@ from synapse.handlers.presence import format_user_presence_state
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.streams.config import PaginationConfig
from synapse.types import JsonDict, Requester, UserID
from synapse.visibility import filter_events_for_client
from synapse.visibility import filter_and_transform_events_for_client
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -156,7 +156,9 @@ class EventHandler:
event_id: str,
show_redacted: bool = False,
) -> EventBase | None:
"""Retrieve a single specified event.
"""Retrieve a single specified event on behalf of a user.
The event will be transformed in a user-specific and time-specific way,
e.g. having unsigned metadata added or being erased depending on who is accessing it.
Args:
user: The local user requesting the event
@@ -188,7 +190,7 @@
# The user is peeking if they aren't in the room already
is_peeking = not is_user_in_room
filtered = await filter_events_for_client(
filtered = await filter_and_transform_events_for_client(
self._storage_controllers,
user.to_string(),
[event],
@@ -198,4 +200,4 @@
if not filtered:
raise AuthError(403, "You don't have permission to access that event.")
return event
return filtered[0]
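The rename spells out the contract: the helper returns transformed copies of the events the user may see (e.g. with per-user unsigned metadata attached, or content erased), so the handler must return filtered[0] rather than the original event, which would bypass those transformations. An illustrative toy, not Synapse's implementation:

# Toy stand-in: filters by a made-up "hidden_from" field and returns
# transformed copies, never the caller's original objects.
def filter_and_transform_events_for_client(user_id, events):
    visible = [e for e in events if user_id not in e.get("hidden_from", ())]
    return [{**e, "unsigned": {"requested_by": user_id}} for e in visible]

event = {"event_id": "$abc", "hidden_from": ()}
filtered = filter_and_transform_events_for_client("@alice:example.org", [event])
assert filtered and filtered[0] is not event  # a transformed copy
print(filtered[0]["unsigned"])  # {'requested_by': '@alice:example.org'}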

View file

@@ -49,7 +49,7 @@ from synapse.types import (
from synapse.util import unwrapFirstError
from synapse.util.async_helpers import concurrently_execute, gather_results
from synapse.util.caches.response_cache import ResponseCache
from synapse.visibility import filter_events_for_client
from synapse.visibility import filter_and_transform_events_for_client
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -225,7 +225,7 @@ class InitialSyncHandler:
)
).addErrback(unwrapFirstError)
messages = await filter_events_for_client(
messages = await filter_and_transform_events_for_client(
self._storage_controllers,
user_id,
messages,
@@ -382,7 +382,7 @@
room_id, limit=pagin_config.limit, end_token=stream_token
)
messages = await filter_events_for_client(
messages = await filter_and_transform_events_for_client(
self._storage_controllers,
requester.user.to_string(),
messages,
@@ -496,7 +496,7 @@
).addErrback(unwrapFirstError)
)
messages = await filter_events_for_client(
messages = await filter_and_transform_events_for_client(
self._storage_controllers,
requester.user.to_string(),
messages,

View file

@@ -322,7 +322,7 @@ class MessageHandler:
current_membership,
_,
) = await self.store.get_local_current_membership_for_user_in_room(
user_id, event_id
user_id, room_id
)
return current_membership == Membership.JOIN

View file

@@ -46,7 +46,7 @@ from synapse.types.handlers import ShutdownRoomParams, ShutdownRoomResponse
from synapse.types.state import StateFilter
from synapse.util.async_helpers import ReadWriteLock
from synapse.util.duration import Duration
from synapse.visibility import filter_events_for_client
from synapse.visibility import filter_and_transform_events_for_client
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -684,7 +684,7 @@ class PaginationHandler:
events = await event_filter.filter(events)
if not use_admin_priviledge:
events = await filter_events_for_client(
events = await filter_and_transform_events_for_client(
self._storage_controllers,
user_id,
events,

View file

@@ -40,7 +40,7 @@ from synapse.storage.databases.main.relations import ThreadsNextBatch, _RelatedE
from synapse.streams.config import PaginationConfig
from synapse.types import JsonDict, Requester, UserID
from synapse.util.async_helpers import gather_results
from synapse.visibility import filter_events_for_client
from synapse.visibility import filter_and_transform_events_for_client
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -154,7 +154,7 @@ class RelationsHandler:
[e.event_id for e in related_events]
)
events = await filter_events_for_client(
events = await filter_and_transform_events_for_client(
self._storage_controllers,
user_id,
events,
@@ -599,7 +599,7 @@
# Limit the returned threads to those the user has participated in.
events = [event for event in events if participated[event.event_id]]
events = await filter_events_for_client(
events = await filter_and_transform_events_for_client(
self._storage_controllers,
user_id,
events,

View file

@@ -95,7 +95,7 @@ from synapse.util.caches.response_cache import ResponseCache
from synapse.util.duration import Duration
from synapse.util.iterutils import batch_iter
from synapse.util.stringutils import parse_and_validate_server_name
from synapse.visibility import filter_events_for_client
from synapse.visibility import filter_and_transform_events_for_client
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -1919,7 +1919,7 @@ class RoomContextHandler:
async def filter_evts(events: list[EventBase]) -> list[EventBase]:
if use_admin_priviledge:
return events
return await filter_events_for_client(
return await filter_and_transform_events_for_client(
self._storage_controllers,
user.to_string(),
events,

View file

@@ -33,7 +33,7 @@ from synapse.events import EventBase
from synapse.events.utils import SerializeEventConfig
from synapse.types import JsonDict, Requester, StrCollection, StreamKeyType, UserID
from synapse.types.state import StateFilter
from synapse.visibility import filter_events_for_client
from synapse.visibility import filter_and_transform_events_for_client
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -479,7 +479,7 @@ class SearchHandler:
filtered_events = await search_filter.filter([r["event"] for r in results])
events = await filter_events_for_client(
events = await filter_and_transform_events_for_client(
self._storage_controllers,
user.to_string(),
filtered_events,
@@ -580,7 +580,7 @@
filtered_events = await search_filter.filter([r["event"] for r in results])
events = await filter_events_for_client(
events = await filter_and_transform_events_for_client(
self._storage_controllers,
user.to_string(),
filtered_events,
@@ -667,13 +667,13 @@
len(res.events_after),
)
events_before = await filter_events_for_client(
events_before = await filter_and_transform_events_for_client(
self._storage_controllers,
user.to_string(),
res.events_before,
)
events_after = await filter_events_for_client(
events_after = await filter_and_transform_events_for_client(
self._storage_controllers,
user.to_string(),
res.events_after,

View file

@@ -71,7 +71,7 @@ from synapse.types.handlers.sliding_sync import (
)
from synapse.types.state import StateFilter
from synapse.util.async_helpers import concurrently_execute
from synapse.visibility import filter_events_for_client
from synapse.visibility import filter_and_transform_events_for_client
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -755,7 +755,7 @@ class SlidingSyncHandler:
timeline_events.reverse()
# Make sure we don't expose any events that the client shouldn't see
timeline_events = await filter_events_for_client(
timeline_events = await filter_and_transform_events_for_client(
self.storage_controllers,
user.to_string(),
timeline_events,

View file

@@ -78,7 +78,7 @@ from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.caches.lrucache import LruCache
from synapse.util.caches.response_cache import ResponseCache, ResponseCacheContext
from synapse.util.metrics import Measure
from synapse.visibility import filter_events_for_client
from synapse.visibility import filter_and_transform_events_for_client
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -679,7 +679,7 @@ class SyncHandler:
)
)
recents = await filter_events_for_client(
recents = await filter_and_transform_events_for_client(
self._storage_controllers,
sync_config.user.to_string(),
recents,
@@ -789,7 +789,7 @@
)
)
loaded_recents = await filter_events_for_client(
loaded_recents = await filter_and_transform_events_for_client(
self._storage_controllers,
sync_config.user.to_string(),
loaded_recents,

View file

@@ -64,7 +64,10 @@ from synapse.media.media_storage import (
SHA256TransparentIOReader,
SHA256TransparentIOWriter,
)
from synapse.media.storage_provider import StorageProviderWrapper
from synapse.media.storage_provider import (
FileStorageProviderBackend,
StorageProviderWrapper,
)
from synapse.media.thumbnailer import Thumbnailer, ThumbnailError
from synapse.media.url_previewer import UrlPreviewer
from synapse.storage.databases.main.media_repository import LocalMedia, RemoteMedia
@@ -142,10 +145,23 @@ class MediaRepository:
)
storage_providers.append(provider)
# If local media storage is enabled, create the local provider
local_provider: FileStorageProviderBackend | None = None
if hs.config.media.enable_local_media_storage and self.primary_base_path:
local_provider = FileStorageProviderBackend(hs, self.primary_base_path)
self.media_storage: MediaStorage = MediaStorage(
self.hs, self.primary_base_path, self.filepaths, storage_providers
self.hs, self.filepaths, storage_providers, local_provider
)
# Log a warning if there are no storage backends configured
if not hs.config.media.enable_local_media_storage and not storage_providers:
logger.warning(
"Local media storage is disabled and no media_storage_providers are "
"configured. All media requests will return 404 errors as there is "
"no storage backend available."
)
self.clock.looping_call(
self._start_update_recently_accessed, UPDATE_RECENTLY_ACCESSED_TS
)
@@ -782,10 +798,18 @@ class MediaRepository:
except SynapseError:
raise
except Exception as e:
# An exception may be because we downloaded media in another
# process, so let's check if we magically have the media.
media_info = await self.store.get_cached_remote_media(server_name, media_id)
if not media_info:
# If this is a constraint violation, it means another worker
# downloaded the media first. We should fetch the existing media info.
if isinstance(e, self.store.database_engine.module.IntegrityError):
# The file has already been cleaned up in _download_remote_file
# Just fetch the existing media info
media_info = await self.store.get_cached_remote_media(
server_name, media_id
)
if not media_info:
# This shouldn't happen, but let's raise an error if it does
raise SynapseError(500, "Failed to fetch remote media")
else:
raise e
file_id = media_info.filesystem_id
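The recovery above is a "first writer wins" pattern: when several media workers download the same remote media concurrently, each tries to insert the cache row, and the losers catch the unique-constraint violation and adopt the winner's copy. A standalone sqlite3 sketch of the shape (table and columns simplified):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE remote_media (origin TEXT, media_id TEXT, filesystem_id TEXT, "
    "PRIMARY KEY (origin, media_id))"
)

def store_or_fetch(origin: str, media_id: str, filesystem_id: str) -> str:
    try:
        conn.execute(
            "INSERT INTO remote_media VALUES (?, ?, ?)",
            (origin, media_id, filesystem_id),
        )
        return filesystem_id
    except sqlite3.IntegrityError:
        # Another worker won the race; discard our copy and use theirs.
        row = conn.execute(
            "SELECT filesystem_id FROM remote_media WHERE origin = ? AND media_id = ?",
            (origin, media_id),
        ).fetchone()
        return row[0]

print(store_or_fetch("example.org", "abc", "file-1"))  # file-1
print(store_or_fetch("example.org", "abc", "file-2"))  # file-1 (winner's copy)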
@@ -806,6 +830,39 @@
responder = await self.media_storage.fetch_media(file_info)
return responder, media_info
async def _store_remote_media_with_cleanup(
self,
server_name: str,
media_id: str,
media_type: str,
time_now_ms: int,
upload_name: str | None,
media_length: int,
filesystem_id: str,
sha256: str,
fname: str,
) -> None:
"""Store remote media in database and clean up file on constraint violation."""
try:
await self.store.store_cached_remote_media(
origin=server_name,
media_id=media_id,
media_type=media_type,
time_now_ms=time_now_ms,
upload_name=upload_name,
media_length=media_length,
filesystem_id=filesystem_id,
sha256=sha256,
)
except self.store.database_engine.module.IntegrityError:
# Another worker downloaded the media first. Clean up our file.
try:
os.remove(fname)
except Exception:
pass
# Re-raise so the caller can handle it
raise
async def _download_remote_file(
self,
server_name: str,
@@ -890,26 +947,21 @@
upload_name = get_filename_from_headers(headers)
time_now_ms = self.clock.time_msec()
# Multiple remote media download requests can race (when using
# multiple media repos), so this may throw a constraint violation
# exception. If it does we'll delete the newly downloaded file from
# disk (as we're in the ctx manager).
#
# However: we've already called `finish()` so we may have also
# written to the storage providers. This is preferable to the
# alternative where we call `finish()` *after* this, where we could
# end up having an entry in the DB but fail to write the files to
# the storage providers.
await self.store.store_cached_remote_media(
origin=server_name,
media_id=media_id,
media_type=media_type,
time_now_ms=time_now_ms,
upload_name=upload_name,
media_length=length,
filesystem_id=file_id,
sha256=sha256writer.hexdigest(),
)
# Multiple remote media download requests can race (when using
# multiple media repos), so this may throw a constraint violation
# exception. If it does we'll delete the newly downloaded file from
# disk.
await self._store_remote_media_with_cleanup(
server_name=server_name,
media_id=media_id,
media_type=media_type,
time_now_ms=time_now_ms,
upload_name=upload_name,
media_length=length,
filesystem_id=file_id,
sha256=sha256writer.hexdigest(),
fname=fname,
)
logger.info("Stored remote media in file %r", fname)
@@ -928,7 +980,6 @@
filesystem_id=file_id,
last_access_ts=time_now_ms,
quarantined_by=None,
quarantined_ts=None,
authenticated=authenticated,
sha256=sha256writer.hexdigest(),
)
@@ -1024,26 +1075,21 @@
upload_name = get_filename_from_headers(headers)
time_now_ms = self.clock.time_msec()
# Multiple remote media download requests can race (when using
# multiple media repos), so this may throw a constraint violation
# exception. If it does we'll delete the newly downloaded file from
# disk (as we're in the ctx manager).
#
# However: we've already called `finish()` so we may have also
# written to the storage providers. This is preferable to the
# alternative where we call `finish()` *after* this, where we could
# end up having an entry in the DB but fail to write the files to
# the storage providers.
await self.store.store_cached_remote_media(
origin=server_name,
media_id=media_id,
media_type=media_type,
time_now_ms=time_now_ms,
upload_name=upload_name,
media_length=length,
filesystem_id=file_id,
sha256=sha256writer.hexdigest(),
)
# Multiple remote media download requests can race (when using
# multiple media repos), so this may throw a constraint violation
# exception. If it does we'll delete the newly downloaded file from
# disk.
await self._store_remote_media_with_cleanup(
server_name=server_name,
media_id=media_id,
media_type=media_type,
time_now_ms=time_now_ms,
upload_name=upload_name,
media_length=length,
filesystem_id=file_id,
sha256=sha256writer.hexdigest(),
fname=fname,
)
logger.debug("Stored remote media in file %r", fname)
@@ -1062,7 +1108,6 @@
filesystem_id=file_id,
last_access_ts=time_now_ms,
quarantined_by=None,
quarantined_ts=None,
authenticated=authenticated,
sha256=sha256writer.hexdigest(),
)
@@ -1117,32 +1162,31 @@
t_type: str,
url_cache: bool,
) -> tuple[str, FileInfo] | None:
input_path = await self.media_storage.ensure_media_is_in_local_cache(
async with self.media_storage.ensure_media_is_in_local_cache(
FileInfo(None, media_id, url_cache=url_cache)
)
) as input_path:
try:
thumbnailer = Thumbnailer(input_path)
except ThumbnailError as e:
logger.warning(
"Unable to generate a thumbnail for local media %s using a method of %s and type of %s: %s",
media_id,
t_method,
t_type,
e,
)
return None
try:
thumbnailer = Thumbnailer(input_path)
except ThumbnailError as e:
logger.warning(
"Unable to generate a thumbnail for local media %s using a method of %s and type of %s: %s",
media_id,
t_method,
t_type,
e,
)
return None
with thumbnailer:
t_byte_source = await defer_to_thread(
self.hs.get_reactor(),
self._generate_thumbnail,
thumbnailer,
t_width,
t_height,
t_method,
t_type,
)
with thumbnailer:
t_byte_source = await defer_to_thread(
self.hs.get_reactor(),
self._generate_thumbnail,
thumbnailer,
t_width,
t_height,
t_method,
t_type,
)
if t_byte_source:
try:
@@ -1193,33 +1237,32 @@
t_method: str,
t_type: str,
) -> str | None:
input_path = await self.media_storage.ensure_media_is_in_local_cache(
async with self.media_storage.ensure_media_is_in_local_cache(
FileInfo(server_name, file_id)
)
) as input_path:
try:
thumbnailer = Thumbnailer(input_path)
except ThumbnailError as e:
logger.warning(
"Unable to generate a thumbnail for remote media %s from %s using a method of %s and type of %s: %s",
media_id,
server_name,
t_method,
t_type,
e,
)
return None
try:
thumbnailer = Thumbnailer(input_path)
except ThumbnailError as e:
logger.warning(
"Unable to generate a thumbnail for remote media %s from %s using a method of %s and type of %s: %s",
media_id,
server_name,
t_method,
t_type,
e,
)
return None
with thumbnailer:
t_byte_source = await defer_to_thread(
self.hs.get_reactor(),
self._generate_thumbnail,
thumbnailer,
t_width,
t_height,
t_method,
t_type,
)
with thumbnailer:
t_byte_source = await defer_to_thread(
self.hs.get_reactor(),
self._generate_thumbnail,
thumbnailer,
t_width,
t_height,
t_method,
t_type,
)
if t_byte_source:
try:
@@ -1289,151 +1332,157 @@
if not requirements:
return None
input_path = await self.media_storage.ensure_media_is_in_local_cache(
async with self.media_storage.ensure_media_is_in_local_cache(
FileInfo(server_name, file_id, url_cache=url_cache)
)
try:
thumbnailer = Thumbnailer(input_path)
except ThumbnailError as e:
logger.warning(
"Unable to generate thumbnails for remote media %s from %s of type %s: %s",
media_id,
server_name,
media_type,
e,
)
return None
with thumbnailer:
m_width = thumbnailer.width
m_height = thumbnailer.height
if m_width * m_height >= self.max_image_pixels:
logger.info(
"Image too large to thumbnail %r x %r > %r",
m_width,
m_height,
self.max_image_pixels,
) as input_path:
try:
thumbnailer = Thumbnailer(input_path)
except ThumbnailError as e:
logger.warning(
"Unable to generate thumbnails for remote media %s from %s of type %s: %s",
media_id,
server_name,
media_type,
e,
)
return None
if thumbnailer.transpose_method is not None:
m_width, m_height = await defer_to_thread(
self.hs.get_reactor(), thumbnailer.transpose
)
with thumbnailer:
m_width = thumbnailer.width
m_height = thumbnailer.height
# We deduplicate the thumbnail sizes by ignoring the cropped versions if
# they have the same dimensions as a scaled one.
thumbnails: dict[tuple[int, int, str], str] = {}
for requirement in requirements:
if requirement.method == "crop":
thumbnails.setdefault(
(requirement.width, requirement.height, requirement.media_type),
requirement.method,
if m_width * m_height >= self.max_image_pixels:
logger.info(
"Image too large to thumbnail %r x %r > %r",
m_width,
m_height,
self.max_image_pixels,
)
elif requirement.method == "scale":
t_width, t_height = thumbnailer.aspect(
requirement.width, requirement.height
)
t_width = min(m_width, t_width)
t_height = min(m_height, t_height)
thumbnails[(t_width, t_height, requirement.media_type)] = (
requirement.method
return None
if thumbnailer.transpose_method is not None:
m_width, m_height = await defer_to_thread(
self.hs.get_reactor(), thumbnailer.transpose
)
# Now we generate the thumbnails for each dimension, store it
for (t_width, t_height, t_type), t_method in thumbnails.items():
# Generate the thumbnail
if t_method == "crop":
t_byte_source = await defer_to_thread(
self.hs.get_reactor(),
thumbnailer.crop,
t_width,
t_height,
t_type,
# We deduplicate the thumbnail sizes by ignoring the cropped versions if
# they have the same dimensions as a scaled one.
thumbnails: dict[tuple[int, int, str], str] = {}
for requirement in requirements:
if requirement.method == "crop":
thumbnails.setdefault(
(
requirement.width,
requirement.height,
requirement.media_type,
),
requirement.method,
)
elif requirement.method == "scale":
t_width, t_height = thumbnailer.aspect(
requirement.width, requirement.height
)
t_width = min(m_width, t_width)
t_height = min(m_height, t_height)
thumbnails[(t_width, t_height, requirement.media_type)] = (
requirement.method
)
# Now we generate the thumbnails for each dimension, store it
for (t_width, t_height, t_type), t_method in thumbnails.items():
# Generate the thumbnail
if t_method == "crop":
t_byte_source = await defer_to_thread(
self.hs.get_reactor(),
thumbnailer.crop,
t_width,
t_height,
t_type,
)
elif t_method == "scale":
t_byte_source = await defer_to_thread(
self.hs.get_reactor(),
thumbnailer.scale,
t_width,
t_height,
t_type,
)
else:
logger.error("Unrecognized method: %r", t_method)
continue
if not t_byte_source:
continue
file_info = FileInfo(
server_name=server_name,
file_id=file_id,
url_cache=url_cache,
thumbnail=ThumbnailInfo(
width=t_width,
height=t_height,
method=t_method,
type=t_type,
length=t_byte_source.tell(),
),
)
elif t_method == "scale":
t_byte_source = await defer_to_thread(
self.hs.get_reactor(),
thumbnailer.scale,
t_width,
t_height,
t_type,
)
else:
logger.error("Unrecognized method: %r", t_method)
continue
if not t_byte_source:
continue
file_info = FileInfo(
server_name=server_name,
file_id=file_id,
url_cache=url_cache,
thumbnail=ThumbnailInfo(
width=t_width,
height=t_height,
method=t_method,
type=t_type,
length=t_byte_source.tell(),
),
)
async with self.media_storage.store_into_file(file_info) as (f, fname):
try:
await self.media_storage.write_to_file(t_byte_source, f)
finally:
t_byte_source.close()
# We flush and close the file to ensure that the bytes have
# been written before getting the size.
f.flush()
f.close()
t_len = os.path.getsize(fname)
# Write to database
if server_name:
# Multiple remote media download requests can race (when
# using multiple media repos), so this may throw a constraint
# violation exception. If it does we'll delete the newly
# generated thumbnail from disk (as we're in the ctx
# manager).
#
# However: we've already called `finish()` so we may have
# also written to the storage providers. This is preferable
# to the alternative where we call `finish()` *after* this,
# where we could end up having an entry in the DB but fail
# to write the files to the storage providers.
async with self.media_storage.store_into_file(file_info) as (
f,
fname,
):
try:
await self.store.store_remote_media_thumbnail(
server_name,
media_id,
file_id,
t_width,
t_height,
t_type,
t_method,
t_len,
)
except Exception as e:
thumbnail_exists = (
await self.store.get_remote_media_thumbnail(
await self.media_storage.write_to_file(t_byte_source, f)
finally:
t_byte_source.close()
# We flush and close the file to ensure that the bytes have
# been written before getting the size.
f.flush()
f.close()
t_len = os.path.getsize(fname)
# Write to database
if server_name:
# Multiple remote media download requests can race (when
# using multiple media repos), so this may throw a constraint
# violation exception. If it does we'll delete the newly
# generated thumbnail from disk (as we're in the ctx
# manager).
#
# However: we've already called `finish()` so we may have
# also written to the storage providers. This is preferable
# to the alternative where we call `finish()` *after* this,
# where we could end up having an entry in the DB but fail
# to write the files to the storage providers.
try:
await self.store.store_remote_media_thumbnail(
server_name,
media_id,
file_id,
t_width,
t_height,
t_type,
t_method,
t_len,
)
except Exception as e:
thumbnail_exists = (
await self.store.get_remote_media_thumbnail(
server_name,
media_id,
t_width,
t_height,
t_type,
)
)
if not thumbnail_exists:
raise e
else:
await self.store.store_local_thumbnail(
media_id, t_width, t_height, t_type, t_method, t_len
)
if not thumbnail_exists:
raise e
else:
await self.store.store_local_thumbnail(
media_id, t_width, t_height, t_type, t_method, t_len
)
return {"width": m_width, "height": m_height}
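The de-duplication step above is easy to miss in the re-indent: scaled sizes are clamped to the source dimensions, and a "crop" requirement is dropped whenever a "scale" requirement lands on the same (width, height, type) key, because crop uses setdefault while scale assigns directly. A standalone sketch, with a simplified stand-in for Thumbnailer.aspect:

def aspect(max_w: int, max_h: int, m_w: int, m_h: int) -> tuple[int, int]:
    # Simplified stand-in for Thumbnailer.aspect: largest size that fits
    # within (max_w, max_h) while keeping the source aspect ratio.
    if max_w * m_h > max_h * m_w:
        return (max_h * m_w) // m_h, max_h
    return max_w, (max_w * m_h) // m_w

def dedupe(requirements, m_w, m_h):
    thumbnails = {}
    for method, w, h, t in requirements:
        if method == "crop":
            thumbnails.setdefault((w, h, t), "crop")  # never overwrites a scale
        elif method == "scale":
            t_w, t_h = aspect(w, h, m_w, m_h)
            thumbnails[(min(m_w, t_w), min(m_h, t_h), t)] = "scale"
    return thumbnails

# A 320x240 crop and a 320x240 scale of a 640x480 source collapse to one entry:
reqs = [("crop", 320, 240, "image/png"), ("scale", 320, 240, "image/png")]
print(dedupe(reqs, 640, 480))  # {(320, 240, 'image/png'): 'scale'}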

View file

@@ -24,6 +24,7 @@ import json
import logging
import os
import shutil
import tempfile
from contextlib import closing
from io import BytesIO
from types import TracebackType
@@ -49,13 +50,13 @@ from twisted.internet.interfaces import IConsumer
from synapse.api.errors import NotFoundError
from synapse.logging.context import defer_to_thread, run_in_background
from synapse.logging.opentracing import start_active_span, trace, trace_with_opname
from synapse.media._base import ThreadedFileSender
from synapse.media.storage_provider import FileStorageProviderBackend
from synapse.util.clock import Clock
from synapse.util.duration import Duration
from synapse.util.file_consumer import BackgroundFileConsumer
from ..types import JsonDict
from ._base import FileInfo, Responder
from ._base import FileInfo, Responder, ThreadedFileSender
from .filepath import MediaFilePaths
if TYPE_CHECKING:
@@ -150,27 +151,30 @@ class MediaStorage:
class MediaStorage:
"""Responsible for storing/fetching files from local sources.
"""Responsible for storing/fetching files from storage providers.
Args:
hs
local_media_directory: Base path where we store media on disk
filepaths
storage_providers: List of StorageProvider that are used to fetch and store files.
local_provider: Optional local file storage provider for caching media on disk.
"""
def __init__(
self,
hs: "HomeServer",
local_media_directory: str,
filepaths: MediaFilePaths,
storage_providers: Sequence["StorageProvider"],
local_provider: "FileStorageProviderBackend | None" = None,
):
self.hs = hs
self.reactor = hs.get_reactor()
self.local_media_directory = local_media_directory
self.filepaths = filepaths
self.storage_providers = storage_providers
self.local_provider = local_provider
self.local_media_directory: str | None = None
if local_provider is not None:
self.local_media_directory = local_provider.base_directory
self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker
self.clock = hs.get_clock()
@@ -205,11 +209,11 @@
"""Async Context manager used to get a file like object to write into, as
described by file_info.
Actually yields a 2-tuple (file, fname,), where file is a file
like object that can be written to and fname is the absolute path of file
on disk.
Actually yields a 2-tuple (file, media_filepath,), where file is a file
like object that can be written to and media_filepath is the absolute path
of the file on disk.
fname can be used to read the contents from after upload, e.g. to
media_filepath can be used to read the contents from after upload, e.g. to
generate thumbnails.
Args:
@@ -217,25 +221,33 @@
Example:
async with media_storage.store_into_file(info) as (f, fname,):
async with media_storage.store_into_file(info) as (f, media_filepath,):
# .. write into f ...
"""
path = self._file_info_to_path(file_info)
fname = os.path.join(self.local_media_directory, path)
is_temp_file = False
dirname = os.path.dirname(fname)
os.makedirs(dirname, exist_ok=True)
if self.local_provider:
media_filepath = os.path.join(self.local_media_directory, path) # type: ignore[arg-type]
os.makedirs(os.path.dirname(media_filepath), exist_ok=True)
try:
with start_active_span("writing to main media repo"):
with open(fname, "wb") as f:
yield f, fname
with open(media_filepath, "wb") as f:
yield f, media_filepath
else:
# No local provider, write to temp file
is_temp_file = True
with tempfile.NamedTemporaryFile(delete=False) as f:
media_filepath = f.name
yield cast(BinaryIO, f), media_filepath
with start_active_span("writing to other storage providers"):
# Spam check and store to other providers (runs for both local and temp file cases)
try:
with start_active_span("spam checking and writing to storage providers"):
spam_check = (
await self._spam_checker_module_callbacks.check_media_file_for_spam(
ReadableFileWrapper(self.clock, fname), file_info
ReadableFileWrapper(self.clock, media_filepath), file_info
)
)
if spam_check != self._spam_checker_module_callbacks.NOT_SPAM:
@@ -251,17 +263,23 @@
with start_active_span(str(provider)):
await provider.store_file(path, file_info)
# If using a temp file, delete it after uploading to storage providers
if is_temp_file:
try:
os.remove(media_filepath)
except Exception:
pass
except Exception as e:
try:
os.remove(fname)
os.remove(media_filepath)
except Exception:
pass
raise e from None
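With no local provider configured, store_into_file now writes the upload to a NamedTemporaryFile, runs the spam check and provider uploads against that path, and deletes it afterwards, on success and on failure alike. The shape, reduced to a standalone sketch:

import os
import tempfile

def store_via_temp(data: bytes, upload) -> None:
    # Write to a temp file whose lifetime we control (delete=False), so the
    # path stays valid for the upload step after the handle is closed.
    with tempfile.NamedTemporaryFile(delete=False) as f:
        path = f.name
        f.write(data)
    try:
        upload(path)  # e.g. push to each configured storage provider
    finally:
        try:
            os.remove(path)  # the temporary copy is never kept locally
        except OSError:
            pass

store_via_temp(b"hello", lambda p: print("uploaded", os.path.getsize(p), "bytes"))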
async def fetch_media(self, file_info: FileInfo) -> Responder | None:
"""Attempts to fetch media described by file_info from the local cache
and configured storage providers.
"""Attempts to fetch media described by file_info from the configured storage providers.
Args:
file_info: Metadata about the media file
@@ -269,6 +287,18 @@
Returns:
Returns a Responder if the file was found, otherwise None.
"""
# URL cache files are stored locally and should not go through storage providers
if file_info.url_cache:
path = self._file_info_to_path(file_info)
if self.local_provider:
local_path = os.path.join(self.local_media_directory, path) # type: ignore[arg-type]
if os.path.isfile(local_path):
# Import here to avoid circular import
from .media_storage import FileResponder
return FileResponder(self.hs, open(local_path, "rb"))
return None
paths = [self._file_info_to_path(file_info)]
# fallback for remote thumbnails with no method in the filename
@@ -283,16 +313,18 @@
)
)
for path in paths:
local_path = os.path.join(self.local_media_directory, path)
if os.path.exists(local_path):
logger.debug("responding with local file %s", local_path)
return FileResponder(self.hs, open(local_path, "rb"))
logger.debug("local file %s did not exist", local_path)
# Check local provider first, then other storage providers
if self.local_provider:
for path in paths:
res: Any = await self.local_provider.fetch(path, file_info)
if res:
logger.debug("Streaming %s from %s", path, self.local_provider)
return res
logger.debug("%s not found on %s", path, self.local_provider)
for provider in self.storage_providers:
for path in paths:
res: Any = await provider.fetch(path, file_info)
res = await provider.fetch(path, file_info)
if res:
logger.debug("Streaming %s from %s", path, provider)
return res
@@ -301,50 +333,93 @@
return None
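The lookup order above (local provider first, then the remaining providers) means any object with async store_file/fetch methods can serve media. A minimal sketch of a custom provider, assuming the interface visible in this diff and the synapse.media.media_storage module path:

import os
import shutil

class MirrorStorageProvider:
    """Sketch of a StorageProvider that mirrors media into a second directory.

    Assumes the interface used in this diff: async store_file() copies the file
    out of the primary media store, async fetch() returns a Responder or None.
    """

    def __init__(self, hs, base_directory: str):
        self.hs = hs
        self.base_directory = base_directory

    async def store_file(self, path: str, file_info) -> None:
        # `path` is relative to the primary media store directory; this assumes
        # a primary store exists at hs.config.media.media_store_path.
        src = os.path.join(self.hs.config.media.media_store_path, path)
        dst = os.path.join(self.base_directory, path)
        os.makedirs(os.path.dirname(dst), exist_ok=True)
        shutil.copyfile(src, dst)

    async def fetch(self, path: str, file_info):
        dst = os.path.join(self.base_directory, path)
        if os.path.isfile(dst):
            # Local import mirrors the circular-import workaround used above;
            # the module path is assumed.
            from synapse.media.media_storage import FileResponder

            return FileResponder(self.hs, open(dst, "rb"))
        return None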
@trace
async def ensure_media_is_in_local_cache(self, file_info: FileInfo) -> str:
"""Ensures that the given file is in the local cache. Attempts to
download it from storage providers if it isn't.
@contextlib.asynccontextmanager
async def ensure_media_is_in_local_cache(
self, file_info: FileInfo
) -> AsyncIterator[str]:
"""Async context manager that ensures the given file is in the local cache.
Attempts to download it from storage providers if it isn't.
When no local provider is configured, the file is downloaded to a temporary
location and automatically cleaned up when the context manager exits.
Args:
file_info
Returns:
Yields:
Full path to local file
Example:
async with media_storage.ensure_media_is_in_local_cache(file_info) as path:
# use path to read the file
"""
path = self._file_info_to_path(file_info)
local_path = os.path.join(self.local_media_directory, path)
if os.path.exists(local_path):
return local_path
if self.local_provider:
local_path = os.path.join(self.local_media_directory, path) # type: ignore[arg-type]
if os.path.exists(local_path):
yield local_path
return
# Fallback for paths without method names
# Should be removed in the future
if file_info.thumbnail and file_info.server_name:
legacy_path = self.filepaths.remote_media_thumbnail_rel_legacy(
server_name=file_info.server_name,
file_id=file_info.file_id,
width=file_info.thumbnail.width,
height=file_info.thumbnail.height,
content_type=file_info.thumbnail.type,
)
legacy_local_path = os.path.join(self.local_media_directory, legacy_path)
if os.path.exists(legacy_local_path):
return legacy_local_path
# Fallback for paths without method names
# Should be removed in the future
if file_info.thumbnail and file_info.server_name:
legacy_path = self.filepaths.remote_media_thumbnail_rel_legacy(
server_name=file_info.server_name,
file_id=file_info.file_id,
width=file_info.thumbnail.width,
height=file_info.thumbnail.height,
content_type=file_info.thumbnail.type,
)
legacy_local_path = os.path.join(
self.local_media_directory, # type: ignore[arg-type]
legacy_path,
)
if os.path.exists(legacy_local_path):
yield legacy_local_path
return
dirname = os.path.dirname(local_path)
os.makedirs(dirname, exist_ok=True)
os.makedirs(os.path.dirname(local_path), exist_ok=True)
for provider in self.storage_providers:
res: Any = await provider.fetch(path, file_info)
if res:
with res:
consumer = BackgroundFileConsumer(
open(local_path, "wb"), self.reactor
)
await res.write_to_consumer(consumer)
await consumer.wait()
return local_path
for provider in self.storage_providers:
remote_res: Any = await provider.fetch(path, file_info)
if remote_res:
with remote_res:
consumer = BackgroundFileConsumer(
open(local_path, "wb"), self.reactor
)
await remote_res.write_to_consumer(consumer)
await consumer.wait()
yield local_path
return
raise NotFoundError()
raise NotFoundError()
else:
# No local provider, download to temp file and clean up after use
for provider in self.storage_providers:
res: Any = await provider.fetch(path, file_info)
if res:
temp_path = None
try:
with tempfile.NamedTemporaryFile(
delete=False, suffix=os.path.splitext(path)[1]
) as tmp:
temp_path = tmp.name
with res:
consumer = BackgroundFileConsumer(
open(temp_path, "wb"), self.reactor
)
await res.write_to_consumer(consumer)
await consumer.wait()
yield temp_path
finally:
if temp_path:
try:
os.remove(temp_path)
except Exception:
pass
return
raise NotFoundError()
@trace
def _file_info_to_path(self, file_info: FileInfo) -> str:
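Since ensure_media_is_in_local_cache is now an async context manager, callers change from awaiting a path to entering a with-block. A hedged before/after sketch (the caller is illustrative):

# Before this change (sketch):
#     local_path = await media_storage.ensure_media_is_in_local_cache(file_info)
#
# After this change:
async def read_media(media_storage, file_info) -> bytes:
    async with media_storage.ensure_media_is_in_local_cache(file_info) as local_path:
        # The path is only valid inside this block; with no local provider it
        # is a temporary file that is removed when the block exits.
        with open(local_path, "rb") as f:
            return f.read()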

View file

@@ -31,7 +31,6 @@ from synapse.logging.opentracing import start_active_span, trace_with_opname
from synapse.util.async_helpers import maybe_awaitable
from ._base import FileInfo, Responder
from .media_storage import FileResponder
logger = logging.getLogger(__name__)
@@ -178,6 +177,9 @@ class FileStorageProviderBackend(StorageProvider):
backup_fname = os.path.join(self.base_directory, path)
if os.path.isfile(backup_fname):
# Import here to avoid circular import
from .media_storage import FileResponder
return FileResponder(self.hs, open(backup_fname, "rb"))
return None

View file

@@ -633,13 +633,6 @@ class ThumbnailProvider:
# width/height/method so we can just call the "generate exact"
# methods.
# First let's check that we do actually have the original image
# still. This will throw a 404 if we don't.
# TODO: We should refetch the thumbnails for remote media.
await self.media_storage.ensure_media_is_in_local_cache(
FileInfo(server_name, file_id, url_cache=url_cache)
)
if server_name:
await self.media_repo.generate_remote_exact_thumbnail(
server_name,

View file

@@ -63,7 +63,7 @@ from synapse.util.async_helpers import (
)
from synapse.util.duration import Duration
from synapse.util.stringutils import shortstr
from synapse.visibility import filter_events_for_client
from synapse.visibility import filter_and_transform_events_for_client
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -783,7 +783,7 @@ class Notifier:
)
if keyname == StreamKeyType.ROOM:
new_events = await filter_events_for_client(
new_events = await filter_and_transform_events_for_client(
self._storage_controllers,
user.to_string(),
new_events,

View file

@@ -49,7 +49,7 @@ from synapse.storage.databases.main.event_push_actions import EmailPushAction
from synapse.types import StateMap, UserID
from synapse.types.state import StateFilter
from synapse.util.async_helpers import concurrently_execute
from synapse.visibility import filter_events_for_client
from synapse.visibility import filter_and_transform_events_for_client
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -537,12 +537,11 @@ class Mailer:
"messages": [],
}
the_events = await filter_events_for_client(
the_events = await filter_and_transform_events_for_client(
self._storage_controllers,
user_id,
results.events_before,
results.events_before + [notif_event],
)
the_events.append(notif_event)
for event in the_events:
messagevars = await self._get_message_vars(notif, event, room_state_ids)

View file

@@ -505,6 +505,18 @@ class NewActiveTaskCommand(_SimpleCommand):
NAME = "NEW_ACTIVE_TASK"
class CancelTaskCommand(_SimpleCommand):
"""Sent to inform the instance handling background tasks that a task
has been cancelled and should be terminated.
Format::
CANCEL_TASK "<task_id>"
"""
NAME = "CANCEL_TASK"
_COMMANDS: tuple[type[Command], ...] = (
ServerCommand,
RdataCommand,
@@ -520,6 +532,7 @@ _COMMANDS: tuple[type[Command], ...] = (
ClearUserSyncsCommand,
LockReleasedCommand,
NewActiveTaskCommand,
CancelTaskCommand,
)
# Map of command name to command type.
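A hedged sketch of the resulting wire traffic, assuming _SimpleCommand keeps the NAME-then-payload framing implied by the Format:: blocks above (the task id is illustrative):

cmd = CancelTaskCommand("7bd870d4-8bf7-4b0a-a3d1-0f8e2f0bb967")  # id is illustrative
# What ends up on the replication connection (framing assumed from _SimpleCommand):
#
#     CANCEL_TASK 7bd870d4-8bf7-4b0a-a3d1-0f8e2f0bb967
#
line = f"{cmd.NAME} {cmd.data}"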

View file

@@ -35,6 +35,7 @@ from twisted.internet.protocol import ReconnectingClientFactory
from synapse.metrics import SERVER_NAME_LABEL, LaterGauge
from synapse.replication.tcp.commands import (
CancelTaskCommand,
ClearUserSyncsCommand,
Command,
FederationAckCommand,
@@ -746,10 +747,17 @@ class ReplicationCommandHandler:
def on_NEW_ACTIVE_TASK(
self, conn: IReplicationConnection, cmd: NewActiveTaskCommand
) -> None:
"""Called when get a new NEW_ACTIVE_TASK command."""
"""Called when we get a new NEW_ACTIVE_TASK command."""
if self._task_scheduler:
self._task_scheduler.on_new_task(cmd.data)
async def on_CANCEL_TASK(
self, conn: IReplicationConnection, cmd: CancelTaskCommand
) -> None:
"""Called when we get a new CANCEL_TASK command."""
if self._task_scheduler:
await self._task_scheduler.on_cancel_task(cmd.data)
def new_connection(self, connection: IReplicationConnection) -> None:
"""Called when we have a new connection."""
self._connections.append(connection)
@@ -872,6 +880,10 @@ class ReplicationCommandHandler:
"""Called when a new task has been scheduled for immediate launch and is ACTIVE."""
self.send_command(NewActiveTaskCommand(task_id))
def send_cancel_task(self, task_id: str) -> None:
"""Called when a scheduled task has been cancelled and should be terminated."""
self.send_command(CancelTaskCommand(task_id))
UpdateToken = TypeVar("UpdateToken")
UpdateRow = TypeVar("UpdateRow")

View file

@@ -293,38 +293,6 @@ class ListMediaInRoom(RestServlet):
return HTTPStatus.OK, {"local": local_mxcs, "remote": remote_mxcs}
class ListQuarantinedMedia(RestServlet):
"""Lists all quarantined media on the server."""
PATTERNS = admin_patterns("/media/quarantined$")
def __init__(self, hs: "HomeServer"):
self.store = hs.get_datastores().main
self.auth = hs.get_auth()
async def on_GET(
self,
request: SynapseRequest,
) -> tuple[int, JsonDict]:
await assert_requester_is_admin(self.auth, request)
start = parse_integer(request, "from", default=0)
limit = parse_integer(request, "limit", default=100)
local_or_remote = parse_string(request, "kind", required=True)
if local_or_remote not in ["local", "remote"]:
raise SynapseError(
HTTPStatus.BAD_REQUEST,
"Query parameter `kind` must be either 'local' or 'remote'.",
)
mxcs = await self.store.get_quarantined_media_mxcs(
start, limit, local_or_remote == "local"
)
return HTTPStatus.OK, {"media": mxcs}
class PurgeMediaCacheRestServlet(RestServlet):
PATTERNS = admin_patterns("/purge_media_cache$")
@@ -564,7 +532,6 @@ def register_servlets_for_media_repo(hs: "HomeServer", http_server: HttpServer)
ProtectMediaByID(hs).register(http_server)
UnprotectMediaByID(hs).register(http_server)
ListMediaInRoom(hs).register(http_server)
ListQuarantinedMedia(hs).register(http_server)
# XXX DeleteMediaByDateSize must be registered before DeleteMediaByID as
# their URL routes overlap.
DeleteMediaByDateSize(hs).register(http_server)

View file

@@ -41,7 +41,12 @@ class ScheduledTasksRestServlet(RestServlet):
# extract query params
action_name = parse_string(request, "action_name")
resource_id = parse_string(request, "resource_id")
status = parse_string(request, "job_status")
status = parse_string(request, "status")
# This parameter was historically called `job_status`, while the Admin API docs
# defined it as `status`. We now support both, as `status` is generally
# a nicer name. A v2 of this endpoint should keep only `status`.
if status is None:
status = parse_string(request, "job_status")
max_timestamp = parse_integer(request, "max_timestamp")
actions = [action_name] if action_name else None
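Either spelling works during the transition. A hedged sketch of calling the endpoint with the preferred parameter; the /_synapse/admin/v1/scheduled_tasks path and the response key are assumptions based on the Admin API docs rather than part of this diff:

import requests

def list_failed_tasks(base_url: str, admin_token: str) -> list:
    # `status=failed` is the preferred spelling; `job_status=failed` still works.
    resp = requests.get(
        f"{base_url}/_synapse/admin/v1/scheduled_tasks",
        params={"status": "failed"},
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    resp.raise_for_status()
    return resp.json().get("scheduled_tasks", [])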

View file

@@ -67,7 +67,11 @@ class AuthRestServlet(RestServlet):
if not session:
raise SynapseError(400, "No session supplied")
if stagetype == "org.matrix.cross_signing_reset":
# We support the unstable (`org.matrix.cross_signing_reset`) name from MSC4312 until
# enough clients have adopted the stable name (`m.oauth`).
# Note: `org.matrix.cross_signing_reset` *is* the stable name of the *action* in the
# authorization server metadata. The unstable status only applies to the UIA stage name.
if stagetype == "m.oauth" or stagetype == "org.matrix.cross_signing_reset":
if self.hs.config.mas.enabled:
assert isinstance(self.auth, MasDelegatedAuth)

View file

@@ -560,9 +560,14 @@ class SigningKeyUploadServlet(RestServlet):
{
"session": "dummy",
"flows": [
{"stages": ["m.oauth"]},
# The unstable name from MSC4312 should be supported until enough clients have adopted the stable (`m.oauth`) name:
{"stages": ["org.matrix.cross_signing_reset"]},
],
"params": {
"m.oauth": {
"url": url,
},
"org.matrix.cross_signing_reset": {
"url": url,
},
@@ -594,9 +599,14 @@
{
"session": "dummy",
"flows": [
{"stages": ["m.oauth"]},
# The unstable name from MSC4312 should be supported until enough clients have adopted the stable (`m.oauth`) name:
{"stages": ["org.matrix.cross_signing_reset"]},
],
"params": {
"m.oauth": {
"url": url,
},
"org.matrix.cross_signing_reset": {
"url": url,
},
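While both names are advertised, a client can prefer the stable stage and fall back to the unstable one. A minimal client-side sketch (the helper is illustrative, not part of this change):

def pick_reset_stage(uia_error_body: dict) -> str | None:
    """Prefer the stable `m.oauth` stage, falling back to the MSC4312 unstable name."""
    stages = {
        stage
        for flow in uia_error_body.get("flows", [])
        for stage in flow.get("stages", [])
    }
    for candidate in ("m.oauth", "org.matrix.cross_signing_reset"):
        if candidate in stages:
            return candidate
    return None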

View file

@@ -23,6 +23,7 @@
import logging
from typing import TYPE_CHECKING
from synapse.api.errors import SynapseError
from synapse.api.ratelimiting import Ratelimiter
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet, parse_json_object_from_request
from synapse.http.site import SynapseRequest
@@ -46,6 +47,12 @@ class UserDirectorySearchRestServlet(RestServlet):
self.auth = hs.get_auth()
self.user_directory_handler = hs.get_user_directory_handler()
self._per_user_limiter = Ratelimiter(
store=hs.get_datastores().main,
clock=hs.get_clock(),
cfg=hs.config.ratelimiting.rc_user_directory,
)
async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonMapping]:
"""Searches for users in directory
@@ -69,6 +76,8 @@
if not self.hs.config.userdirectory.user_directory_search_enabled:
return 200, {"limited": False, "results": []}
await self._per_user_limiter.ratelimit(requester)
body = parse_json_object_from_request(request)
limit = int(body.get("limit", 10))
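The limiter reads its settings from rc_user_directory. A hedged sketch of the homeserver.yaml shape, assuming it follows the per_second/burst_count pattern of the other rc_* options (values illustrative):

# Valid Python holding the assumed YAML shape; the numbers are illustrative.
EXAMPLE_HOMESERVER_YAML = """
rc_user_directory:
  per_second: 0.1
  burst_count: 10
"""
# Once the burst is exhausted, the `ratelimit(requester)` call above rejects
# further searches for that user with a 429.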

View file

@@ -60,15 +60,16 @@ class CreateResource(RestServlet):
# If the create media requests for the user are over the limit, drop them.
await self._create_media_rate_limiter.ratelimit(requester)
(
reached_pending_limit,
first_expiration_ts,
) = await self.media_repo.reached_pending_media_limit(requester.user)
if reached_pending_limit:
raise LimitExceededError(
limiter_name="max_pending_media_uploads",
retry_after_ms=first_expiration_ts - self.clock.time_msec(),
)
if not requester.app_service or requester.app_service.is_rate_limited():
(
reached_pending_limit,
first_expiration_ts,
) = await self.media_repo.reached_pending_media_limit(requester.user)
if reached_pending_limit:
raise LimitExceededError(
limiter_name="max_pending_media_uploads",
retry_after_ms=first_expiration_ts - self.clock.time_msec(),
)
content_uri, unused_expires_at = await self.media_repo.create_media_id(
requester.user
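The new guard means application services that are exempt from rate limiting also skip the pending-media check. A hedged sketch of the relevant registration fragment, assuming the standard appservice registration format (fields illustrative):

# Python string holding the assumed registration-YAML fragment:
EXAMPLE_REGISTRATION_FRAGMENT = """
sender_localpart: bridgebot   # illustrative
rate_limited: false           # is_rate_limited() returns False, so the check is skipped
"""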

View file

@@ -61,7 +61,6 @@ class LocalMedia:
url_cache: str | None
last_access_ts: int
quarantined_by: str | None
quarantined_ts: int | None
safe_from_quarantine: bool
user_id: str | None
authenticated: bool | None
@@ -79,7 +78,6 @@ class RemoteMedia:
created_ts: int
last_access_ts: int
quarantined_by: str | None
quarantined_ts: int | None
authenticated: bool | None
sha256: str | None
@@ -245,7 +243,6 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
"user_id",
"authenticated",
"sha256",
"quarantined_ts",
),
allow_none=True,
desc="get_local_media",
@@ -265,7 +262,6 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
user_id=row[8],
authenticated=row[9],
sha256=row[10],
quarantined_ts=row[11],
)
async def get_local_media_by_user_paginate(
@@ -323,8 +319,7 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
safe_from_quarantine,
user_id,
authenticated,
sha256,
quarantined_ts
sha256
FROM local_media_repository
WHERE user_id = ?
ORDER BY {order_by_column} {order}, media_id ASC
@@ -350,7 +345,6 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
user_id=row[9],
authenticated=row[10],
sha256=row[11],
quarantined_ts=row[12],
)
for row in txn
]
@@ -701,7 +695,6 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
"quarantined_by",
"authenticated",
"sha256",
"quarantined_ts",
),
allow_none=True,
desc="get_cached_remote_media",
@@ -720,7 +713,6 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
quarantined_by=row[6],
authenticated=row[7],
sha256=row[8],
quarantined_ts=row[9],
)
async def store_cached_remote_media(

View file

@@ -945,50 +945,6 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
max_lifetime=max_lifetime,
)
async def get_quarantined_media_mxcs(
self, index_start: int, index_limit: int, local: bool
) -> list[str]:
"""Retrieves all the quarantined media MXC URIs starting from the given position,
ordered from oldest quarantined timestamp, then alphabetically by media ID
(including origin).
Note that on established servers the "quarantined timestamp" may be zero for
media that was quarantined before the quarantined_ts field was introduced.
Args:
index_start: The position to start from.
index_limit: The maximum number of results to return.
local: When true, only local media will be returned. When false, only remote media will be returned.
Returns:
The quarantined media as a list of media IDs.
"""
def _get_quarantined_media_mxcs_txn(
txn: LoggingTransaction,
) -> list[str]:
# We order by quarantined timestamp *and* media ID (including origin, when
# known) to ensure the ordering is stable for established servers.
if local:
sql = "SELECT '' as media_origin, media_id FROM local_media_repository WHERE quarantined_by IS NOT NULL ORDER BY quarantined_ts, media_id ASC LIMIT ? OFFSET ?"
else:
sql = "SELECT media_origin, media_id FROM remote_media_cache WHERE quarantined_by IS NOT NULL ORDER BY quarantined_ts, media_origin, media_id ASC LIMIT ? OFFSET ?"
txn.execute(sql, (index_limit, index_start))
mxcs = []
for media_origin, media_id in txn:
if local:
media_origin = self.hs.hostname
mxcs.append(f"mxc://{media_origin}/{media_id}")
return mxcs
return await self.db_pool.runInteraction(
"get_quarantined_media_mxcs",
_get_quarantined_media_mxcs_txn,
)
async def get_media_mxcs_in_room(self, room_id: str) -> tuple[list[str], list[str]]:
"""Retrieves all the local and remote media MXC URIs in a given room
@@ -996,7 +952,7 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
room_id
Returns:
The local and remote media as lists of the media IDs.
"""
def _get_media_mxcs_in_room_txn(
@@ -1191,10 +1147,6 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
The total number of media items quarantined
"""
total_media_quarantined = 0
now_ts: int | None = self.clock.time_msec()
if quarantined_by is None:
now_ts = None
# Effectively a legacy path, update any media that was explicitly named.
if media_ids:
@@ -1203,13 +1155,13 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
)
sql = f"""
UPDATE local_media_repository
SET quarantined_by = ?, quarantined_ts = ?
SET quarantined_by = ?
WHERE {sql_many_clause_sql}"""
if quarantined_by is not None:
sql += " AND safe_from_quarantine = FALSE"
txn.execute(sql, [quarantined_by, now_ts] + sql_many_clause_args)
txn.execute(sql, [quarantined_by] + sql_many_clause_args)
# Note that a rowcount of -1 can be used to indicate no rows were affected.
total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0
@@ -1220,13 +1172,13 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
)
sql = f"""
UPDATE local_media_repository
SET quarantined_by = ?, quarantined_ts = ?
SET quarantined_by = ?
WHERE {sql_many_clause_sql}"""
if quarantined_by is not None:
sql += " AND safe_from_quarantine = FALSE"
txn.execute(sql, [quarantined_by, now_ts] + sql_many_clause_args)
txn.execute(sql, [quarantined_by] + sql_many_clause_args)
total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0
return total_media_quarantined
@@ -1250,10 +1202,6 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
The total number of media items quarantined
"""
total_media_quarantined = 0
now_ts: int | None = self.clock.time_msec()
if quarantined_by is None:
now_ts = None
if media:
sql_in_list_clause, sql_args = make_tuple_in_list_sql_clause(
@@ -1263,10 +1211,10 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
)
sql = f"""
UPDATE remote_media_cache
SET quarantined_by = ?, quarantined_ts = ?
SET quarantined_by = ?
WHERE {sql_in_list_clause}"""
txn.execute(sql, [quarantined_by, now_ts] + sql_args)
txn.execute(sql, [quarantined_by] + sql_args)
total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0
total_media_quarantined = 0
@@ -1276,9 +1224,9 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
)
sql = f"""
UPDATE remote_media_cache
SET quarantined_by = ?, quarantined_ts = ?
SET quarantined_by = ?
WHERE {sql_many_clause_sql}"""
txn.execute(sql, [quarantined_by, now_ts] + sql_many_clause_args)
txn.execute(sql, [quarantined_by] + sql_many_clause_args)
total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0
return total_media_quarantined

View file

@@ -1,27 +0,0 @@
--
-- This file is licensed under the Affero General Public License (AGPL) version 3.
--
-- Copyright (C) 2025 Element Creations, Ltd
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as
-- published by the Free Software Foundation, either version 3 of the
-- License, or (at your option) any later version.
--
-- See the GNU Affero General Public License for more details:
-- <https://www.gnu.org/licenses/agpl-3.0.html>.
-- Add a timestamp recording when each piece of media was quarantined.
--
-- This should be NOT NULL, but we need to consider existing rows. In future we
-- may want to either backfill this or delete all rows with a NULL value (and
-- then make it NOT NULL).
ALTER TABLE local_media_repository ADD COLUMN quarantined_ts BIGINT;
ALTER TABLE remote_media_cache ADD COLUMN quarantined_ts BIGINT;
UPDATE local_media_repository SET quarantined_ts = 0 WHERE quarantined_by IS NOT NULL;
UPDATE remote_media_cache SET quarantined_ts = 0 WHERE quarantined_by IS NOT NULL;
-- Note: We *probably* should have an index on quarantined_ts, but we're going
-- to try to defer that to a future migration after seeing the performance impact.

View file

@@ -1546,6 +1546,8 @@ class TaskStatus(str, Enum):
COMPLETE = "complete"
# Task is over and either returned a failed status, or had an exception
FAILED = "failed"
# Task has been cancelled
CANCELLED = "cancelled"
@attr.s(auto_attribs=True, frozen=True, slots=True)

View file

@@ -22,6 +22,7 @@
import logging
from typing import TYPE_CHECKING, Awaitable, Callable
from twisted.internet import defer
from twisted.python.failure import Failure
from synapse.logging.context import (
@@ -111,7 +112,8 @@ class TaskScheduler:
self.server_name = hs.hostname
self._store = hs.get_datastores().main
self._clock = hs.get_clock()
self._running_tasks: set[str] = set()
# A map between a task's ID and a deferred linked to the task
self._running_tasks: dict[str, defer.Deferred] = {}
# A map between action names and their registered function
self._actions: dict[
str,
@@ -325,6 +327,37 @@ class TaskScheduler:
raise Exception(f"Task {id} is currently ACTIVE and can't be deleted")
await self._store.delete_scheduled_task(id)
async def cancel_task(self, id: str) -> None:
"""Cancel an ACTIVE or SCHEDULED task.
Args:
id: id of the task to cancel
"""
task = await self.get_task(id)
if not task:
logger.debug("Can't cancel task %s because it doesn't exist in the DB", id)
return
if not (
task.status == TaskStatus.ACTIVE or task.status == TaskStatus.SCHEDULED
):
logger.debug(
"Can't cancel task %s because it is neither ACTIVE nor SCHEDULED", id
)
return
if self._run_background_tasks:
await self.on_cancel_task(id)
else:
self.hs.get_replication_command_handler().send_cancel_task(id)
async def on_cancel_task(self, id: str) -> None:
if id in self._running_tasks:
deferred = self._running_tasks[id]
deferred.cancel()
self._running_tasks.pop(id)
await self.update_task(id, status=TaskStatus.CANCELLED)
def on_new_task(self, task_id: str) -> None:
"""Handle a notification that a new ready-to-run task has been added to the queue"""
# Just run the scheduler
@@ -458,7 +491,7 @@ class TaskScheduler:
result=result,
error=error,
)
self._running_tasks.remove(task.id)
self._running_tasks.pop(task.id)
current_time = self._clock.time()
usage = log_context.get_resource_usage()
@@ -489,6 +522,6 @@
if task.id in self._running_tasks:
return
self._running_tasks.add(task.id)
await self.update_task(task.id, status=TaskStatus.ACTIVE)
self.hs.run_as_background_process(f"task-{task.action}", wrapper)
deferred = self.hs.run_as_background_process(f"task-{task.action}", wrapper)
self._running_tasks[task.id] = deferred
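A hedged usage sketch tying the pieces together; get_task_scheduler() as the accessor is an assumption, the other names come from this diff:

async def cancel_stuck_task(hs, task_id: str) -> None:
    scheduler = hs.get_task_scheduler()  # accessor name assumed
    # On the worker that runs background tasks this cancels the task's deferred
    # directly; on any other worker it sends CANCEL_TASK over replication.
    await scheduler.cancel_task(task_id)
    # Afterwards the stored task row reports TaskStatus.CANCELLED.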

View file

@@ -75,7 +75,7 @@ _HISTORY_VIS_KEY: Final[tuple[str, str]] = (EventTypes.RoomHistoryVisibility, ""
@trace
async def filter_events_for_client(
async def filter_and_transform_events_for_client(
storage: StorageControllers,
user_id: str,
events: list[EventBase],

View file

@@ -49,6 +49,7 @@ class FederationMediaDownloadsTest(unittest.FederatingHomeserverTestCase):
hs.config.media.media_store_path = self.primary_base_path
local_provider = FileStorageProviderBackend(hs, self.primary_base_path)
storage_providers = [
StorageProviderWrapper(
FileStorageProviderBackend(hs, self.secondary_base_path),
@@ -60,7 +61,7 @@ class FederationMediaDownloadsTest(unittest.FederatingHomeserverTestCase):
self.filepaths = MediaFilePaths(self.primary_base_path)
self.media_storage = MediaStorage(
hs, self.primary_base_path, self.filepaths, storage_providers
hs, self.filepaths, storage_providers, local_provider
)
self.media_repo = hs.get_media_repository()
@@ -187,7 +188,7 @@ class FederationMediaDownloadsTest(unittest.FederatingHomeserverTestCase):
self.assertNotIn("body", channel.result)
class FederationThumbnailTest(unittest.FederatingHomeserverTestCase):
class FederationMediaTest(unittest.FederatingHomeserverTestCase):
def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
super().prepare(reactor, clock, hs)
self.test_dir = tempfile.mkdtemp(prefix="synapse-tests-")
@@ -197,6 +198,7 @@ class FederationThumbnailTest(unittest.FederatingHomeserverTestCase):
hs.config.media.media_store_path = self.primary_base_path
local_provider = FileStorageProviderBackend(hs, self.primary_base_path)
storage_providers = [
StorageProviderWrapper(
FileStorageProviderBackend(hs, self.secondary_base_path),
@@ -208,7 +210,116 @@ class FederationThumbnailTest(unittest.FederatingHomeserverTestCase):
self.filepaths = MediaFilePaths(self.primary_base_path)
self.media_storage = MediaStorage(
hs, self.primary_base_path, self.filepaths, storage_providers
hs, self.filepaths, storage_providers, local_provider
)
self.media_repo = hs.get_media_repository()
def test_thumbnail_download_scaled(self) -> None:
content = io.BytesIO(small_png.data)
content_uri = self.get_success(
self.media_repo.create_or_update_content(
"image/png",
"test_png_thumbnail",
content,
67,
UserID.from_string("@user_id:whatever.org"),
)
)
# test with an image file
channel = self.make_signed_federation_request(
"GET",
f"/_matrix/federation/v1/media/thumbnail/{content_uri.media_id}?width=32&height=32&method=scale",
)
self.pump()
self.assertEqual(200, channel.code)
content_type = channel.headers.getRawHeaders("content-type")
assert content_type is not None
assert "multipart/mixed" in content_type[0]
assert "boundary" in content_type[0]
# extract boundary
boundary = content_type[0].split("boundary=")[1]
# split on boundary and check that json field and expected value exist
body = channel.result.get("body")
assert body is not None
stripped_bytes = body.split(b"\r\n" + b"--" + boundary.encode("utf-8"))
found_json = any(
b"\r\nContent-Type: application/json\r\n\r\n{}" in field
for field in stripped_bytes
)
self.assertTrue(found_json)
# check that the png file exists and matches the expected scaled bytes
found_file = any(small_png.expected_scaled in field for field in stripped_bytes)
self.assertTrue(found_file)
def test_thumbnail_download_cropped(self) -> None:
content = io.BytesIO(small_png.data)
content_uri = self.get_success(
self.media_repo.create_or_update_content(
"image/png",
"test_png_thumbnail",
content,
67,
UserID.from_string("@user_id:whatever.org"),
)
)
# test with an image file
channel = self.make_signed_federation_request(
"GET",
f"/_matrix/federation/v1/media/thumbnail/{content_uri.media_id}?width=32&height=32&method=crop",
)
self.pump()
self.assertEqual(200, channel.code)
content_type = channel.headers.getRawHeaders("content-type")
assert content_type is not None
assert "multipart/mixed" in content_type[0]
assert "boundary" in content_type[0]
# extract boundary
boundary = content_type[0].split("boundary=")[1]
# split on boundary and check that json field and expected value exist
body = channel.result.get("body")
assert body is not None
stripped_bytes = body.split(b"\r\n" + b"--" + boundary.encode("utf-8"))
found_json = any(
b"\r\nContent-Type: application/json\r\n\r\n{}" in field
for field in stripped_bytes
)
self.assertTrue(found_json)
# check that the png file exists and matches the expected cropped bytes
found_file = any(
small_png.expected_cropped in field for field in stripped_bytes
)
self.assertTrue(found_file)
class FederationThumbnailTest(unittest.FederatingHomeserverTestCase):
def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
super().prepare(reactor, clock, hs)
self.test_dir = tempfile.mkdtemp(prefix="synapse-tests-")
self.addCleanup(shutil.rmtree, self.test_dir)
self.primary_base_path = os.path.join(self.test_dir, "primary")
self.secondary_base_path = os.path.join(self.test_dir, "secondary")
hs.config.media.media_store_path = self.primary_base_path
local_provider = FileStorageProviderBackend(hs, self.primary_base_path)
storage_providers = [
StorageProviderWrapper(
FileStorageProviderBackend(hs, self.secondary_base_path),
store_local=True,
store_remote=False,
store_synchronous=True,
)
]
self.filepaths = MediaFilePaths(self.primary_base_path)
self.media_storage = MediaStorage(
hs, self.filepaths, storage_providers, local_provider
)
self.media_repo = hs.get_media_repository()

Some files were not shown because too many files have changed in this diff