Mirror of https://github.com/element-hq/synapse.git, synced 2026-01-16 23:00:43 +00:00

Merge remote-tracking branch 'origin/develop' into erikj/ratelimit_media_upload

commit e08187237e

199 changed files with 5526 additions and 1581 deletions
142  .github/workflows/docker.yml  vendored

@@ -5,7 +5,7 @@ name: Build docker images
 on:
   push:
     tags: ["v*"]
-    branches: [ master, main, develop ]
+    branches: [master, main, develop]
   workflow_dispatch:

 permissions:
@@ -14,23 +14,21 @@ permissions:
   id-token: write # needed for signing the images with GitHub OIDC Token

 jobs:
   build:
-    runs-on: ubuntu-22.04
+    name: Build and push image for ${{ matrix.platform }}
+    runs-on: ${{ matrix.runs_on }}
+    strategy:
+      matrix:
+        include:
+          - platform: linux/amd64
+            runs_on: ubuntu-24.04
+            suffix: linux-amd64
+          - platform: linux/arm64
+            runs_on: ubuntu-24.04-arm
+            suffix: linux-arm64
     steps:
-      - name: Set up QEMU
-        id: qemu
-        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
-        with:
-          platforms: arm64
-
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
-
-      - name: Inspect builder
-        run: docker buildx inspect
-
-      - name: Install Cosign
-        uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
+        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

       - name: Checkout repository
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -55,13 +53,79 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

-      - name: Calculate docker image tag
-        id: set-tag
-        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
+      - name: Build and push by digest
+        id: build
+        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
         with:
-          images: |
+          push: true
+          labels: |
+            gitsha1=${{ github.sha }}
+            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
+          tags: |
             docker.io/matrixdotorg/synapse
             ghcr.io/element-hq/synapse
+          file: "docker/Dockerfile"
+          platforms: ${{ matrix.platform }}
+          outputs: type=image,push-by-digest=true,name-canonical=true,push=true
+
+      - name: Export digest
+        run: |
+          mkdir -p ${{ runner.temp }}/digests
+          digest="${{ steps.build.outputs.digest }}"
+          touch "${{ runner.temp }}/digests/${digest#sha256:}"
+
+      - name: Upload digest
+        uses: actions/upload-artifact@v4
+        with:
+          name: digests-${{ matrix.suffix }}
+          path: ${{ runner.temp }}/digests/*
+          if-no-files-found: error
+          retention-days: 1
+
+  merge:
+    name: Push merged images to ${{ matrix.repository }}
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        repository:
+          - docker.io/matrixdotorg/synapse
+          - ghcr.io/element-hq/synapse
+
+    needs:
+      - build
+    steps:
+      - name: Download digests
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+        with:
+          path: ${{ runner.temp }}/digests
+          pattern: digests-*
+          merge-multiple: true
+
+      - name: Log in to DockerHub
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        if: ${{ startsWith(matrix.repository, 'docker.io') }}
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Log in to GHCR
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        if: ${{ startsWith(matrix.repository, 'ghcr.io') }}
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+
+      - name: Install Cosign
+        uses: sigstore/cosign-installer@398d4b0eeef1380460a10c8013a76f728fb906ac # v3.9.1
+
+      - name: Calculate docker image tag
+        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
+        with:
+          images: ${{ matrix.repository }}
+          flavor: |
+            latest=false
+          tags: |
@@ -69,31 +133,23 @@ jobs:
             type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
             type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
             type=pep440,pattern={{raw}}
             type=sha

-      - name: Build and push all platforms
-        id: build-and-push
-        uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
-        with:
-          push: true
-          labels: |
-            gitsha1=${{ github.sha }}
-            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
-          tags: "${{ steps.set-tag.outputs.tags }}"
-          file: "docker/Dockerfile"
-          platforms: linux/amd64,linux/arm64
-
-          # arm64 builds OOM without the git fetch setting. c.f.
-          # https://github.com/rust-lang/cargo/issues/10583
-          build-args: |
-            CARGO_NET_GIT_FETCH_WITH_CLI=true
-
-      - name: Sign the images with GitHub OIDC Token
+      - name: Create manifest list and push
+        working-directory: ${{ runner.temp }}/digests
         env:
-          DIGEST: ${{ steps.build-and-push.outputs.digest }}
-          TAGS: ${{ steps.set-tag.outputs.tags }}
+          REPOSITORY: ${{ matrix.repository }}
         run: |
-          images=""
-          for tag in ${TAGS}; do
-            images+="${tag}@${DIGEST} "
-          done
-          cosign sign --yes ${images}
+          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
+            $(printf "$REPOSITORY@sha256:%s " *)
+
+      - name: Sign each manifest
+        env:
+          REPOSITORY: ${{ matrix.repository }}
+        run: |
+          DIGESTS=""
+          for TAG in $(echo "$DOCKER_METADATA_OUTPUT_JSON" | jq -r '.tags[]'); do
+            DIGEST="$(docker buildx imagetools inspect $TAG --format '{{json .Manifest}}' | jq -r '.digest')"
+            DIGESTS="$DIGESTS $REPOSITORY@$DIGEST"
+          done
+          cosign sign --yes $DIGESTS
2  .github/workflows/docs-pr-netlify.yaml  vendored

@@ -14,7 +14,7 @@ jobs:
       # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
       # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
       - name: 📥 Download artifact
-        uses: dawidd6/action-download-artifact@07ab29fd4a977ae4d2b275087cf67563dfdf0295 # v9
+        uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
         with:
           workflow: docs-pr.yaml
           run_id: ${{ github.event.workflow_run.id }}
15  .github/workflows/fix_lint.yaml  vendored

@@ -6,6 +6,11 @@ name: Attempt to automatically fix linting errors
 on:
   workflow_dispatch:

+env:
+  # We use nightly so that `fmt` correctly groups together imports, and
+  # clippy correctly fixes up the benchmarks.
+  RUST_VERSION: nightly-2025-06-24
+
 jobs:
   fixup:
     name: Fix up
@@ -16,13 +21,11 @@ jobs:
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
         with:
-          # We use nightly so that `fmt` correctly groups together imports, and
-          # clippy correctly fixes up the benchmarks.
-          toolchain: nightly-2022-12-01
+          toolchain: ${{ env.RUST_VERSION }}
           components: clippy, rustfmt
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -44,6 +47,6 @@ jobs:
       - run: cargo fmt
         continue-on-error: true

-      - uses: stefanzweifel/git-auto-commit-action@b863ae1933cb653a53c021fe36dbb774e1fb9403 # v5.2.0
+      - uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 # v6.0.1
         with:
           commit_message: "Attempt to fix linting"
21  .github/workflows/latest_deps.yml  vendored

@@ -21,6 +21,9 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

+env:
+  RUST_VERSION: 1.87.0
+
 jobs:
   check_repo:
     # Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
@@ -41,8 +44,10 @@ jobs:
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       # The dev dependencies aren't exposed in the wheel metadata (at least with current
       # poetry-core versions), so we install with poetry.
@@ -75,8 +80,10 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.postgres-version }}
@@ -148,8 +155,10 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: Ensure sytest runs `pip install`
         # Delete the lockfile so sytest will `pip install` rather than `poetry install`
44  .github/workflows/release-artifacts.yml  vendored

@@ -30,7 +30,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
-          python-version: '3.x'
+          python-version: "3.x"
       - id: set-distros
         run: |
           # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
@@ -61,7 +61,7 @@ jobs:

       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
         with:
           install: true

@@ -76,7 +76,7 @@ jobs:
       - name: Set up python
         uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
-          python-version: '3.x'
+          python-version: "3.x"

       - name: Build the packages
         # see https://github.com/docker/build-push-action/issues/252
@@ -107,12 +107,15 @@ jobs:
           path: debs/*

   build-wheels:
-    name: Build wheels on ${{ matrix.os }} for ${{ matrix.arch }}
+    name: Build wheels on ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ubuntu-22.04, macos-13]
-        arch: [x86_64, aarch64]
+        os:
+          - ubuntu-24.04
+          - ubuntu-24.04-arm
+          - macos-13 # This uses x86-64
+          - macos-14 # This uses arm64
         # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
         # It is not read by the rest of the workflow.
         is_pr:
@@ -122,12 +125,11 @@ jobs:
           # Don't build macos wheels on PR CI.
           - is_pr: true
             os: "macos-13"
-          # Don't build aarch64 wheels on mac.
-          - os: "macos-13"
-            arch: aarch64
+          - is_pr: true
+            os: "macos-14"
           # Don't build aarch64 wheels on PR CI.
           - is_pr: true
-            arch: aarch64
+            os: "ubuntu-24.04-arm"

     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -139,21 +141,11 @@ jobs:
           python-version: "3.x"

       - name: Install cibuildwheel
-        run: python -m pip install cibuildwheel==2.23.0
-
-      - name: Set up QEMU to emulate aarch64
-        if: matrix.arch == 'aarch64'
-        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
-        with:
-          platforms: arm64
-
-      - name: Build aarch64 wheels
-        if: matrix.arch == 'aarch64'
-        run: echo 'CIBW_ARCHS_LINUX=aarch64' >> $GITHUB_ENV
+        run: python -m pip install cibuildwheel==3.0.0

       - name: Only build a single wheel on PR
         if: startsWith(github.ref, 'refs/pull/')
-        run: echo "CIBW_BUILD="cp39-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
+        run: echo "CIBW_BUILD="cp39-manylinux_*"" >> $GITHUB_ENV

       - name: Build wheels
         run: python -m cibuildwheel --output-dir wheelhouse
@@ -161,13 +153,10 @@ jobs:
           # Skip testing for platforms which various libraries don't have wheels
           # for, and so need extra build deps.
           CIBW_TEST_SKIP: pp3*-* *i686* *musl*
-          # Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
-          CARGO_NET_GIT_FETCH_WITH_CLI: true
-          CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI

       - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
-          name: Wheel-${{ matrix.os }}-${{ matrix.arch }}
+          name: Wheel-${{ matrix.os }}
           path: ./wheelhouse/*.whl

   build-sdist:
@@ -179,7 +168,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
-          python-version: '3.10'
+          python-version: "3.10"

       - run: pip install build

@@ -191,7 +180,6 @@ jobs:
           name: Sdist
           path: dist/*.tar.gz

-
   # if it's a tag, create a release and attach the artifacts to it
   attach-assets:
     name: "Attach assets to release"
9  .github/workflows/schema.yaml  vendored

@@ -5,6 +5,9 @@ on:
     paths:
      - schema/**
      - docs/usage/configuration/config_documentation.md
+  push:
+    branches: ["develop", "release-*"]
   workflow_dispatch:

 jobs:
   validate-schema:
@@ -12,7 +15,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
           python-version: "3.x"
       - name: Install check-jsonschema
@@ -38,7 +41,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
           python-version: "3.x"
       - name: Install PyYAML
@@ -51,4 +54,4 @@ jobs:
           > docs/usage/configuration/config_documentation.md
       - name: Error in case of any differences
         # Errors if there are now any modified files (untracked files are ignored).
-        run: 'git diff || ! git status --porcelain=1 | grep "^ M"'
+        run: 'git diff --exit-code'
72  .github/workflows/tests.yml  vendored

@@ -11,6 +11,9 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

+env:
+  RUST_VERSION: 1.87.0
+
 jobs:
   # Job to detect what has changed so we don't run e.g. Rust checks on PRs that
   # don't modify Rust code.
@@ -85,8 +88,10 @@ jobs:
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
           python-version: "3.x"
@@ -149,8 +154,10 @@ jobs:
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -210,8 +217,10 @@ jobs:
         with:
           ref: ${{ github.event.pull_request.head.sha }}
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
           poetry-version: "2.1.1"
@@ -227,10 +236,11 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
         with:
           components: clippy
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - run: cargo clippy -- -D warnings

@@ -245,11 +255,11 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
         with:
-          toolchain: nightly-2022-12-01
+          toolchain: nightly-2025-04-23
           components: clippy
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - run: cargo clippy --all-features -- -D warnings

@@ -262,12 +272,12 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
         with:
           # We use nightly so that it correctly groups together imports
-          toolchain: nightly-2022-12-01
+          toolchain: nightly-2025-04-23
           components: rustfmt
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - run: cargo fmt --check

@@ -362,8 +372,10 @@ jobs:
           postgres:${{ matrix.job.postgres-version }}

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -404,8 +416,10 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       # There aren't wheels for some of the older deps, so we need to install
       # their build dependencies
@@ -519,8 +533,10 @@ jobs:
         run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: Run SyTest
         run: /bootstrap.sh synapse
@@ -663,8 +679,10 @@ jobs:
           path: synapse

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
@@ -695,8 +713,10 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - run: cargo test

@@ -713,10 +733,10 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
         with:
           toolchain: nightly-2022-12-01
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - run: cargo bench --no-run
21  .github/workflows/twisted_trunk.yml  vendored

@@ -20,6 +20,9 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

+env:
+  RUST_VERSION: 1.87.0
+
 jobs:
   check_repo:
     # Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
@@ -43,8 +46,10 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -69,8 +74,10 @@ jobs:
       - run: sudo apt-get -qq install xmlsec1

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -113,8 +120,10 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: Patch dependencies
         # Note: The poetry commands want to create a virtualenv in /src/.venv/,
1  .gitignore  vendored

@@ -47,6 +47,7 @@ __pycache__/
 /.idea/
 /.ropeproject/
 /.vscode/
+/.zed/

 # build products
 !/.coveragerc
182  CHANGES.md

@@ -1,3 +1,185 @@
# Synapse 1.134.0rc1 (2025-07-09)

### Features

- Support for [MSC4235](https://github.com/matrix-org/matrix-spec-proposals/pull/4235): `via` query param for hierarchy endpoint. Contributed by Krishan (@kfiven). ([\#18070](https://github.com/element-hq/synapse/issues/18070))
- Add `forget_forced_upon_leave` capability as per [MSC4267](https://github.com/matrix-org/matrix-spec-proposals/pull/4267). ([\#18196](https://github.com/element-hq/synapse/issues/18196))
- Add `federated_user_may_invite` spam checker callback which receives the entire invite event. Contributed by @tulir @ Beeper. ([\#18241](https://github.com/element-hq/synapse/issues/18241))

### Bugfixes

- Fix `KeyError` on background updates when using split main/state databases. ([\#18509](https://github.com/element-hq/synapse/issues/18509))
- Improve performance of device deletion by adding missing index. ([\#18582](https://github.com/element-hq/synapse/issues/18582))
- Fix `avatar_url` and `displayname` being sent on federation profile queries when they are not set. ([\#18593](https://github.com/element-hq/synapse/issues/18593))
- Respond with 401 & `M_USER_LOCKED` when a locked user calls `POST /login`, as per the spec. ([\#18594](https://github.com/element-hq/synapse/issues/18594))
- Ensure policy servers are not asked to scan policy server change events, allowing rooms to disable the use of a policy server while the policy server is down. ([\#18605](https://github.com/element-hq/synapse/issues/18605))

### Improved Documentation

- Fix documentation of the Delete Room Admin API's status field. ([\#18519](https://github.com/element-hq/synapse/issues/18519))

### Deprecations and Removals

- Stop adding the "origin" field to newly-created events (PDUs). ([\#18418](https://github.com/element-hq/synapse/issues/18418))

### Internal Changes

- Replace `PyICU` crate with equivalent `icu_segmenter` Rust crate. ([\#18553](https://github.com/element-hq/synapse/issues/18553), [\#18646](https://github.com/element-hq/synapse/issues/18646))
- Improve docstring on `simple_upsert_many`. ([\#18573](https://github.com/element-hq/synapse/issues/18573))
- Raise poetry-core version cap to 2.1.3. ([\#18575](https://github.com/element-hq/synapse/issues/18575))
- Raise setuptools_rust version cap to 1.11.1. ([\#18576](https://github.com/element-hq/synapse/issues/18576))
- Better handling of ratelimited requests. ([\#18595](https://github.com/element-hq/synapse/issues/18595), [\#18600](https://github.com/element-hq/synapse/issues/18600))
- Update to Rust 1.87.0 in CI, and bump the pinned commit of the `dtolnay/rust-toolchain` GitHub Action to `b3b07ba8b418998c39fb20f53e8b695cdcc8de1b`. ([\#18596](https://github.com/element-hq/synapse/issues/18596))
- Speed up bulk device deletion. ([\#18602](https://github.com/element-hq/synapse/issues/18602))
- Speed up the building of arm-based wheels in CI. ([\#18618](https://github.com/element-hq/synapse/issues/18618))
- Speed up the building of Docker images in CI. ([\#18620](https://github.com/element-hq/synapse/issues/18620))
- Add `.zed/` directory to `.gitignore`. ([\#18623](https://github.com/element-hq/synapse/issues/18623))
- Log the room ID we're purging state for. ([\#18625](https://github.com/element-hq/synapse/issues/18625))

### Updates to locked dependencies

* Bump Swatinem/rust-cache from 2.7.8 to 2.8.0. ([\#18612](https://github.com/element-hq/synapse/issues/18612))
* Bump attrs from 24.2.0 to 25.3.0. ([\#18649](https://github.com/element-hq/synapse/issues/18649))
* Bump authlib from 1.5.2 to 1.6.0. ([\#18642](https://github.com/element-hq/synapse/issues/18642))
* Bump base64 from 0.21.7 to 0.22.1. ([\#18589](https://github.com/element-hq/synapse/issues/18589))
* Bump base64 from 0.21.7 to 0.22.1. ([\#18629](https://github.com/element-hq/synapse/issues/18629))
* Bump docker/build-push-action from 6.17.0 to 6.18.0. ([\#18497](https://github.com/element-hq/synapse/issues/18497))
* Bump docker/setup-buildx-action from 3.10.0 to 3.11.1. ([\#18587](https://github.com/element-hq/synapse/issues/18587))
* Bump hiredis from 3.1.0 to 3.2.1. ([\#18638](https://github.com/element-hq/synapse/issues/18638))
* Bump ijson from 3.3.0 to 3.4.0. ([\#18650](https://github.com/element-hq/synapse/issues/18650))
* Bump jsonschema from 4.23.0 to 4.24.0. ([\#18630](https://github.com/element-hq/synapse/issues/18630))
* Bump msgpack from 1.1.0 to 1.1.1. ([\#18651](https://github.com/element-hq/synapse/issues/18651))
* Bump mypy-zope from 1.0.11 to 1.0.12. ([\#18640](https://github.com/element-hq/synapse/issues/18640))
* Bump phonenumbers from 9.0.2 to 9.0.8. ([\#18652](https://github.com/element-hq/synapse/issues/18652))
* Bump pillow from 11.2.1 to 11.3.0. ([\#18624](https://github.com/element-hq/synapse/issues/18624))
* Bump prometheus-client from 0.21.0 to 0.22.1. ([\#18609](https://github.com/element-hq/synapse/issues/18609))
* Bump pyasn1-modules from 0.4.1 to 0.4.2. ([\#18495](https://github.com/element-hq/synapse/issues/18495))
* Bump pydantic from 2.11.4 to 2.11.7. ([\#18639](https://github.com/element-hq/synapse/issues/18639))
* Bump reqwest from 0.12.15 to 0.12.20. ([\#18590](https://github.com/element-hq/synapse/issues/18590))
* Bump reqwest from 0.12.20 to 0.12.22. ([\#18627](https://github.com/element-hq/synapse/issues/18627))
* Bump ruff from 0.11.11 to 0.12.1. ([\#18645](https://github.com/element-hq/synapse/issues/18645))
* Bump ruff from 0.12.1 to 0.12.2. ([\#18657](https://github.com/element-hq/synapse/issues/18657))
* Bump sentry-sdk from 2.22.0 to 2.32.0. ([\#18633](https://github.com/element-hq/synapse/issues/18633))
* Bump setuptools-rust from 1.10.2 to 1.11.1. ([\#18655](https://github.com/element-hq/synapse/issues/18655))
* Bump sigstore/cosign-installer from 3.8.2 to 3.9.0. ([\#18588](https://github.com/element-hq/synapse/issues/18588))
* Bump sigstore/cosign-installer from 3.9.0 to 3.9.1. ([\#18608](https://github.com/element-hq/synapse/issues/18608))
* Bump stefanzweifel/git-auto-commit-action from 5.2.0 to 6.0.1. ([\#18607](https://github.com/element-hq/synapse/issues/18607))
* Bump tokio from 1.45.1 to 1.46.0. ([\#18628](https://github.com/element-hq/synapse/issues/18628))
* Bump tokio from 1.46.0 to 1.46.1. ([\#18667](https://github.com/element-hq/synapse/issues/18667))
* Bump treq from 24.9.1 to 25.5.0. ([\#18610](https://github.com/element-hq/synapse/issues/18610))
* Bump types-bleach from 6.2.0.20241123 to 6.2.0.20250514. ([\#18634](https://github.com/element-hq/synapse/issues/18634))
* Bump types-jsonschema from 4.23.0.20250516 to 4.24.0.20250528. ([\#18611](https://github.com/element-hq/synapse/issues/18611))
* Bump types-opentracing from 2.4.10.6 to 2.4.10.20250622. ([\#18586](https://github.com/element-hq/synapse/issues/18586))
* Bump types-psycopg2 from 2.9.21.20250318 to 2.9.21.20250516. ([\#18658](https://github.com/element-hq/synapse/issues/18658))
* Bump types-pyyaml from 6.0.12.20241230 to 6.0.12.20250516. ([\#18643](https://github.com/element-hq/synapse/issues/18643))
* Bump types-setuptools from 75.2.0.20241019 to 80.9.0.20250529. ([\#18644](https://github.com/element-hq/synapse/issues/18644))
* Bump typing-extensions from 4.12.2 to 4.14.0. ([\#18654](https://github.com/element-hq/synapse/issues/18654))
* Bump typing-extensions from 4.14.0 to 4.14.1. ([\#18668](https://github.com/element-hq/synapse/issues/18668))
* Bump urllib3 from 2.2.2 to 2.5.0. ([\#18572](https://github.com/element-hq/synapse/issues/18572))

# Synapse 1.133.0 (2025-07-01)

Pre-built wheels are now built using the [manylinux_2_28](https://github.com/pypa/manylinux#manylinux_2_28-almalinux-8-based) base, which is expected to be compatible with distros using glibc 2.28 or later, including:

- Debian 10+
- Ubuntu 18.10+
- Fedora 29+
- CentOS/RHEL 8+

Previously, wheels were built using the [manylinux2014](https://github.com/pypa/manylinux#manylinux2014-centos-7-based-glibc-217) base, which was expected to be compatible with distros using glibc 2.17 or later.

### Bugfixes

- Bump `cibuildwheel` to 3.0.0 to fix the `manylinux` wheel builds. ([\#18615](https://github.com/element-hq/synapse/issues/18615))

# Synapse 1.133.0rc1 (2025-06-24)

### Features

- Add support for the [MSC4260 user report API](https://github.com/matrix-org/matrix-spec-proposals/pull/4260). ([\#18120](https://github.com/element-hq/synapse/issues/18120))

### Bugfixes

- Fix an issue where, during state resolution for v11 rooms, Synapse would incorrectly calculate the power level of the creator when there was no power levels event in the room. ([\#18534](https://github.com/element-hq/synapse/issues/18534), [\#18547](https://github.com/element-hq/synapse/issues/18547))
- Fix long-standing bug where sliding sync did not honour the `room_id_to_include` config option. ([\#18535](https://github.com/element-hq/synapse/issues/18535))
- Fix an issue where "Lock timeout is getting excessive" warnings would be logged even when the lock timeout was <10 minutes. ([\#18543](https://github.com/element-hq/synapse/issues/18543))
- Fix an issue where Synapse could calculate the wrong power level for the creator of the room if there was no power levels event. ([\#18545](https://github.com/element-hq/synapse/issues/18545))

### Improved Documentation

- Generate config documentation from JSON Schema file. ([\#18528](https://github.com/element-hq/synapse/issues/18528))
- Fix typo in user type documentation. ([\#18568](https://github.com/element-hq/synapse/issues/18568))

### Internal Changes

- Increase performance of introspecting access tokens when using delegated auth. ([\#18357](https://github.com/element-hq/synapse/issues/18357), [\#18561](https://github.com/element-hq/synapse/issues/18561))
- Log user deactivations. ([\#18541](https://github.com/element-hq/synapse/issues/18541))
- Enable [`flake8-logging`](https://docs.astral.sh/ruff/rules/#flake8-logging-log) and [`flake8-logging-format`](https://docs.astral.sh/ruff/rules/#flake8-logging-format-g) rules in Ruff and fix related issues throughout the codebase. ([\#18542](https://github.com/element-hq/synapse/issues/18542))
- Clean up old, unused rows from the `device_federation_inbox` table. ([\#18546](https://github.com/element-hq/synapse/issues/18546))
- Run config schema CI on develop and release branches. ([\#18551](https://github.com/element-hq/synapse/issues/18551))
- Add support for Twisted `25.5.0`+ releases. ([\#18577](https://github.com/element-hq/synapse/issues/18577))
- Update PyO3 to version 0.25. ([\#18578](https://github.com/element-hq/synapse/issues/18578))

### Updates to locked dependencies

* Bump actions/setup-python from 5.5.0 to 5.6.0. ([\#18555](https://github.com/element-hq/synapse/issues/18555))
* Bump base64 from 0.21.7 to 0.22.1. ([\#18559](https://github.com/element-hq/synapse/issues/18559))
* Bump dawidd6/action-download-artifact from 9 to 11. ([\#18556](https://github.com/element-hq/synapse/issues/18556))
* Bump headers from 0.4.0 to 0.4.1. ([\#18529](https://github.com/element-hq/synapse/issues/18529))
* Bump requests from 2.32.2 to 2.32.4. ([\#18533](https://github.com/element-hq/synapse/issues/18533))
* Bump types-requests from 2.32.0.20250328 to 2.32.4.20250611. ([\#18558](https://github.com/element-hq/synapse/issues/18558))

# Synapse 1.132.0 (2025-06-17)

### Improved Documentation

- Improvements to generate config documentation from JSON Schema file. ([\#18522](https://github.com/element-hq/synapse/issues/18522))

# Synapse 1.132.0rc1 (2025-06-10)

### Features

- Add support for [MSC4155](https://github.com/matrix-org/matrix-spec-proposals/pull/4155) Invite Filtering. ([\#18288](https://github.com/element-hq/synapse/issues/18288))
- Add experimental `user_may_send_state_event` module API callback. ([\#18455](https://github.com/element-hq/synapse/issues/18455))
- Add experimental `get_media_config_for_user` and `is_user_allowed_to_upload_media_of_size` module API callbacks that allow overriding of media repository maximum upload size. ([\#18457](https://github.com/element-hq/synapse/issues/18457))
- Add experimental `get_ratelimit_override_for_user` module API callback that allows overriding of per-user ratelimits. ([\#18458](https://github.com/element-hq/synapse/issues/18458))
- Pass `room_config` argument to `user_may_create_room` spam checker module callback. ([\#18486](https://github.com/element-hq/synapse/issues/18486))
- Support configuration of default and extra user types. ([\#18456](https://github.com/element-hq/synapse/issues/18456))
- Successful requests to `/_matrix/app/v1/ping` will now force Synapse to reattempt delivering transactions to appservices. ([\#18521](https://github.com/element-hq/synapse/issues/18521))
- Support the import of the `RatelimitOverride` type from `synapse.module_api` in modules and rename `messages_per_second` to `per_second`. ([\#18513](https://github.com/element-hq/synapse/issues/18513))

### Bugfixes

- Remove destinations from sending if not whitelisted. ([\#18484](https://github.com/element-hq/synapse/issues/18484))
- Fixed room summary API incorrectly returning that a room is private in the room summary response when the join rule is omitted by the remote server. Contributed by @nexy7574. ([\#18493](https://github.com/element-hq/synapse/issues/18493))
- Prevent users from adding themselves to their own user ignore list. ([\#18508](https://github.com/element-hq/synapse/issues/18508))

### Improved Documentation

- Generate config documentation from JSON Schema file. ([\#17892](https://github.com/element-hq/synapse/issues/17892))
- Mention `CAP_NET_BIND_SERVICE` as an alternative to running Synapse as root in order to bind to a privileged port. ([\#18408](https://github.com/element-hq/synapse/issues/18408))
- Surface hidden Admin API documentation regarding fetching of scheduled tasks. ([\#18516](https://github.com/element-hq/synapse/issues/18516))
- Mark the new module APIs in this release as experimental. ([\#18536](https://github.com/element-hq/synapse/issues/18536))

### Internal Changes

- Mark dehydrated devices in the [List All User Devices Admin API](https://element-hq.github.io/synapse/latest/admin_api/user_admin_api.html#list-all-devices). ([\#18252](https://github.com/element-hq/synapse/issues/18252))
- Reduce disk wastage by cleaning up `received_transactions` older than 1 day, rather than 30 days. ([\#18310](https://github.com/element-hq/synapse/issues/18310))
- Distinguish all vs local events being persisted in the "Event Send Time Quantiles" graph (Grafana). ([\#18510](https://github.com/element-hq/synapse/issues/18510))

# Synapse 1.131.0 (2025-06-03)

No significant changes since 1.131.0rc1.
1471  Cargo.lock  generated
(File diff suppressed because it is too large.)
@@ -1 +0,0 @@
-Generate config documentation from JSON Schema file.

1  changelog.d/18195.feature  Normal file

@@ -0,0 +1 @@
+Add plain-text handling for rich-text topics as per [MSC3765](https://github.com/matrix-org/matrix-spec-proposals/pull/3765).

@@ -1 +0,0 @@
-Mark dehydrated devices in the [List All User Devices Admin API](https://element-hq.github.io/synapse/latest/admin_api/user_admin_api.html#list-all-devices).

1  changelog.d/18263.feature  Normal file

@@ -0,0 +1 @@
+Add experimental support for [MSC4277](https://github.com/matrix-org/matrix-spec-proposals/pull/4277).

@@ -1 +0,0 @@
-Add support for [MSC4155](https://github.com/matrix-org/matrix-spec-proposals/pull/4155) Invite Filtering.

@@ -1 +0,0 @@
-Reduce disk wastage by cleaning up `received_transactions` older than 1 day, rather than 30 days.

@@ -1 +0,0 @@
-Mention `CAP_NET_BIND_SERVICE` as an alternative to running Synapse as root in order to bind to a privileged port.

@@ -1 +0,0 @@
-Add user_may_send_state_event callback to spam checker module API.

@@ -1 +0,0 @@
-Support configuration of default and extra user types.

@@ -1 +0,0 @@
-Add new module API callbacks that allows overriding of media repository maximum upload size.

@@ -1 +0,0 @@
-Add a new module API callback that allows overriding of per user ratelimits.

@@ -1 +0,0 @@
-Remove destinations from sending if not whitelisted.

@@ -1 +0,0 @@
-Pass room_config argument to user_may_create_room spam checker module callback.

@@ -1 +0,0 @@
-Prevent users from adding themselves to their own ignore list.

@@ -1 +0,0 @@
-Distinguish all vs local events being persisted in the "Event Send Time Quantiles" graph (Grafana).

@@ -1 +0,0 @@
-Support the import of the `RatelimitOverride` type from `synapse.module_api` in modules and rename `messages_per_second` to `per_second`.

@@ -1 +0,0 @@
-Surface hidden Admin API documentation regarding fetching of scheduled tasks.

@@ -1 +0,0 @@
-Successful requests to `/_matrix/app/v1/ping` will now force Synapse to reattempt delivering transactions to appservices.

1  changelog.d/18672.misc  Normal file

@@ -0,0 +1 @@
+Minor speed up of insertion into `stream_positions` table.
@@ -45,6 +45,10 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
     colors = {"red", "green", "blue", "yellow", "purple"}

     for pdu in pdus:
+        # TODO: The "origin" field has since been removed from events generated
+        # by Synapse. We should consider removing it here as well but since this
+        # is part of `contrib/`, it is left for the community to revise and ensure things
+        # still work correctly.
         origins.add(pdu.get("origin"))

     color_map = {color: color for color in colors if color in origins}
30  debian/changelog  vendored

@@ -1,3 +1,33 @@
matrix-synapse-py3 (1.134.0~rc1) stable; urgency=medium

  * New Synapse release 1.134.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 09 Jul 2025 11:27:13 +0100

matrix-synapse-py3 (1.133.0) stable; urgency=medium

  * New synapse release 1.133.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Jul 2025 13:13:24 +0000

matrix-synapse-py3 (1.133.0~rc1) stable; urgency=medium

  * New Synapse release 1.133.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 24 Jun 2025 11:57:47 +0100

matrix-synapse-py3 (1.132.0) stable; urgency=medium

  * New Synapse release 1.132.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 17 Jun 2025 13:16:20 +0100

matrix-synapse-py3 (1.132.0~rc1) stable; urgency=medium

  * New Synapse release 1.132.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 10 Jun 2025 11:15:18 +0100

matrix-synapse-py3 (1.131.0) stable; urgency=medium

  * New Synapse release 1.131.0.
@@ -117,7 +117,6 @@ It returns a JSON body like the following:
     "hashes": {
         "sha256": "xK1//xnmvHJIOvbgXlkI8eEqdvoMmihVDJ9J4SNlsAw"
     },
-    "origin": "matrix.org",
     "origin_server_ts": 1592291711430,
     "prev_events": [
         "$YK4arsKKcc0LRoe700pS8DSjOvUT4NDv0HfInlMFw2M"
@@ -806,7 +806,7 @@ A response body like the following is returned:
     }, {
         "delete_id": "delete_id2",
         "room_id": "!roomid:example.com",
-        "status": "purging",
+        "status": "active",
         "shutdown_room": {
             "kicked_users": [
                 "@foobar:example.com"

@@ -843,7 +843,7 @@ A response body like the following is returned:

 ```json
 {
-    "status": "purging",
+    "status": "active",
     "delete_id": "bHkCNQpHqOaFhPtK",
     "room_id": "!roomid:example.com",
     "shutdown_room": {

@@ -876,8 +876,8 @@ The following fields are returned in the JSON response body:
 - `delete_id` - The ID for this purge
 - `room_id` - The ID of the room being deleted
 - `status` - The status will be one of:
-  - `shutting_down` - The process is removing users from the room.
-  - `purging` - The process is purging the room and event data from database.
+  - `scheduled` - The deletion is waiting to be started
+  - `active` - The process is purging the room and event data from database.
   - `complete` - The process has completed successfully.
   - `failed` - The process is aborted, an error has occurred.
 - `error` - A string that shows an error message if `status` is `failed`.
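To make the status lifecycle above concrete, here is a minimal polling sketch (not part of the diff). It assumes the `GET /_synapse/admin/v2/rooms/delete_status/<delete_id>` endpoint from this Admin API page; the base URL, admin token, and poll interval are placeholders.

```python
import time

import requests

# Placeholders: point these at your own homeserver and admin access token.
BASE_URL = "https://homeserver.example"
ADMIN_TOKEN = "<admin access token>"


def wait_for_room_deletion(delete_id: str, poll_seconds: int = 10) -> dict:
    """Poll the delete-status Admin API until the purge finishes."""
    url = f"{BASE_URL}/_synapse/admin/v2/rooms/delete_status/{delete_id}"
    headers = {"Authorization": f"Bearer {ADMIN_TOKEN}"}
    while True:
        status = requests.get(url, headers=headers, timeout=30).json()
        # "scheduled" and "active" both mean the deletion is still in progress.
        if status["status"] == "complete":
            return status
        if status["status"] == "failed":
            raise RuntimeError(f"Room deletion failed: {status.get('error')}")
        time.sleep(poll_seconds)
```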
@@ -29,8 +29,6 @@ easiest way of installing the latest version is to use [rustup](https://rustup.r

 Synapse can connect to PostgreSQL via the [psycopg2](https://pypi.org/project/psycopg2/) Python library. Building this library from source requires access to PostgreSQL's C header files. On Debian or Ubuntu Linux, these can be installed with `sudo apt install libpq-dev`.

-Synapse has an optional, improved user search with better Unicode support. For that you need the development package of `libicu`. On Debian or Ubuntu Linux, this can be installed with `sudo apt install libicu-dev`.
-
 The source code of Synapse is hosted on GitHub. You will also need [a recent version of git](https://github.com/git-guides/install-git).

 For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
@@ -164,10 +164,7 @@ $ poetry cache clear --all .
 # including the wheel artifacts which is not covered by the above command
 # (see https://github.com/python-poetry/poetry/issues/10304)
 #
-# This is necessary in order to rebuild or fetch new wheels. For example, if you update
-# the `icu` library in on your system, you will need to rebuild the PyICU Python package
-# in order to incorporate the correct dynamically linked library locations otherwise you
-# will run into errors like: `ImportError: libicui18n.so.75: cannot open shared object file: No such file or directory`
+# This is necessary in order to rebuild or fetch new wheels.
 $ rm -rf $(poetry config cache-dir)
 ```
@@ -14,6 +14,11 @@ _First introduced in Synapse v1.132.0_
 async def get_media_config_for_user(user_id: str) -> Optional[JsonDict]
 ```

+**<span style="color:red">
+Caution: This callback is currently experimental. The method signature or behaviour
+may change without notice.
+</span>**
+
 Called when processing a request from a client for the
 [media config endpoint](https://spec.matrix.org/latest/client-server-api/#get_matrixclientv1mediaconfig).

@@ -39,6 +44,11 @@ _First introduced in Synapse v1.132.0_
 async def is_user_allowed_to_upload_media_of_size(user_id: str, size: int) -> bool
 ```

+**<span style="color:red">
+Caution: This callback is currently experimental. The method signature or behaviour
+may change without notice.
+</span>**
+
 Called before media is accepted for upload from a user, in case the module needs to
 enforce a different limit for the particular user.
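For illustration, a minimal module sketch wiring up both callbacks (not part of the diff). The class name, the "VIP" naming policy, and the 100 MiB figure are made up for this example, and `register_media_repository_callbacks` is assumed to follow the module API's usual callback-registration pattern; check the module API reference for your Synapse version.

```python
from typing import Optional

from synapse.module_api import ModuleApi

# Hypothetical limit used only for this example.
VIP_MAX_UPLOAD_SIZE = 100 * 1024 * 1024  # 100 MiB


class ExampleMediaLimits:
    def __init__(self, config: dict, api: ModuleApi):
        # Assumed registration helper for the media repository callbacks.
        api.register_media_repository_callbacks(
            get_media_config_for_user=self.get_media_config_for_user,
            is_user_allowed_to_upload_media_of_size=self.is_user_allowed_to_upload_media_of_size,
        )

    async def get_media_config_for_user(self, user_id: str) -> Optional[dict]:
        # Returning None keeps the server-wide media config for this user.
        if not user_id.startswith("@vip-"):
            return None
        # Advertise a larger upload limit to our hypothetical VIP users.
        return {"m.upload.size": VIP_MAX_UPLOAD_SIZE}

    async def is_user_allowed_to_upload_media_of_size(self, user_id: str, size: int) -> bool:
        # Enforce the same policy when the upload actually arrives.
        if user_id.startswith("@vip-"):
            return size <= VIP_MAX_UPLOAD_SIZE
        return True
```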
@@ -14,6 +14,11 @@ _First introduced in Synapse v1.132.0_
 async def get_ratelimit_override_for_user(user: str, limiter_name: str) -> Optional[synapse.module_api.RatelimitOverride]
 ```

+**<span style="color:red">
+Caution: This callback is currently experimental. The method signature or behaviour
+may change without notice.
+</span>**
+
 Called when constructing a ratelimiter of a particular type for a user. The module can
 return a `messages_per_second` and `burst_count` to be used, or `None` if
 the default settings are adequate. The user is represented by their Matrix user ID
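As a sketch of how this callback might be used (not part of the diff): the class name, the `register_ratelimit_callbacks` helper name, and the `@bot-` naming policy are assumptions, and `RatelimitOverride(per_second=..., burst_count=...)` follows the `per_second` rename noted in the changelog above.

```python
from typing import Optional

from synapse.module_api import ModuleApi, RatelimitOverride


class ExampleRatelimits:
    def __init__(self, config: dict, api: ModuleApi):
        # Assumed registration helper; check the module API reference for the
        # exact name in your Synapse version.
        api.register_ratelimit_callbacks(
            get_ratelimit_override_for_user=self.get_ratelimit_override_for_user,
        )

    async def get_ratelimit_override_for_user(
        self, user: str, limiter_name: str
    ) -> Optional[RatelimitOverride]:
        # Hypothetical policy: let bot users send messages faster than the
        # configured default; everyone else keeps the default limits (None).
        if limiter_name == "rc_message" and user.startswith("@bot-"):
            return RatelimitOverride(per_second=10.0, burst_count=50)
        return None
```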
@@ -80,6 +80,8 @@ Called when processing an invitation, both when one is created locally or when
 receiving an invite over federation. Both inviter and invitee are represented by
 their Matrix user ID (e.g. `@alice:example.com`).

+Note that federated invites will call `federated_user_may_invite` before this callback.
+
 The callback must return one of:
 - `synapse.module_api.NOT_SPAM`, to allow the operation. Other callbacks may still

@@ -97,6 +99,34 @@ be used. If this happens, Synapse will not call any of the subsequent implementa
 this callback.


+### `federated_user_may_invite`
+
+_First introduced in Synapse v1.133.0_
+
+```python
+async def federated_user_may_invite(event: "synapse.events.EventBase") -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
+```
+
+Called when processing an invitation received over federation. Unlike `user_may_invite`,
+this callback receives the entire event, including any stripped state in the `unsigned`
+section, not just the room and user IDs.
+
+The callback must return one of:
+- `synapse.module_api.NOT_SPAM`, to allow the operation. Other callbacks may still
+  decide to reject it.
+- `synapse.module_api.errors.Codes` to reject the operation with an error code. In case
+  of doubt, `synapse.module_api.errors.Codes.FORBIDDEN` is a good error code.
+
+If multiple modules implement this callback, they will be considered in order. If a
+callback returns `synapse.module_api.NOT_SPAM`, Synapse falls through to the next one.
+The value of the first callback that does not return `synapse.module_api.NOT_SPAM` will
+be used. If this happens, Synapse will not call any of the subsequent implementations of
+this callback.
+
+If all of the callbacks return `synapse.module_api.NOT_SPAM`, Synapse will also fall
+through to the `user_may_invite` callback before approving the invite.
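For illustration, a minimal sketch of a module implementing this callback (not part of the diff). The class name and the blocked-server policy are hypothetical; registration uses `register_spam_checker_callbacks`, the standard spam-checker registration method on the module API.

```python
from synapse.events import EventBase
from synapse.module_api import NOT_SPAM, ModuleApi
from synapse.module_api.errors import Codes

# Hypothetical blocklist, for illustration only.
BLOCKED_SERVERS = {"spam.example.com"}


class ExampleFederatedInviteChecker:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(
            federated_user_may_invite=self.federated_user_may_invite,
        )

    async def federated_user_may_invite(self, event: EventBase):
        # The full invite event is available here, unlike `user_may_invite`.
        inviter_server = event.sender.split(":", 1)[1]
        if inviter_server in BLOCKED_SERVERS:
            return Codes.FORBIDDEN
        # Allowing here still falls through to `user_may_invite` afterwards,
        # per the ordering described above.
        return NOT_SPAM
```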
### `user_may_send_3pid_invite`

_First introduced in Synapse v1.45.0_

@@ -254,6 +284,11 @@ _First introduced in Synapse v1.132.0_
 async def user_may_send_state_event(user_id: str, room_id: str, event_type: str, state_key: str, content: JsonDict) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]
 ```

+**<span style="color:red">
+Caution: This callback is currently experimental. The method signature or behaviour
+may change without notice.
+</span>**
+
 Called when processing a request to [send state events](https://spec.matrix.org/latest/client-server-api/#put_matrixclientv3roomsroomidstateeventtypestatekey) to a room.

 The arguments passed to this callback are:
@@ -286,7 +286,7 @@ Installing prerequisites on Ubuntu or Debian:

```sh
sudo apt install build-essential python3-dev libffi-dev \
                     python3-pip python3-setuptools sqlite3 \
-                    libssl-dev virtualenv libjpeg-dev libxslt1-dev libicu-dev
+                    libssl-dev virtualenv libjpeg-dev libxslt1-dev
```

##### ArchLinux
@@ -295,7 +295,7 @@ Installing prerequisites on ArchLinux:

```sh
sudo pacman -S base-devel python python-pip \
-               python-setuptools python-virtualenv sqlite3 icu
+               python-setuptools python-virtualenv sqlite3
```

##### CentOS/Fedora
@@ -305,8 +305,7 @@ Installing prerequisites on CentOS or Fedora Linux:

```sh
sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
                 libwebp-devel libxml2-devel libxslt-devel libpq-devel \
-                python3-virtualenv libffi-devel openssl-devel python3-devel \
-                libicu-devel
+                python3-virtualenv libffi-devel openssl-devel python3-devel
sudo dnf group install "Development Tools"
```
@@ -333,7 +332,7 @@ dnf install python3.12 python3.12-devel
```

Finally, install common prerequisites

```bash
-dnf install libicu libicu-devel libpq5 libpq5-devel lz4 pkgconf
+dnf install libpq5 libpq5-devel lz4 pkgconf
dnf group install "Development Tools"
```

###### Using venv module instead of virtualenv command
@@ -365,20 +364,6 @@ xcode-select --install

Some extra dependencies may be needed. You can use Homebrew (https://brew.sh) for them.

-You may need to install icu, and make the icu binaries and libraries accessible.
-Please follow [the official instructions of PyICU](https://pypi.org/project/PyICU/) to do so.
-
-If you're struggling to get icu discovered, and see:
-```
-RuntimeError:
-Please install pkg-config on your system or set the ICU_VERSION environment
-variable to the version of ICU you have installed.
-```
-despite it being installed and having your `PATH` updated, you can omit this dependency by
-not specifying `--extras all` to `poetry`. If using postgres, you can install Synapse via
-`poetry install --extras saml2 --extras oidc --extras postgres --extras opentracing --extras redis --extras sentry`.
-ICU is not a hard dependency on getting a working installation.
-
On ARM-based Macs you may also need to install libjpeg and libpq:
```sh
brew install jpeg libpq
```
@@ -400,8 +385,7 @@ Installing prerequisites on openSUSE:

```sh
sudo zypper in -t pattern devel_basis
sudo zypper in python-pip python-setuptools sqlite3 python-virtualenv \
-              python-devel libffi-devel libopenssl-devel libjpeg62-devel \
-              libicu-devel
+              python-devel libffi-devel libopenssl-devel libjpeg62-devel
```

##### OpenBSD
@@ -117,6 +117,13 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).

# Upgrading to v1.134.0

## ICU bundled with Synapse

Synapse now uses the Rust `icu` library for improved user search. Installing the
native ICU library on your system is no longer required.

# Upgrading to v1.130.0

## Documented endpoint which can be delegated to a federation worker
@@ -764,22 +764,23 @@ max_event_delay_duration: 24h
---
### `user_types`

-Configuration settings related to the user types feature.
+*(object)* Configuration settings related to the user types feature.

This setting has the following sub-options:
-* `default_user_type`: The default user type to use for registering new users when no value has been specified.
-  Defaults to none.
-* `extra_user_types`: Array of additional user types to allow. These are treated as real users. Defaults to [].

+* `default_user_type` (string|null): The default user type to use for registering new users when no value has been specified. Defaults to none. Defaults to `null`.
+
+* `extra_user_types` (array): Array of additional user types to allow. These are treated as real users. Defaults to `[]`.

Example configuration:
```yaml
user_types:
-  default_user_type: "custom"
-  extra_user_types:
-    - "custom"
-    - "custom2"
+  default_user_type: custom
+  extra_user_types:
+    - custom
+    - custom2
```

---
## Homeserver blocking

Useful options for Synapse admins.
@@ -1936,6 +1937,33 @@ rc_delayed_event_mgmt:
  burst_count: 20.0
```
---
### `rc_reports`

*(object)* Ratelimiting settings for reporting content.
This is a ratelimiting option that ratelimits reports made by users about content they see.
Setting this to a high value allows users to report content quickly, possibly in duplicate. This can result in higher database usage.

This setting has the following sub-options:

* `per_second` (number): Maximum number of requests a client can send per second.

* `burst_count` (number): Maximum number of requests a client can send before being throttled.

Default configuration:
```yaml
rc_reports:
  per_user:
    per_second: 1.0
    burst_count: 5.0
```

Example configuration:
```yaml
rc_reports:
  per_second: 2.0
  burst_count: 20.0
```
---
### `federation_rr_transactions_per_room_per_second`

*(integer)* Sets outgoing federation transaction frequency for sending read-receipts, per-room.
@@ -77,14 +77,11 @@ The user provided search term is lowercased and normalized using [NFKC](https://
this treats the string as case-insensitive, canonicalizes different forms of the
same text, and maps some "roughly equivalent" characters together.

-The search term is then split into words:
-
-* If [ICU](https://en.wikipedia.org/wiki/International_Components_for_Unicode) is
-  available, then the system's [default locale](https://unicode-org.github.io/icu/userguide/locale/#default-locales)
-  will be used to break the search term into words. (See the
-  [installation instructions](setup/installation.md) for how to install ICU.)
-* If unavailable, then runs of ASCII characters, numbers, underscores, and hyphens
-  are considered words.
+The search term is then split into segments using the [`icu_segmenter`
+Rust crate](https://crates.io/crates/icu_segmenter). This crate ships with its
+own dictionary and Long Short-Term Memory (LSTM) machine learning models
+per-language to segment words. Read more [in the crate's
+documentation](https://docs.rs/icu/latest/icu/segmenter/struct.WordSegmenter.html#method.new_auto).
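As a small illustration of the normalization step described above (a sketch using the
Python standard library, not Synapse's actual implementation):

```python
import unicodedata

def normalize_search_term(term: str) -> str:
    # Lowercase, then apply NFKC so "roughly equivalent" characters are
    # mapped together (ligatures, fullwidth forms, compatibility characters).
    return unicodedata.normalize("NFKC", term.lower())

# The ligature in "ﬁnd" and the fullwidth "Ｆｏｏ" both normalize to plain ASCII:
assert normalize_search_term("ﬁnd Ｆｏｏ") == "find foo"
```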
The queries for PostgreSQL and SQLite are detailed below, but their overall goal
is to find matching users, preferring users who are "real" (e.g. not bots,
@@ -96,7 +96,6 @@
gnumake

# Native dependencies for running Synapse.
-icu
libffi
libjpeg
libpqxx
poetry.lock (generated, 1011 lines changed): file diff suppressed because it is too large.
@ -74,6 +74,10 @@ select = [
|
|||
"PIE",
|
||||
# flake8-executable
|
||||
"EXE",
|
||||
# flake8-logging
|
||||
"LOG",
|
||||
# flake8-logging-format
|
||||
"G",
|
||||
]
|
||||
|
||||
[tool.ruff.lint.isort]
|
||||
|
|
@@ -97,7 +101,7 @@ module-name = "synapse.synapse_rust"

[tool.poetry]
name = "matrix-synapse"
-version = "1.131.0"
+version = "1.134.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later"
@@ -250,7 +254,6 @@ hiredis = { version = "*", optional = true }
Pympler = { version = "*", optional = true }
parameterized = { version = ">=0.7.4", optional = true }
idna = { version = ">=2.5", optional = true }
-pyicu = { version = ">=2.10.2", optional = true }

[tool.poetry.extras]
# NB: Packages that should be part of `pip install matrix-synapse[all]` need to be specified
@@ -273,10 +276,6 @@ redis = ["txredisapi", "hiredis"]
# Required to use experimental `caches.track_memory_usage` config option.
cache-memory = ["pympler"]
test = ["parameterized", "idna"]
-# Allows for better search for international characters in the user directory. This
-# requires libicu's development headers installed on the system (e.g. libicu-dev on
-# Debian-based distributions).
-user-search = ["pyicu"]

# The duplication here is awful. I hate hate hate hate hate it. However, for now I want
# to ensure you can still `pip install matrix-synapse[all]` like today. Two motivations:
@@ -308,8 +307,6 @@ all = [
    "txredisapi", "hiredis",
    # cache-memory
    "pympler",
-    # improved user search
-    "pyicu",
    # omitted:
    # - test: it's useful to have this separate from dev deps in the olddeps job
    # - systemd: this is a system-based requirement
@@ -320,7 +317,7 @@ all = [
# failing on new releases. Keeping lower bounds loose here means that dependabot
# can bump versions without having to update the content-hash in the lockfile.
# This helps prevents merge conflicts when running a batch of dependabot updates.
-ruff = "0.11.11"
+ruff = "0.12.2"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"
@@ -370,7 +367,7 @@ tomli = ">=1.2.3"
# runtime errors caused by build system changes.
# We are happy to raise these upper bounds upon request,
# provided we check that it's safe to do so (i.e. that CI passes).
-requires = ["poetry-core>=1.1.0,<=1.9.1", "setuptools_rust>=1.3,<=1.10.2"]
+requires = ["poetry-core>=1.1.0,<=2.1.3", "setuptools_rust>=1.3,<=1.11.1"]
build-backend = "poetry.core.masonry.api"
@@ -378,13 +375,10 @@ build-backend = "poetry.core.masonry.api"
# Skip unsupported platforms (by us or by Rust).
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
# We skip:
-# - CPython 3.6, 3.7 and 3.8: EOLed
-# - PyPy 3.7 and 3.8: we only support Python 3.9+
+# - CPython and PyPy 3.8: EOLed
# - musllinux i686: excluded to reduce number of wheels we build.
#   c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
-# - PyPy on Aarch64 and musllinux on aarch64: too slow to build.
-#   c.f. https://github.com/matrix-org/synapse/pull/14259
-skip = "cp36* cp37* cp38* pp37* pp38* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64"
+skip = "cp38* pp38* *-musllinux_i686"
+# Enable non-default builds.
+# "pypy" used to be included by default up until cibuildwheel 3.
+enable = "pypy"
@@ -7,7 +7,7 @@ name = "synapse"
version = "0.1.0"

edition = "2021"
-rust-version = "1.66.0"
+rust-version = "1.81.0"

[lib]
name = "synapse"
@@ -30,19 +30,28 @@ http = "1.1.0"
lazy_static = "1.4.0"
log = "0.4.17"
mime = "0.3.17"
-pyo3 = { version = "0.24.2", features = [
+pyo3 = { version = "0.25.1", features = [
    "macros",
    "anyhow",
    "abi3",
    "abi3-py39",
] }
-pyo3-log = "0.12.0"
-pythonize = "0.24.0"
+pyo3-log = "0.12.4"
+pythonize = "0.25.0"
regex = "1.6.0"
sha2 = "0.10.8"
serde = { version = "1.0.144", features = ["derive"] }
serde_json = "1.0.85"
ulid = "1.1.2"
+icu_segmenter = "2.0.0"
+reqwest = { version = "0.12.15", default-features = false, features = [
+    "http2",
+    "stream",
+    "rustls-tls-native-roots",
+] }
+http-body-util = "0.1.3"
+futures = "0.3.31"
+tokio = { version = "1.44.2", features = ["rt", "rt-multi-thread"] }

[features]
extension-module = ["pyo3/extension-module"]
@@ -58,3 +58,15 @@ impl NotFoundError {
        NotFoundError::new_err(())
    }
}

+import_exception!(synapse.api.errors, HttpResponseException);
+
+impl HttpResponseException {
+    pub fn new(status: StatusCode, bytes: Vec<u8>) -> pyo3::PyErr {
+        HttpResponseException::new_err((
+            status.as_u16(),
+            status.canonical_reason().unwrap_or_default(),
+            bytes,
+        ))
+    }
+}
rust/src/http_client.rs (new file, 218 lines)

@@ -0,0 +1,218 @@
/*
 * This file is licensed under the Affero General Public License (AGPL) version 3.
 *
 * Copyright (C) 2025 New Vector, Ltd
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * See the GNU Affero General Public License for more details:
 * <https://www.gnu.org/licenses/agpl-3.0.html>.
 */

use std::{collections::HashMap, future::Future, panic::AssertUnwindSafe, sync::LazyLock};

use anyhow::Context;
use futures::{FutureExt, TryStreamExt};
use pyo3::{exceptions::PyException, prelude::*, types::PyString};
use reqwest::RequestBuilder;
use tokio::runtime::Runtime;

use crate::errors::HttpResponseException;

/// The tokio runtime that we're using to run async Rust libs.
static RUNTIME: LazyLock<Runtime> = LazyLock::new(|| {
    tokio::runtime::Builder::new_multi_thread()
        .worker_threads(4)
        .enable_all()
        .build()
        .unwrap()
});

/// A reference to the `Deferred` python class.
static DEFERRED_CLASS: LazyLock<PyObject> = LazyLock::new(|| {
    Python::with_gil(|py| {
        py.import("twisted.internet.defer")
            .expect("module 'twisted.internet.defer' should be importable")
            .getattr("Deferred")
            .expect("module 'twisted.internet.defer' should have a 'Deferred' class")
            .unbind()
    })
});

/// A reference to the twisted `reactor`.
static TWISTED_REACTOR: LazyLock<Py<PyModule>> = LazyLock::new(|| {
    Python::with_gil(|py| {
        py.import("twisted.internet.reactor")
            .expect("module 'twisted.internet.reactor' should be importable")
            .unbind()
    })
});

/// Called when registering modules with python.
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
    let child_module: Bound<'_, PyModule> = PyModule::new(py, "http_client")?;
    child_module.add_class::<HttpClient>()?;

    // Make sure we fail early if we can't build the lazy statics.
    LazyLock::force(&RUNTIME);
    LazyLock::force(&DEFERRED_CLASS);

    m.add_submodule(&child_module)?;

    // We need to manually add the module to sys.modules to make `from
    // synapse.synapse_rust import acl` work.
    py.import("sys")?
        .getattr("modules")?
        .set_item("synapse.synapse_rust.http_client", child_module)?;

    Ok(())
}

#[pyclass]
#[derive(Clone)]
struct HttpClient {
    client: reqwest::Client,
}

#[pymethods]
impl HttpClient {
    #[new]
    pub fn py_new(user_agent: &str) -> PyResult<HttpClient> {
        // The twisted reactor can only be imported after Synapse has been
        // imported, to allow Synapse to change the twisted reactor. If we try
        // and import the reactor too early twisted installs a default reactor,
        // which can't be replaced.
        LazyLock::force(&TWISTED_REACTOR);

        Ok(HttpClient {
            client: reqwest::Client::builder()
                .user_agent(user_agent)
                .build()
                .context("building reqwest client")?,
        })
    }

    pub fn get<'a>(
        &self,
        py: Python<'a>,
        url: String,
        response_limit: usize,
    ) -> PyResult<Bound<'a, PyAny>> {
        self.send_request(py, self.client.get(url), response_limit)
    }

    pub fn post<'a>(
        &self,
        py: Python<'a>,
        url: String,
        response_limit: usize,
        headers: HashMap<String, String>,
        request_body: String,
    ) -> PyResult<Bound<'a, PyAny>> {
        let mut builder = self.client.post(url);
        for (name, value) in headers {
            builder = builder.header(name, value);
        }
        builder = builder.body(request_body);

        self.send_request(py, builder, response_limit)
    }
}

impl HttpClient {
    fn send_request<'a>(
        &self,
        py: Python<'a>,
        builder: RequestBuilder,
        response_limit: usize,
    ) -> PyResult<Bound<'a, PyAny>> {
        create_deferred(py, async move {
            let response = builder.send().await.context("sending request")?;

            let status = response.status();

            let mut stream = response.bytes_stream();
            let mut buffer = Vec::new();
            while let Some(chunk) = stream.try_next().await.context("reading body")? {
                if buffer.len() + chunk.len() > response_limit {
                    Err(anyhow::anyhow!("Response size too large"))?;
                }

                buffer.extend_from_slice(&chunk);
            }

            if !status.is_success() {
                return Err(HttpResponseException::new(status, buffer));
            }

            let r = Python::with_gil(|py| buffer.into_pyobject(py).map(|o| o.unbind()))?;

            Ok(r)
        })
    }
}

/// Creates a twisted deferred from the given future, spawning the task on the
/// tokio runtime.
///
/// Does not handle deferred cancellation or contextvars.
fn create_deferred<F, O>(py: Python, fut: F) -> PyResult<Bound<'_, PyAny>>
where
    F: Future<Output = PyResult<O>> + Send + 'static,
    for<'a> O: IntoPyObject<'a>,
{
    let deferred = DEFERRED_CLASS.bind(py).call0()?;
    let deferred_callback = deferred.getattr("callback")?.unbind();
    let deferred_errback = deferred.getattr("errback")?.unbind();

    RUNTIME.spawn(async move {
        // TODO: Is it safe to assert unwind safety here? I think so, as we
        // don't use anything that could be tainted by the panic afterwards.
        // Note that `.spawn(..)` asserts unwind safety on the future too.
        let res = AssertUnwindSafe(fut).catch_unwind().await;

        Python::with_gil(move |py| {
            // Flatten the panic into standard python error
            let res = match res {
                Ok(r) => r,
                Err(panic_err) => {
                    let panic_message = get_panic_message(&panic_err);
                    Err(PyException::new_err(
                        PyString::new(py, panic_message).unbind(),
                    ))
                }
            };

            // Send the result to the deferred, via `.callback(..)` or `.errback(..)`
            match res {
                Ok(obj) => {
                    TWISTED_REACTOR
                        .call_method(py, "callFromThread", (deferred_callback, obj), None)
                        .expect("callFromThread should not fail"); // There's nothing we can really do with errors here
                }
                Err(err) => {
                    TWISTED_REACTOR
                        .call_method(py, "callFromThread", (deferred_errback, err), None)
                        .expect("callFromThread should not fail"); // There's nothing we can really do with errors here
                }
            }
        });
    });

    Ok(deferred)
}

/// Try and get the panic message out of the panic
fn get_panic_message<'a>(panic_err: &'a (dyn std::any::Any + Send + 'static)) -> &'a str {
    // Apparently this is how you extract the panic message from a panic
    if let Some(str_slice) = panic_err.downcast_ref::<&str>() {
        str_slice
    } else if let Some(string) = panic_err.downcast_ref::<String>() {
        string
    } else {
        "unknown error"
    }
}
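From Python, the new client is awaited like any other Deferred-returning API. This
sketch mirrors the `HttpClient` usage introduced in `msc3861_delegated.py` later in
this diff; the user-agent string and URL are placeholders.

```python
from synapse.api.errors import HttpResponseException
from synapse.logging.context import PreserveLoggingContext
from synapse.synapse_rust.http_client import HttpClient

client = HttpClient(user_agent="Synapse (sketch)")

async def fetch(url: str) -> bytes:
    try:
        # `get` returns a Twisted Deferred; awaiting it yields the body bytes.
        # `create_deferred` does not preserve logging contexts, hence the
        # PreserveLoggingContext guard, as at the real call site.
        with PreserveLoggingContext():
            return await client.get(url=url, response_limit=1 * 1024 * 1024)
    except HttpResponseException as e:
        # Raised from the Rust side for non-2xx responses (see errors.rs above).
        raise RuntimeError(f"request failed with HTTP {e.code}") from e
```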
@@ -27,7 +27,7 @@ pub enum IdentifierError {

impl fmt::Display for IdentifierError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
    }
}
@@ -8,10 +8,12 @@ pub mod acl;
pub mod errors;
pub mod events;
pub mod http;
+pub mod http_client;
pub mod identifier;
pub mod matrix_const;
pub mod push;
pub mod rendezvous;
+pub mod segmenter;

lazy_static! {
    static ref LOGGING_HANDLE: ResetHandle = pyo3_log::init();

@@ -50,7 +52,9 @@ fn synapse_rust(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
    acl::register_module(py, m)?;
    push::register_module(py, m)?;
    events::register_module(py, m)?;
+    http_client::register_module(py, m)?;
    rendezvous::register_module(py, m)?;
+    segmenter::register_module(py, m)?;

    Ok(())
}
rust/src/segmenter.rs (new file, 33 lines)

@@ -0,0 +1,33 @@
use icu_segmenter::options::WordBreakInvariantOptions;
use icu_segmenter::WordSegmenter;
use pyo3::prelude::*;

#[pyfunction]
pub fn parse_words(text: &str) -> PyResult<Vec<String>> {
    let segmenter = WordSegmenter::new_auto(WordBreakInvariantOptions::default());
    let mut parts = Vec::new();
    let mut last = 0usize;

    // `segment_str` gives us word boundaries as a vector of indexes. Use that
    // to build a vector of words, and return.
    for boundary in segmenter.segment_str(text) {
        if boundary > last {
            parts.push(text[last..boundary].to_string());
        }
        last = boundary;
    }
    Ok(parts)
}

pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
    let child_module = PyModule::new(py, "segmenter")?;
    child_module.add_function(wrap_pyfunction!(parse_words, m)?)?;

    m.add_submodule(&child_module)?;

    py.import("sys")?
        .getattr("modules")?
        .set_item("synapse.synapse_rust.segmenter", child_module)?;

    Ok(())
}
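A hypothetical Python session against this new module, for orientation. Boundaries come
from ICU's word model, and note that `parse_words` as written also returns the separator
segments between words, which callers would be expected to filter out.

```python
from synapse.synapse_rust.segmenter import parse_words

# Boundaries are emitted on both sides of separators, so whitespace segments
# appear in the result alongside the words themselves.
print(parse_words("hello world"))  # e.g. ['hello', ' ', 'world']
```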
@@ -1,5 +1,5 @@
$schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json
-$id: https://element-hq.github.io/synapse/v1.131/schema/synapse-config.schema.json
+$id: https://element-hq.github.io/synapse/schema/synapse/v1.134/synapse-config.schema.json
type: object
properties:
  modules:
@@ -912,6 +912,24 @@ properties:
      default: null
      examples:
        - 24h
+    user_types:
+      type: object
+      description: >-
+        Configuration settings related to the user types feature.
+      properties:
+        default_user_type:
+          type: ["string", "null"]
+          description: "The default user type to use for registering new users when no value has been specified. Defaults to none."
+          default: null
+        extra_user_types:
+          type: array
+          description: "Array of additional user types to allow. These are treated as real users."
+          items:
+            type: string
+          default: []
+      examples:
+        - default_user_type: "custom"
+          extra_user_types: ["custom", "custom2"]
    admin_contact:
      type: ["string", "null"]
      description: How to reach the server admin, used in `ResourceLimitError`.
@@ -2167,6 +2185,23 @@ properties:
      examples:
        - per_second: 2.0
          burst_count: 20.0
+    rc_reports:
+      $ref: "#/$defs/rc"
+      description: >-
+        Ratelimiting settings for reporting content.
+
+        This is a ratelimiting option that ratelimits reports made by users
+        about content they see.
+
+        Setting this to a high value allows users to report content quickly, possibly in
+        duplicate. This can result in higher database usage.
+      default:
+        per_user:
+          per_second: 1.0
+          burst_count: 5.0
+      examples:
+        - per_second: 2.0
+          burst_count: 20.0
    federation_rr_transactions_per_room_per_second:
      type: integer
      description: >-
@@ -243,7 +243,7 @@ def do_lint() -> Set[str]:
            importlib.import_module(module_info.name)
        except ModelCheckerException as e:
            logger.warning(
-                f"Bad annotation found when importing {module_info.name}"
+                "Bad annotation found when importing %s", module_info.name
            )
            failures.add(format_model_checker_exception(e))
@@ -139,3 +139,6 @@ cargo-fmt

# Ensure type hints are correct.
mypy
+
+# Generate configuration documentation from the JSON Schema
+./scripts-dev/gen_config_documentation.py schema/synapse-config.schema.yaml > docs/usage/configuration/config_documentation.md
@@ -37,7 +37,9 @@ from synapse.appservice import ApplicationService
from synapse.http import get_request_user_agent
from synapse.http.site import SynapseRequest
from synapse.logging.opentracing import trace
+from synapse.state import CREATE_KEY, POWER_KEY
from synapse.types import Requester, create_requester
+from synapse.types.state import StateFilter
from synapse.util.cancellation import cancellable

if TYPE_CHECKING:

@@ -216,18 +218,20 @@ class BaseAuth:
        # by checking if they would (theoretically) be able to change the
        # m.room.canonical_alias events

-        power_level_event = (
-            await self._storage_controllers.state.get_current_state_event(
-                room_id, EventTypes.PowerLevels, ""
-            )
+        auth_events = await self._storage_controllers.state.get_current_state(
+            room_id,
+            StateFilter.from_types(
+                [
+                    POWER_KEY,
+                    CREATE_KEY,
+                ]
+            ),
        )

-        auth_events = {}
-        if power_level_event:
-            auth_events[(EventTypes.PowerLevels, "")] = power_level_event
-
        send_level = event_auth.get_send_level(
-            EventTypes.CanonicalAlias, "", power_level_event
+            EventTypes.CanonicalAlias,
+            "",
+            auth_events.get(POWER_KEY),
        )
        user_level = event_auth.get_user_power_level(
            requester.user.to_string(), auth_events
@@ -29,6 +29,7 @@ from synapse.api.errors import (
    InvalidClientTokenError,
    MissingClientTokenError,
    UnrecognizedRequestError,
+    UserLockedError,
)
from synapse.http.site import SynapseRequest
from synapse.logging.opentracing import active_span, force_tracing, start_active_span

@@ -162,12 +163,7 @@ class InternalAuth(BaseAuth):
        if not allow_locked and await self.store.get_user_locked_status(
            requester.user.to_string()
        ):
-            raise AuthError(
-                401,
-                "User account has been locked",
-                errcode=Codes.USER_LOCKED,
-                additional_fields={"soft_logout": True},
-            )
+            raise UserLockedError()

        # Deny the request if the user account has expired.
        # This check is only done for regular users, not appservice ones.
@@ -30,9 +30,6 @@ from authlib.oauth2.rfc7662 import IntrospectionToken
from authlib.oidc.discovery import OpenIDProviderMetadata, get_well_known_url
from prometheus_client import Histogram

-from twisted.web.client import readBody
-from twisted.web.http_headers import Headers
-
from synapse.api.auth.base import BaseAuth
from synapse.api.errors import (
    AuthError,

@@ -43,8 +40,14 @@ from synapse.api.errors import (
    UnrecognizedRequestError,
)
from synapse.http.site import SynapseRequest
-from synapse.logging.context import make_deferred_yieldable
-from synapse.logging.opentracing import active_span, force_tracing, start_active_span
+from synapse.logging.context import PreserveLoggingContext
+from synapse.logging.opentracing import (
+    active_span,
+    force_tracing,
+    inject_request_headers,
+    start_active_span,
+)
+from synapse.synapse_rust.http_client import HttpClient
from synapse.types import Requester, UserID, create_requester
from synapse.util import json_decoder
from synapse.util.caches.cached_call import RetryOnExceptionCachedCall

@@ -179,6 +182,10 @@ class MSC3861DelegatedAuth(BaseAuth):
        self._admin_token: Callable[[], Optional[str]] = self._config.admin_token
        self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users

+        self._rust_http_client = HttpClient(
+            user_agent=self._http_client.user_agent.decode("utf8")
+        )
+
        # # Token Introspection Cache
        # This remembers what users/devices are represented by which access tokens,
        # in order to reduce overall system load:

@@ -301,7 +308,6 @@ class MSC3861DelegatedAuth(BaseAuth):
        introspection_endpoint = await self._introspection_endpoint()
        raw_headers: Dict[str, str] = {
            "Content-Type": "application/x-www-form-urlencoded",
-            "User-Agent": str(self._http_client.user_agent, "utf-8"),
            "Accept": "application/json",
            # Tell MAS that we support reading the device ID as an explicit
            # value, not encoded in the scope. This is supported by MAS 0.15+

@@ -315,38 +321,34 @@ class MSC3861DelegatedAuth(BaseAuth):
        uri, raw_headers, body = self._client_auth.prepare(
            method="POST", uri=introspection_endpoint, headers=raw_headers, body=body
        )
-        headers = Headers({k: [v] for (k, v) in raw_headers.items()})

        # Do the actual request
-        # We're not using the SimpleHttpClient util methods as we don't want to
-        # check the HTTP status code, and we do the body encoding ourselves.

        logger.debug("Fetching token from MAS")
        start_time = self._clock.time()
        try:
-            response = await self._http_client.request(
-                method="POST",
-                uri=uri,
-                data=body.encode("utf-8"),
-                headers=headers,
-            )
-
-            resp_body = await make_deferred_yieldable(readBody(response))
+            with start_active_span("mas-introspect-token"):
+                inject_request_headers(raw_headers)
+                with PreserveLoggingContext():
+                    resp_body = await self._rust_http_client.post(
+                        url=uri,
+                        response_limit=1 * 1024 * 1024,
+                        headers=raw_headers,
+                        request_body=body,
+                    )
+        except HttpResponseException as e:
+            end_time = self._clock.time()
+            introspection_response_timer.labels(e.code).observe(end_time - start_time)
+            raise
        except Exception:
            end_time = self._clock.time()
            introspection_response_timer.labels("ERR").observe(end_time - start_time)
            raise

-        end_time = self._clock.time()
-        introspection_response_timer.labels(response.code).observe(
-            end_time - start_time
-        )
        logger.debug("Fetched token from MAS")

-        if response.code < 200 or response.code >= 300:
-            raise HttpResponseException(
-                response.code,
-                response.phrase.decode("ascii", errors="replace"),
-                resp_body,
-            )
+        end_time = self._clock.time()
+        introspection_response_timer.labels(200).observe(end_time - start_time)

        resp = json_decoder.decode(resp_body.decode("utf-8"))

@@ -475,7 +477,7 @@ class MSC3861DelegatedAuth(BaseAuth):
        # XXX: This is a temporary solution so that the admin API can be called by
        # the OIDC provider. This will be removed once we have OIDC client
        # credentials grant support in matrix-authentication-service.
-        logging.info("Admin toked used")
+        logger.info("Admin toked used")
        # XXX: that user doesn't exist and won't be provisioned.
        # This is mostly fine for admin calls, but we should also think about doing
        # requesters without a user_id.
@@ -262,6 +262,11 @@ class EventContentFields:
    TOMBSTONE_SUCCESSOR_ROOM: Final = "replacement_room"

+    # Used in m.room.topic events.
+    TOPIC: Final = "topic"
+    M_TOPIC: Final = "m.topic"
+    M_TEXT: Final = "m.text"
+

class EventUnsignedContentFields:
    """Fields found inside the 'unsigned' data on events"""

@@ -270,6 +275,13 @@ class EventUnsignedContentFields:
    MEMBERSHIP: Final = "membership"


+class MTextFields:
+    """Fields found inside m.text content blocks."""
+
+    BODY: Final = "body"
+    MIMETYPE: Final = "mimetype"
+
+
class RoomTypes:
    """Understood values of the room_type field of m.room.create events."""
@@ -306,6 +306,20 @@ class UserDeactivatedError(SynapseError):
        )


+class UserLockedError(SynapseError):
+    """The error returned to the client when the user attempted to access an
+    authenticated endpoint, but the account has been locked.
+    """
+
+    def __init__(self) -> None:
+        super().__init__(
+            code=HTTPStatus.UNAUTHORIZED,
+            msg="User account has been locked",
+            errcode=Codes.USER_LOCKED,
+            additional_fields={"soft_logout": True},
+        )
+
+
class FederationDeniedError(SynapseError):
    """An error raised when the server tries to federate with a server which
    is not on its federation whitelist.

@@ -527,7 +541,11 @@ class InvalidCaptchaError(SynapseError):

class LimitExceededError(SynapseError):
-    """A client has sent too many requests and is being throttled."""
+    """A client has sent too many requests and is being throttled.
+
+    Args:
+        pause: Optional time in seconds to pause before responding to the client.
+    """

    def __init__(
        self,

@@ -535,6 +553,7 @@ class LimitExceededError(SynapseError):
        code: int = 429,
        retry_after_ms: Optional[int] = None,
        errcode: str = Codes.LIMIT_EXCEEDED,
+        pause: Optional[float] = None,
    ):
        # Use HTTP header Retry-After to enable library-assisted retry handling.
        headers = (

@@ -545,6 +564,7 @@ class LimitExceededError(SynapseError):
        super().__init__(code, "Too Many Requests", errcode, headers=headers)
        self.retry_after_ms = retry_after_ms
        self.limiter_name = limiter_name
+        self.pause = pause

    def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
        return cs_error(self.msg, self.errcode, retry_after_ms=self.retry_after_ms)
@@ -338,12 +338,10 @@ class Ratelimiter:
        )

        if not allowed:
-            if pause:
-                await self.clock.sleep(pause)
-
            raise LimitExceededError(
                limiter_name=self._limiter_name,
                retry_after_ms=int(1000 * (time_allowed - time_now_s)),
+                pause=pause,
            )
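The behavioural consequence of this hunk is that the limiter no longer sleeps inline;
the delay now rides along on the exception via the new `pause` attribute. A speculative
sketch of the consuming side (this handler is illustrative only; the actual response
path is not shown in this diff):

```python
from synapse.api.errors import LimitExceededError

async def handle_limit_exceeded(clock, error: LimitExceededError) -> int:
    # Apply the throttling delay where the 429 response is produced, rather
    # than inside the limiter itself (assumed consumer, not shown in the diff).
    if error.pause:
        await clock.sleep(error.pause)
    return error.code  # 429 Too Many Requests
```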
@@ -445,8 +445,8 @@ def listen_http(
    # getHost() returns a UNIXAddress which contains an instance variable of 'name'
    # encoded as a byte string. Decode as utf-8 so pretty.
    logger.info(
-        "Synapse now listening on Unix Socket at: "
-        f"{ports[0].getHost().name.decode('utf-8')}"
+        "Synapse now listening on Unix Socket at: %s",
+        ports[0].getHost().name.decode("utf-8"),
    )

    return ports
@@ -28,15 +28,13 @@ from prometheus_client import Gauge

from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.types import JsonDict
+from synapse.util.constants import ONE_HOUR_SECONDS, ONE_MINUTE_SECONDS

if TYPE_CHECKING:
    from synapse.server import HomeServer

logger = logging.getLogger("synapse.app.homeserver")

-ONE_MINUTE_SECONDS = 60
-ONE_HOUR_SECONDS = 60 * ONE_MINUTE_SECONDS
-
MILLISECONDS_PER_SECOND = 1000

INITIAL_DELAY_BEFORE_FIRST_PHONE_HOME_SECONDS = 5 * ONE_MINUTE_SECONDS

@@ -173,7 +171,7 @@ async def phone_stats_home(
    stats["log_level"] = logging.getLevelName(log_level)

    logger.info(
-        "Reporting stats to %s: %s" % (hs.config.metrics.report_stats_endpoint, stats)
+        "Reporting stats to %s: %s", hs.config.metrics.report_stats_endpoint, stats
    )
    try:
        await hs.get_proxied_http_client().put_json(
@@ -461,7 +461,7 @@ class _TransactionController:
        recoverer = self.recoverers.get(service.id)
        if not recoverer:
            # No need to force a retry on a happy AS.
-            logger.info(f"{service.id} is not in recovery, not forcing retry")
+            logger.info("%s is not in recovery, not forcing retry", service.id)
            return

        recoverer.force_retry()
@@ -561,11 +561,23 @@ class ExperimentalConfig(Config):
        # MSC4076: Add `disable_badge_count`` to pusher configuration
        self.msc4076_enabled: bool = experimental.get("msc4076_enabled", False)

+        # MSC4277: Harmonizing the reporting endpoints
+        #
+        # If enabled, ignore the score parameter and respond with HTTP 200 on
+        # reporting requests regardless of the subject's existence.
+        self.msc4277_enabled: bool = experimental.get("msc4277_enabled", False)
+
+        # MSC4235: Add `via` param to hierarchy endpoint
+        self.msc4235_enabled: bool = experimental.get("msc4235_enabled", False)
+
        # MSC4263: Preventing MXID enumeration via key queries
        self.msc4263_limit_key_queries_to_users_who_share_rooms = experimental.get(
            "msc4263_limit_key_queries_to_users_who_share_rooms",
            False,
        )

+        # MSC4267: Automatically forgetting rooms on leave
+        self.msc4267_enabled: bool = experimental.get("msc4267_enabled", False)
+
        # MSC4155: Invite filtering
        self.msc4155_enabled: bool = experimental.get("msc4155_enabled", False)
@@ -51,6 +51,8 @@ if TYPE_CHECKING:
    from synapse.config.homeserver import HomeServerConfig
    from synapse.server import HomeServer

+logger = logging.getLogger(__name__)
+
DEFAULT_LOG_CONFIG = Template(
    """\
# Log configuration for Synapse.

@@ -291,7 +293,7 @@ def _load_logging_config(log_config_path: str) -> None:
        log_config = yaml.safe_load(f.read())

    if not log_config:
-        logging.warning("Loaded a blank logging config?")
+        logger.warning("Loaded a blank logging config?")

    # If the old structured logging configuration is being used, raise an error.
    if "structured" in log_config and log_config.get("structured"):

@@ -312,7 +314,7 @@ def _reload_logging_config(log_config_path: Optional[str]) -> None:
        return

    _load_logging_config(log_config_path)
-    logging.info("Reloaded log config from %s due to SIGHUP", log_config_path)
+    logger.info("Reloaded log config from %s due to SIGHUP", log_config_path)


def setup_logging(

@@ -349,17 +351,17 @@ def setup_logging(
    appbase.register_sighup(_reload_logging_config, log_config_path)

    # Log immediately so we can grep backwards.
-    logging.warning("***** STARTING SERVER *****")
-    logging.warning(
+    logger.warning("***** STARTING SERVER *****")
+    logger.warning(
        "Server %s version %s",
        sys.argv[0],
        SYNAPSE_VERSION,
    )
-    logging.warning("Copyright (c) 2023 New Vector, Inc")
-    logging.warning(
+    logger.warning("Copyright (c) 2023 New Vector, Inc")
+    logger.warning(
        "Licensed under the AGPL 3.0 license. Website: https://github.com/element-hq/synapse"
    )
-    logging.info("Server hostname: %s", config.server.server_name)
-    logging.info("Public Base URL: %s", config.server.public_baseurl)
-    logging.info("Instance name: %s", hs.get_instance_name())
-    logging.info("Twisted reactor: %s", type(reactor).__name__)
+    logger.info("Server hostname: %s", config.server.server_name)
+    logger.info("Public Base URL: %s", config.server.public_baseurl)
+    logger.info("Instance name: %s", hs.get_instance_name())
+    logger.info("Twisted reactor: %s", type(reactor).__name__)
@@ -240,3 +240,9 @@ class RatelimitConfig(Config):
            "rc_delayed_event_mgmt",
            defaults={"per_second": 1, "burst_count": 5},
        )
+
+        self.rc_reports = RatelimitSettings.parse(
+            config,
+            "rc_reports",
+            defaults={"per_second": 1, "burst_count": 5},
+        )
@@ -27,7 +27,7 @@ from synapse.types import JsonDict

from ._base import Config, ConfigError

-logger = logging.Logger(__name__)
+logger = logging.getLogger(__name__)


class RoomDefaultEncryptionTypes:

@@ -85,4 +85,4 @@ class RoomConfig(Config):

        # When enabled, users will forget rooms when they leave them, either via a
        # leave, kick or ban.
-        self.forget_on_leave = config.get("forget_rooms_on_leave", False)
+        self.forget_on_leave: bool = config.get("forget_rooms_on_leave", False)
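The fix on this line is easy to miss: `logging.Logger(__name__)` constructs a logger
that is never registered with the logging manager, so it has no parent chain and never
sees the handlers configured at startup, while `logging.getLogger(__name__)` returns
the managed, hierarchical logger. A quick illustration:

```python
import logging

logging.basicConfig(level=logging.INFO)

managed = logging.getLogger("synapse.config.room")  # registered; inherits root handlers
detached = logging.Logger("synapse.config.room")    # the old, buggy construction

managed.info("printed via the root handler installed by basicConfig")
# The detached logger has no handlers and no parent chain; its records fall
# back to logging.lastResort (stderr, WARNING and above), so this is dropped.
detached.info("never shows up")
```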
@@ -41,7 +41,7 @@ from synapse.util.stringutils import parse_and_validate_server_name
from ._base import Config, ConfigError
from ._util import validate_config

-logger = logging.Logger(__name__)
+logger = logging.getLogger(__name__)

DIRECT_TCP_ERROR = """
Using direct TCP replication for workers is no longer supported.

@@ -27,8 +27,6 @@ from typing import Any, Dict, List, Optional, Union
import attr

from synapse._pydantic_compat import (
-    BaseModel,
-    Extra,
    StrictBool,
    StrictInt,
    StrictStr,

@@ -47,6 +45,7 @@ from synapse.config.server import (
    parse_listener_def,
)
from synapse.types import JsonDict
+from synapse.util.pydantic_models import ParseModel

_DEPRECATED_WORKER_DUTY_OPTION_USED = """
The '%s' configuration option is deprecated and will be removed in a future

@@ -90,30 +89,7 @@ def _instance_to_list_converter(obj: Union[str, List[str]]) -> List[str]:
    return obj


-class ConfigModel(BaseModel):
-    """A custom version of Pydantic's BaseModel which
-
-    - ignores unknown fields and
-    - does not allow fields to be overwritten after construction,
-
-    but otherwise uses Pydantic's default behaviour.
-
-    For now, ignore unknown fields. In the future, we could change this so that unknown
-    config values cause a ValidationError, provided the error messages are meaningful to
-    server operators.
-
-    Subclassing in this way is recommended by
-    https://pydantic-docs.helpmanual.io/usage/model_config/#change-behaviour-globally
-    """
-
-    class Config:
-        # By default, ignore fields that we don't recognise.
-        extra = Extra.ignore
-        # By default, don't allow fields to be reassigned after parsing.
-        allow_mutation = False
-
-
-class InstanceTcpLocationConfig(ConfigModel):
+class InstanceTcpLocationConfig(ParseModel):
    """The host and port to talk to an instance via HTTP replication."""

    host: StrictStr

@@ -129,7 +105,7 @@ class InstanceTcpLocationConfig(ConfigModel):
        return f"{self.host}:{self.port}"


-class InstanceUnixLocationConfig(ConfigModel):
+class InstanceUnixLocationConfig(ParseModel):
    """The socket file to talk to an instance via HTTP replication."""

    path: StrictStr
@@ -64,6 +64,7 @@ from synapse.api.room_versions import (
    RoomVersion,
    RoomVersions,
)
+from synapse.state import CREATE_KEY
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.types import (
    MutableStateMap,

@@ -308,6 +309,13 @@ def check_state_dependent_auth_rules(

    auth_dict = {(e.type, e.state_key): e for e in auth_events}

+    # Later code relies on there being a create event e.g _can_federate, _is_membership_change_allowed
+    # so produce a more intelligible error if we don't have one.
+    if auth_dict.get(CREATE_KEY) is None:
+        raise AuthError(
+            403, f"Event {event.event_id} is missing a create event in auth_events."
+        )
+
    # additional check for m.federate
    creating_domain = get_domain_from_id(event.room_id)
    originating_domain = get_domain_from_id(event.sender)

@@ -1010,11 +1018,16 @@ def get_user_power_level(user_id: str, auth_events: StateMap["EventBase"]) -> int:
        user_id: user's id to look up in power_levels
        auth_events:
            state in force at this point in the room (or rather, a subset of
-            it including at least the create event and power levels event.
+            it including at least the create event, and possibly a power levels event).

    Returns:
        the user's power level in this room.
    """
+    create_event = auth_events.get(CREATE_KEY)
+    assert create_event is not None, (
+        "A create event in the auth events chain is required to calculate user power level correctly,"
+        " but was not found. This indicates a bug"
+    )
    power_level_event = get_power_level_event(auth_events)
    if power_level_event:
        level = power_level_event.content.get("users", {}).get(user_id)

@@ -1028,18 +1041,12 @@ def get_user_power_level(user_id: str, auth_events: StateMap["EventBase"]) -> int:
    else:
        # if there is no power levels event, the creator gets 100 and everyone
        # else gets 0.
-
-        # some things which call this don't pass the create event: hack around
-        # that.
-        key = (EventTypes.Create, "")
-        create_event = auth_events.get(key)
-        if create_event is not None:
-            if create_event.room_version.implicit_room_creator:
-                creator = create_event.sender
-            else:
-                creator = create_event.content[EventContentFields.ROOM_CREATOR]
-            if creator == user_id:
-                return 100
+        if create_event.room_version.implicit_room_creator:
+            creator = create_event.sender
+        else:
+            creator = create_event.content[EventContentFields.ROOM_CREATOR]
+        if creator == user_id:
+            return 100
        return 0
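Condensing the rewritten fallback, here is an illustrative pseudo-helper (in the real
function `create_event` is guaranteed non-None by the new assertion above, so the old
"hack around that" branch could be dropped):

```python
def creator_fallback_power_level(user_id: str, create_event) -> int:
    # With no m.room.power_levels event in the room, the creator gets 100 and
    # everyone else gets 0. Newer room versions imply the creator from the
    # create event's sender; older ones carry an explicit "creator" field.
    if create_event.room_version.implicit_room_creator:
        creator = create_event.sender
    else:
        creator = create_event.content["creator"]  # EventContentFields.ROOM_CREATOR
    return 100 if creator == user_id else 0
```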
@@ -208,7 +208,6 @@ class EventBase(metaclass=abc.ABCMeta):
    depth: DictProperty[int] = DictProperty("depth")
    content: DictProperty[JsonDict] = DictProperty("content")
    hashes: DictProperty[Dict[str, str]] = DictProperty("hashes")
-    origin: DictProperty[str] = DictProperty("origin")
    origin_server_ts: DictProperty[int] = DictProperty("origin_server_ts")
    room_id: DictProperty[str] = DictProperty("room_id")
    sender: DictProperty[str] = DictProperty("sender")
@@ -195,15 +195,18 @@ class InviteAutoAccepter:
        except SynapseError as e:
            if e.code == HTTPStatus.FORBIDDEN:
                logger.debug(
-                    f"Update_room_membership was forbidden. This can sometimes be expected for remote invites. Exception: {e}"
+                    "Update_room_membership was forbidden. This can sometimes be expected for remote invites. Exception: %s",
+                    e,
                )
            else:
-                logger.warn(
-                    f"Update_room_membership raised the following unexpected (SynapseError) exception: {e}"
+                logger.warning(
+                    "Update_room_membership raised the following unexpected (SynapseError) exception: %s",
+                    e,
                )
        except Exception as e:
-            logger.warn(
-                f"Update_room_membership raised the following unexpected exception: {e}"
+            logger.warning(
+                "Update_room_membership raised the following unexpected exception: %s",
+                e,
            )

        sleep = 2**retries
@@ -302,8 +302,8 @@ def create_local_event_from_event_dict(
    event_dict: JsonDict,
    internal_metadata_dict: Optional[JsonDict] = None,
) -> EventBase:
-    """Takes a fully formed event dict, ensuring that fields like `origin`
-    and `origin_server_ts` have correct values for a locally produced event,
+    """Takes a fully formed event dict, ensuring that fields like
+    `origin_server_ts` have correct values for a locally produced event,
    then signs and hashes it.
    """

@@ -319,7 +319,6 @@ def create_local_event_from_event_dict(
    if format_version == EventFormatVersions.ROOM_V1_V2:
        event_dict["event_id"] = _create_event_id(clock, hostname)

-    event_dict["origin"] = hostname
    event_dict.setdefault("origin_server_ts", time_now)

    event_dict.setdefault("unsigned", {})
@@ -67,7 +67,6 @@ class EventValidator:
            "auth_events",
            "content",
            "hashes",
-            "origin",
            "prev_events",
            "sender",
            "type",

@@ -77,13 +76,6 @@ class EventValidator:
            if k not in event:
                raise SynapseError(400, "Event does not have key %s" % (k,))

-        # Check that the following keys have string values
-        event_strings = ["origin"]
-
-        for s in event_strings:
-            if not isinstance(getattr(event, s), str):
-                raise SynapseError(400, "'%s' not a string type" % (s,))
-
        # Depending on the room version, ensure the data is spec compliant JSON.
        if event.room_version.strict_canonicaljson:
            validate_canonicaljson(event.get_pdu_json())
@@ -322,8 +322,7 @@ def event_from_pdu_json(pdu_json: JsonDict, room_version: RoomVersion) -> EventBase:
        SynapseError: if the pdu is missing required fields or is otherwise
            not a valid matrix event
    """
-    # we could probably enforce a bunch of other fields here (room_id, sender,
-    # origin, etc etc)
+    # we could probably enforce a bunch of other fields here (room_id, sender, etc.)
    assert_params_in_dict(pdu_json, ("type", "depth"))

    # Strip any unauthorized values from "unsigned" if they exist
@@ -1818,7 +1818,7 @@ class FederationClient(FederationBase):
            )
            return timestamp_to_event_response
        except SynapseError as e:
-            logger.warn(
+            logger.warning(
                "timestamp_to_event(room_id=%s, timestamp=%s, direction=%s): encountered error when trying to fetch from destinations: %s",
                room_id,
                timestamp,
@@ -928,7 +928,8 @@ class FederationServer(FederationBase):
            # joins) or the full state (for full joins).
            # Return a 404 as we would if we weren't in the room at all.
            logger.info(
-                f"Rejecting /send_{membership_type} to %s because it's a partial state room",
+                "Rejecting /send_%s to %s because it's a partial state room",
+                membership_type,
                room_id,
            )
            raise SynapseError(
@@ -495,7 +495,7 @@ class AdminHandler:
                    )
                except Exception as ex:
                    logger.info(
-                        f"Redaction of event {event.event_id} failed due to: {ex}"
+                        "Redaction of event %s failed due to: %s", event.event_id, ex
                    )
                    result["failed_redactions"][event.event_id] = str(ex)
                    await self._task_scheduler.update_task(task.id, result=result)
@@ -465,9 +465,7 @@ class ApplicationServicesHandler:
            service, "read_receipt"
        )
        if new_token is not None and new_token.stream <= from_key:
-            logger.debug(
-                "Rejecting token lower than or equal to stored: %s" % (new_token,)
-            )
+            logger.debug("Rejecting token lower than or equal to stored: %s", new_token)
            return []

        from_token = MultiWriterStreamToken(stream=from_key)

@@ -509,9 +507,7 @@ class ApplicationServicesHandler:
            service, "presence"
        )
        if new_token is not None and new_token <= from_key:
-            logger.debug(
-                "Rejecting token lower than or equal to stored: %s" % (new_token,)
-            )
+            logger.debug("Rejecting token lower than or equal to stored: %s", new_token)
            return []

        for user in users:
@@ -76,7 +76,7 @@ from synapse.storage.databases.main.registration import (
    LoginTokenLookupResult,
    LoginTokenReused,
)
-from synapse.types import JsonDict, Requester, UserID
+from synapse.types import JsonDict, Requester, StrCollection, UserID
from synapse.util import stringutils as stringutils
from synapse.util.async_helpers import delay_cancellation, maybe_awaitable
from synapse.util.msisdn import phone_number_to_msisdn

@@ -1547,6 +1547,31 @@ class AuthHandler:
            user_id, (token_id for _, token_id, _ in tokens_and_devices)
        )

+    async def delete_access_tokens_for_devices(
+        self,
+        user_id: str,
+        device_ids: StrCollection,
+    ) -> None:
+        """Invalidate access tokens for the devices
+
+        Args:
+            user_id: ID of user the tokens belong to
+            device_ids: ID of device the tokens are associated with.
+                If None, tokens associated with any device (or no device) will
+                be deleted
+        """
+        tokens_and_devices = await self.store.user_delete_access_tokens_for_devices(
+            user_id,
+            device_ids,
+        )
+
+        # see if any modules want to know about this
+        if self.password_auth_provider.on_logged_out_callbacks:
+            for token, _, device_id in tokens_and_devices:
+                await self.password_auth_provider.on_logged_out(
+                    user_id=user_id, device_id=device_id, access_token=token
+                )
+
    async def add_threepid(
        self, user_id: str, medium: str, address: str, validated_at: int
    ) -> None:

@@ -1895,7 +1920,7 @@ def load_single_legacy_password_auth_provider(
    try:
        provider = module(config=config, account_handler=api)
    except Exception as e:
-        logger.error("Error while initializing %r: %s", module, e)
+        logger.exception("Error while initializing %r: %s", module, e)
        raise

    # All methods that the module provides should be async, but this wasn't enforced

@@ -2428,7 +2453,7 @@ class PasswordAuthProvider:
        except CancelledError:
            raise
        except Exception as e:
-            logger.error("Module raised an exception in is_3pid_allowed: %s", e)
+            logger.exception("Module raised an exception in is_3pid_allowed: %s", e)
            raise SynapseError(code=500, msg="Internal Server Error")

        return True
@@ -96,6 +96,14 @@ class DeactivateAccountHandler:
                403, "Deactivation of this user is forbidden", Codes.FORBIDDEN
            )

+        logger.info(
+            "%s requested deactivation of %s erase_data=%s id_server=%s",
+            requester.user,
+            user_id,
+            erase_data,
+            id_server,
+        )
+
        # FIXME: Theoretically there is a race here wherein user resets
        # password using threepid.
synapse/handlers/device.py

@@ -671,12 +671,12 @@ class DeviceHandler(DeviceWorkerHandler):
             except_device_id: optional device id which should not be deleted
         """
         device_map = await self.store.get_devices_by_user(user_id)
-        device_ids = list(device_map)
-        if except_device_id is not None:
-            device_ids = [d for d in device_ids if d != except_device_id]
-        await self.delete_devices(user_id, device_ids)
+        device_map.pop(except_device_id, None)
+        user_device_ids = device_map.keys()
+        await self.delete_devices(user_id, user_device_ids)

-    async def delete_devices(self, user_id: str, device_ids: List[str]) -> None:
+    async def delete_devices(self, user_id: str, device_ids: StrCollection) -> None:
         """Delete several devices

         Args:
@@ -695,17 +695,10 @@ class DeviceHandler(DeviceWorkerHandler):
             else:
                 raise

-        # Delete data specific to each device. Not optimised as it is not
-        # considered as part of a critical path.
-        for device_id in device_ids:
-            await self._auth_handler.delete_access_tokens_for_user(
-                user_id, device_id=device_id
-            )
-            await self.store.delete_e2e_keys_by_device(
-                user_id=user_id, device_id=device_id
-            )
-
-            if self.hs.config.experimental.msc3890_enabled:
+        # Delete data specific to each device. Not optimised as it's an
+        # experimental MSC.
+        if self.hs.config.experimental.msc3890_enabled:
+            for device_id in device_ids:
                 # Remove any local notification settings for this device in accordance
                 # with MSC3890.
                 await self._account_data_handler.remove_account_data_for_user(
@@ -713,6 +706,13 @@ class DeviceHandler(DeviceWorkerHandler):
                     f"org.matrix.msc3890.local_notification_settings.{device_id}",
                 )

+        # If we're deleting a lot of devices, a bunch of them may not have any
+        # to-device messages queued up. We filter those out to avoid scheduling
+        # unnecessary tasks.
+        devices_with_messages = await self.store.get_devices_with_messages(
+            user_id, device_ids
+        )
+        for device_id in devices_with_messages:
             # Delete device messages asynchronously and in batches using the task scheduler
             # We specify an upper stream id to avoid deleting non delivered messages
             # if a user re-uses a device ID.
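`get_devices_with_messages` comes from this diff; the rest of the sketch below is illustrative, assuming a task scheduler exposing a `schedule_task(action, params=...)` coroutine:

    # Skip scheduling deletion work for devices with nothing queued.
    async def schedule_message_deletion(store, scheduler, user_id, device_ids):
        # One bulk lookup instead of blindly creating one task per device.
        devices_with_messages = await store.get_devices_with_messages(
            user_id, device_ids
        )
        for device_id in devices_with_messages:
            await scheduler.schedule_task(
                "delete_device_messages",
                params={"user_id": user_id, "device_id": device_id},
            )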
@@ -726,6 +726,10 @@ class DeviceHandler(DeviceWorkerHandler):
                 },
             )

+        await self._auth_handler.delete_access_tokens_for_devices(
+            user_id, device_ids=device_ids
+        )
+
         # Pushers are deleted after `delete_access_tokens_for_user` is called so that
         # modules using `on_logged_out` hook can use them if needed.
         await self.hs.get_pusherpool().remove_pushers_by_devices(user_id, device_ids)
@@ -819,10 +823,11 @@ class DeviceHandler(DeviceWorkerHandler):
             # This should only happen if there are no updates, so we bail.
             return

-        for device_id in device_ids:
-            logger.debug(
-                "Notifying about update %r/%r, ID: %r", user_id, device_id, position
-            )
+        if logger.isEnabledFor(logging.DEBUG):
+            for device_id in device_ids:
+                logger.debug(
+                    "Notifying about update %r/%r, ID: %r", user_id, device_id, position
+                )

         # specify the user ID too since the user should always get their own device list
         # updates, even if they aren't in any rooms.
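Wrapping the loop in `isEnabledFor` means the per-device `logger.debug` calls (and their argument setup) are skipped entirely when DEBUG logging is off. Generic sketch:

    import logging

    logger = logging.getLogger(__name__)

    def notify_all(device_ids):
        if logger.isEnabledFor(logging.DEBUG):
            for device_id in device_ids:
                logger.debug("Notifying about update for %r", device_id)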
@@ -922,9 +927,6 @@ class DeviceHandler(DeviceWorkerHandler):
             # can't call self.delete_device because that will clobber the
             # access token so call the storage layer directly
             await self.store.delete_devices(user_id, [old_device_id])
-            await self.store.delete_e2e_keys_by_device(
-                user_id=user_id, device_id=old_device_id
-            )

             # tell everyone that the old device is gone and that the dehydrated
             # device has a new display name
@@ -946,7 +948,6 @@ class DeviceHandler(DeviceWorkerHandler):
             raise errors.NotFoundError()

         await self.delete_devices(user_id, [device_id])
-        await self.store.delete_e2e_keys_by_device(user_id=user_id, device_id=device_id)

     @wrap_as_background_process("_handle_new_device_update_async")
     async def _handle_new_device_update_async(self) -> None:
@@ -1600,7 +1601,7 @@ class DeviceListUpdater(DeviceListWorkerUpdater):
         if prev_stream_id is not None and cached_devices == {
             d["device_id"]: d for d in devices
         }:
-            logging.info(
+            logger.info(
                 "Skipping device list resync for %s, as our cache matches already",
                 user_id,
             )
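The bug fixed here is easy to miss: `logging.info(...)` logs via the root logger, bypassing the module's named logger and its configuration. Sketch of the difference (standard library behaviour, not Synapse code):

    import logging

    logging.info("goes through the root logger")             # the bug
    logging.getLogger(__name__).info("module-level logger")  # what `logger` refers to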
synapse/handlers/directory.py

@@ -282,7 +282,7 @@ class DirectoryHandler:
         except RequestSendFailed:
             raise SynapseError(502, "Failed to fetch alias")
         except CodeMessageException as e:
-            logging.warning(
+            logger.warning(
                 "Error retrieving alias %s -> %s %s", room_alias, e.code, e.msg
             )
             if e.code == 404:
synapse/handlers/federation.py

@@ -1062,8 +1062,8 @@ class FederationHandler:
         if self.hs.config.server.block_non_admin_invites:
             raise SynapseError(403, "This server does not accept room invites")

-        spam_check = await self._spam_checker_module_callbacks.user_may_invite(
-            event.sender, event.state_key, event.room_id
+        spam_check = (
+            await self._spam_checker_module_callbacks.federated_user_may_invite(event)
         )
         if spam_check != NOT_SPAM:
             raise SynapseError(
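A rough sketch of a module implementing the new callback; the callback name comes from this diff, while the registration details and rejection value below are assumptions:

    from synapse.module_api import NOT_SPAM

    class ExampleSpamChecker:
        # The federated variant receives the whole invite event rather than
        # the (inviter, invitee, room_id) triple used by user_may_invite.
        async def federated_user_may_invite(self, event):
            if event.sender.endswith(":spam.example.com"):  # hypothetical rule
                return "m.forbidden"  # anything other than NOT_SPAM rejects
            return NOT_SPAM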
@@ -1095,7 +1095,9 @@ class FederationHandler:
         rule = invite_config.get_invite_rule(event.sender)
         if rule == InviteRule.BLOCK:
             logger.info(
-                f"Automatically rejecting invite from {event.sender} due to the invite filtering rules of {event.state_key}"
+                "Automatically rejecting invite from %s due to the invite filtering rules of %s",
+                event.sender,
+                event.state_key,
             )
             raise SynapseError(
                 403,
synapse/handlers/identity.py

@@ -218,7 +218,7 @@ class IdentityHandler:

             return data
         except HttpResponseException as e:
-            logger.error("3PID bind failed with Matrix error: %r", e)
+            logger.exception("3PID bind failed with Matrix error: %r", e)
             raise e.to_synapse_error()
         except RequestTimedOutError:
             raise SynapseError(500, "Timed out contacting identity server")
@@ -323,7 +323,7 @@ class IdentityHandler:
                 # The remote server probably doesn't support unbinding (yet)
                 logger.warning("Received %d response while unbinding threepid", e.code)
             else:
-                logger.error("Failed to unbind threepid on identity server: %s", e)
+                logger.exception("Failed to unbind threepid on identity server: %s", e)
             raise SynapseError(500, "Failed to contact identity server")
         except RequestTimedOutError:
             raise SynapseError(500, "Timed out contacting identity server")
synapse/handlers/message.py

@@ -460,7 +460,7 @@ class MessageHandler:
             # date from the database in the same database transaction.
             await self.store.expire_event(event_id)
         except Exception as e:
-            logger.error("Could not expire event %s: %r", event_id, e)
+            logger.exception("Could not expire event %s: %r", event_id, e)

         # Schedule the expiry of the next event to expire.
         await self._schedule_next_expiry()
@@ -2061,7 +2061,8 @@ class EventCreationHandler:
             # dependent on _DUMMY_EVENT_ROOM_EXCLUSION_EXPIRY
             logger.info(
                 "Failed to send dummy event into room %s. Will exclude it from "
-                "future attempts until cache expires" % (room_id,)
+                "future attempts until cache expires",
+                room_id,
             )
             now = self.clock.time_msec()
             self._rooms_to_exclude_from_dummy_event_insertion[room_id] = now
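Same pattern as the other logging fixes in this diff: passing the values as arguments defers %-interpolation until the record is actually emitted. Sketch (generic logging guidance):

    import logging

    logger = logging.getLogger(__name__)
    room_id = "!room:example.org"  # placeholder

    logger.info("Failed to send dummy event into room %s" % (room_id,))  # eager
    logger.info("Failed to send dummy event into room %s", room_id)      # lazy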
@@ -2120,7 +2121,9 @@ class EventCreationHandler:
         except AuthError:
             logger.info(
                 "Failed to send dummy event into room %s for user %s due to "
-                "lack of power. Will try another user" % (room_id, user_id)
+                "lack of power. Will try another user",
+                room_id,
+                user_id,
             )
             return False
synapse/handlers/oidc.py

@@ -563,12 +563,13 @@ class OidcProvider:
                     raise ValueError("Unexpected subject")
             except Exception:
                 logger.warning(
-                    f"OIDC Back-Channel Logout is enabled for issuer {self.issuer!r} "
+                    "OIDC Back-Channel Logout is enabled for issuer %r "
                     "but it looks like the configured `user_mapping_provider` "
                     "does not use the `sub` claim as subject. If it is the case, "
                     "and you want Synapse to ignore the `sub` claim in OIDC "
                     "Back-Channel Logouts, set `backchannel_logout_ignore_sub` "
-                    "to `true` in the issuer config."
+                    "to `true` in the issuer config.",
+                    self.issuer,
                 )

     @property
@@ -826,10 +827,10 @@ class OidcProvider:
         if response.code < 400:
             logger.debug(
                 "Invalid response from the authorization server: "
-                'responded with a "{status}" '
-                "but body has an error field: {error!r}".format(
-                    status=status, error=resp["error"]
-                )
+                'responded with a "%s" '
+                "but body has an error field: %r",
+                status,
+                resp["error"],
             )

             description = resp.get("error_description", error)
@@ -1385,7 +1386,8 @@ class OidcProvider:
         # support dynamic registration in Synapse at some point.
         if not self._config.backchannel_logout_enabled:
             logger.warning(
-                f"Received an OIDC Back-Channel Logout request from issuer {self.issuer!r} but it is disabled in config"
+                "Received an OIDC Back-Channel Logout request from issuer %r but it is disabled in config",
+                self.issuer,
             )

         # TODO: this responds with a 400 status code, which is what the OIDC
@@ -1797,5 +1799,5 @@ class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]):
                 extras[key] = template.render(user=userinfo).strip()
             except Exception as e:
                 # Log an error and skip this value (don't break login for this).
-                logger.error("Failed to render OIDC extra attribute %s: %s" % (key, e))
+                logger.exception("Failed to render OIDC extra attribute %s: %s", key, e)
         return extras
synapse/handlers/profile.py

@@ -539,11 +539,17 @@ class ProfileHandler:
         response: JsonDict = {}
         try:
             if just_field is None or just_field == ProfileFields.DISPLAYNAME:
-                response["displayname"] = await self.store.get_profile_displayname(user)
+                displayname = await self.store.get_profile_displayname(user)
+                # do not set the displayname field if it is None,
+                # since then we send a null in the JSON response
+                if displayname is not None:
+                    response["displayname"] = displayname
             if just_field is None or just_field == ProfileFields.AVATAR_URL:
-                response["avatar_url"] = await self.store.get_profile_avatar_url(user)
+                avatar_url = await self.store.get_profile_avatar_url(user)
+                # do not set the avatar_url field if it is None,
+                # since then we send a null in the JSON response
+                if avatar_url is not None:
+                    response["avatar_url"] = avatar_url
             if self.hs.config.experimental.msc4133_enabled:
                 if just_field is None:
                     response.update(await self.store.get_profile_fields(user))
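The visible effect is that unset profile fields are now omitted rather than serialised as JSON null. Toy illustration:

    import json

    displayname = None
    avatar_url = "mxc://example.org/abc123"  # placeholder media URI

    response = {}
    if displayname is not None:
        response["displayname"] = displayname
    if avatar_url is not None:
        response["avatar_url"] = avatar_url

    print(json.dumps(response))  # {"avatar_url": "mxc://example.org/abc123"}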
synapse/handlers/register.py

@@ -506,7 +506,7 @@ class RegistrationHandler:
                 ratelimit=False,
             )
         except Exception as e:
-            logger.error("Failed to join new user to %r: %r", r, e)
+            logger.exception("Failed to join new user to %r: %r", r, e)

     async def _join_rooms(self, user_id: str) -> None:
         """
@@ -596,7 +596,7 @@ class RegistrationHandler:
             # moving away from bare excepts is a good thing to do.
             logger.error("Failed to join new user to %r: %r", r, e)
         except Exception as e:
-            logger.error("Failed to join new user to %r: %r", r, e, exc_info=True)
+            logger.exception("Failed to join new user to %r: %r", r, e)

     async def _auto_join_rooms(self, user_id: str) -> None:
         """Automatically joins users to auto join rooms - creating the room in the first place
98 synapse/handlers/reports.py Normal file
@@ -0,0 +1,98 @@
+#
+# This file is licensed under the Affero General Public License (AGPL) version 3.
+#
+# Copyright 2015, 2016 OpenMarket Ltd
+# Copyright (C) 2023 New Vector, Ltd
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# See the GNU Affero General Public License for more details:
+# <https://www.gnu.org/licenses/agpl-3.0.html>.
+#
+#
+import logging
+from http import HTTPStatus
+from typing import TYPE_CHECKING
+
+from synapse.api.errors import Codes, SynapseError
+from synapse.api.ratelimiting import Ratelimiter
+from synapse.types import (
+    Requester,
+)
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
+logger = logging.getLogger(__name__)
+
+
+class ReportsHandler:
+    def __init__(self, hs: "HomeServer"):
+        self._hs = hs
+        self._store = hs.get_datastores().main
+        self._clock = hs.get_clock()
+
+        # Ratelimiter for reports, keyed by the requesting user ID.
+        self._reports_ratelimiter = Ratelimiter(
+            store=self._store,
+            clock=self._clock,
+            cfg=hs.config.ratelimiting.rc_reports,
+        )
+
+    async def report_user(
+        self, requester: Requester, target_user_id: str, reason: str
+    ) -> None:
+        """Files a report against a user from a user.
+
+        Rate and size limits are applied to the report. If the user being reported
+        does not belong to this server, the report is ignored. This check is done
+        after the limits to reduce DoS potential.
+
+        If the user being reported belongs to this server, but doesn't exist, we
+        similarly ignore the report. The spec allows us to return an error if we
+        want to, but we choose to hide that user's existence instead.
+
+        If the report is otherwise valid (for a user which exists on our server),
+        we append it to the database for later processing.
+
+        Args:
+            requester - The user filing the report.
+            target_user_id - The user being reported.
+            reason - The user-supplied reason the user is being reported.
+
+        Raises:
+            SynapseError for BAD_REQUEST/BAD_JSON if the reason is too long.
+        """
+
+        await self._check_limits(requester)
+
+        if len(reason) > 1000:
+            raise SynapseError(
+                HTTPStatus.BAD_REQUEST,
+                "Reason must be less than 1000 characters",
+                Codes.BAD_JSON,
+            )
+
+        if not self._hs.is_mine_id(target_user_id):
+            return  # hide that they're not ours/that we can't do anything about them
+
+        user = await self._store.get_user_by_id(target_user_id)
+        if user is None:
+            return  # hide that they don't exist
+
+        await self._store.add_user_report(
+            target_user_id=target_user_id,
+            user_id=requester.user.to_string(),
+            reason=reason,
+            received_ts=self._clock.time_msec(),
+        )
+
+    async def _check_limits(self, requester: Requester) -> None:
+        await self._reports_ratelimiter.ratelimit(
+            requester,
+            requester.user.to_string(),
+        )
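A sketch of how a REST layer might drive the new handler. Only `ReportsHandler.report_user` is from this commit; the accessor name below is assumed wiring, not shown in this diff:

    from synapse.http.servlet import parse_json_object_from_request

    async def on_report_user_request(hs, request, target_user_id: str):
        requester = await hs.get_auth().get_user_by_req(request)
        body = parse_json_object_from_request(request)
        # hs.get_reports_handler() is assumed to exist on the HomeServer.
        await hs.get_reports_handler().report_user(
            requester, target_user_id, body.get("reason", "")
        )
        return 200, {}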
synapse/handlers/room.py

@@ -51,6 +51,7 @@ from synapse.api.constants import (
     HistoryVisibility,
     JoinRules,
     Membership,
+    MTextFields,
     RoomCreationPreset,
     RoomEncryptionAlgorithms,
     RoomTypes,
@@ -698,7 +699,7 @@ class RoomCreationHandler:
         except SynapseError as e:
             # again I'm not really expecting this to fail, but if it does, I'd rather
             # we returned the new room to the client at this point.
-            logger.error("Unable to send updated alias events in old room: %s", e)
+            logger.exception("Unable to send updated alias events in old room: %s", e)

         try:
             await self.event_creation_handler.create_and_send_nonmember_event(
@@ -715,7 +716,7 @@ class RoomCreationHandler:
         except SynapseError as e:
             # again I'm not really expecting this to fail, but if it does, I'd rather
             # we returned the new room to the client at this point.
-            logger.error("Unable to send updated alias events in new room: %s", e)
+            logger.exception("Unable to send updated alias events in new room: %s", e)

     async def create_room(
         self,
@@ -1303,7 +1304,13 @@ class RoomCreationHandler:
             topic = room_config["topic"]
             topic_event, topic_context = await create_event(
                 EventTypes.Topic,
-                {"topic": topic},
+                {
+                    EventContentFields.TOPIC: topic,
+                    EventContentFields.M_TOPIC: {
+                        # The mimetype property defaults to `text/plain` if omitted.
+                        EventContentFields.M_TEXT: [{MTextFields.BODY: topic}]
+                    },
+                },
                 True,
             )
             events_to_send.append((topic_event, topic_context))
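For reference, the topic content this now produces, with the constants expanded to their string values ("topic", "m.topic", "m.text", "body" in the extensible-events scheme; an assumption, since the constant definitions aren't shown in this diff):

    topic = "Weekly planning"  # example value

    content = {
        "topic": topic,  # legacy plain-text field, kept for compatibility
        "m.topic": {
            # mimetype defaults to text/plain when omitted
            "m.text": [{"body": topic}],
        },
    }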
synapse/handlers/room_member.py

@@ -922,7 +922,9 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         rule = invite_config.get_invite_rule(requester.user.to_string())
         if rule == InviteRule.BLOCK:
             logger.info(
-                f"Automatically rejecting invite from {target_id} due to the the invite filtering rules of {requester.user}"
+                "Automatically rejecting invite from %s due to the invite filtering rules of %s",
+                target_id,
+                requester.user,
             )
             raise SynapseError(
                 403,
@@ -1570,7 +1572,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
                 require_consent=False,
             )
         except Exception as e:
-            logger.exception("Error kicking guest user: %s" % (e,))
+            logger.exception("Error kicking guest user: %s", e)

     async def lookup_room_alias(
         self, room_alias: RoomAlias
synapse/handlers/room_policy.py

@@ -54,6 +54,9 @@ class RoomPolicyHandler:
         Returns:
             bool: True if the event is allowed in the room, False otherwise.
         """
+        if event.type == "org.matrix.msc4284.policy" and event.state_key is not None:
+            return True  # always allow policy server change events
+
         policy_event = await self._storage_controllers.state.get_current_state_event(
             event.room_id, "org.matrix.msc4284.policy", ""
         )
synapse/handlers/room_summary.py

@@ -111,7 +111,15 @@ class RoomSummaryHandler:
         # If a user tries to fetch the same page multiple times in quick succession,
         # only process the first attempt and return its result to subsequent requests.
         self._pagination_response_cache: ResponseCache[
-            Tuple[str, str, bool, Optional[int], Optional[int], Optional[str]]
+            Tuple[
+                str,
+                str,
+                bool,
+                Optional[int],
+                Optional[int],
+                Optional[str],
+                Optional[Tuple[str, ...]],
+            ]
         ] = ResponseCache(
             hs.get_clock(),
             "get_room_hierarchy",
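The new key component is `Optional[Tuple[str, ...]]` rather than a list because `ResponseCache` keys must be hashable. Toy illustration:

    cache = {}
    hosts = ("hs1.example.org", "hs2.example.org")
    cache[("!room:example.org", hosts)] = "cached result"  # tuples hash fine

    try:
        cache[("!room:example.org", list(hosts))] = "boom"
    except TypeError as e:
        print(e)  # unhashable type: 'list'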
@@ -126,6 +134,7 @@ class RoomSummaryHandler:
         max_depth: Optional[int] = None,
         limit: Optional[int] = None,
         from_token: Optional[str] = None,
+        remote_room_hosts: Optional[Tuple[str, ...]] = None,
     ) -> JsonDict:
         """
         Implementation of the room hierarchy C-S API.
@@ -143,6 +152,9 @@ class RoomSummaryHandler:
             limit: An optional limit on the number of rooms to return per
                 page. Must be a positive integer.
             from_token: An optional pagination token.
+            remote_room_hosts: An optional list of remote homeserver server names. If defined,
+                each host will be used to try and fetch the room hierarchy. Must be a tuple so
+                that it can be hashed by the `RoomSummaryHandler._pagination_response_cache`.

         Returns:
             The JSON hierarchy dictionary.
@@ -162,6 +174,7 @@ class RoomSummaryHandler:
                 max_depth,
                 limit,
                 from_token,
+                remote_room_hosts,
             ),
             self._get_room_hierarchy,
             requester.user.to_string(),
@@ -170,6 +183,7 @@ class RoomSummaryHandler:
             max_depth,
             limit,
             from_token,
+            remote_room_hosts,
         )

     async def _get_room_hierarchy(
@@ -180,6 +194,7 @@ class RoomSummaryHandler:
         max_depth: Optional[int] = None,
         limit: Optional[int] = None,
         from_token: Optional[str] = None,
+        remote_room_hosts: Optional[Tuple[str, ...]] = None,
     ) -> JsonDict:
         """See docstring for SpaceSummaryHandler.get_room_hierarchy."""

@@ -199,7 +214,7 @@ class RoomSummaryHandler:

         if not local_room:
             room_hierarchy = await self._summarize_remote_room_hierarchy(
-                _RoomQueueEntry(requested_room_id, ()),
+                _RoomQueueEntry(requested_room_id, remote_room_hosts or ()),
                 False,
             )
             root_room_entry = room_hierarchy[0]
@@ -240,7 +255,7 @@ class RoomSummaryHandler:
             processed_rooms = set(pagination_session["processed_rooms"])
         else:
             # The queue of rooms to process, the next room is last on the stack.
-            room_queue = [_RoomQueueEntry(requested_room_id, ())]
+            room_queue = [_RoomQueueEntry(requested_room_id, remote_room_hosts or ())]

             # Rooms we have already processed.
             processed_rooms = set()
@@ -701,7 +716,7 @@ class RoomSummaryHandler:
         # The API doesn't return the room version so assume that a
         # join rule of knock is valid.
         if (
-            room.get("join_rule")
+            room.get("join_rule", JoinRules.PUBLIC)
             in (JoinRules.PUBLIC, JoinRules.KNOCK, JoinRules.KNOCK_RESTRICTED)
             or room.get("world_readable") is True
         ):
synapse/handlers/saml.py

@@ -124,7 +124,7 @@ class SamlHandler:
         )

         # Since SAML sessions timeout it is useful to log when they were created.
-        logger.info("Initiating a new SAML session: %s" % (reqid,))
+        logger.info("Initiating a new SAML session: %s", reqid)

         now = self.clock.time_msec()
         self._outstanding_requests_dict[reqid] = Saml2SessionData(
synapse/handlers/send_email.py

@@ -238,7 +238,7 @@ class SendEmailHandler:
         multipart_msg.attach(text_part)
         multipart_msg.attach(html_part)

-        logger.info("Sending email to %s" % email_address)
+        logger.info("Sending email to %s", email_address)

         await self._sendmail(
             self._reactor,
synapse/handlers/sliding_sync/room_lists.py

@@ -23,6 +23,7 @@ from typing import (
     List,
+    Literal,
     Mapping,
     MutableMapping,
     Optional,
     Set,
     Tuple,
@@ -73,6 +74,7 @@ from synapse.types.handlers.sliding_sync import (
     SlidingSyncResult,
 )
 from synapse.types.state import StateFilter
+from synapse.util import MutableOverlayMapping

 if TYPE_CHECKING:
     from synapse.server import HomeServer
@@ -245,9 +247,11 @@ class SlidingSyncRoomLists:
         # Note: this won't include rooms the user has left themselves. We add back
         # `newly_left` rooms below. This is more efficient than fetching all rooms and
         # then filtering out the old left rooms.
-        room_membership_for_user_map = (
-            await self.store.get_sliding_sync_rooms_for_user_from_membership_snapshots(
-                user_id
+        room_membership_for_user_map: MutableMapping[str, RoomsForUserSlidingSync] = (
+            MutableOverlayMapping(
+                await self.store.get_sliding_sync_rooms_for_user_from_membership_snapshots(
+                    user_id
+                )
             )
         )
         # To play nice with the rewind logic below, we need to go fetch the rooms the
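`MutableOverlayMapping` itself isn't shown in this diff, but the idea is a copy-on-write view: writes land in an overlay while the cached, shared underlying dict stays untouched, which is what lets the repeated `dict(...)` copies below be deleted. A minimal sketch of the assumed semantics (not the real implementation):

    from collections.abc import MutableMapping

    class OverlayMapping(MutableMapping):
        """Copy-on-write view: mutations land in an overlay; the (possibly
        cached, shared) underlying mapping is never modified."""

        _DELETED = object()  # sentinel marking keys removed from the view

        def __init__(self, underlying):
            self._underlying = underlying
            self._overlay = {}

        def __getitem__(self, key):
            if key in self._overlay:
                value = self._overlay[key]
                if value is self._DELETED:
                    raise KeyError(key)
                return value
            return self._underlying[key]

        def __setitem__(self, key, value):
            self._overlay[key] = value

        def __delitem__(self, key):
            self[key]  # raises KeyError if the key isn't visible
            self._overlay[key] = self._DELETED

        def __iter__(self):
            for key in self._overlay:
                if self._overlay[key] is not self._DELETED:
                    yield key
            for key in self._underlying:
                if key not in self._overlay:
                    yield key

        def __len__(self):
            return sum(1 for _ in self)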
@@ -268,26 +272,12 @@ class SlidingSyncRoomLists:
                 )
             )
             if self_leave_room_membership_for_user_map:
-                # FIXME: It would be nice to avoid this copy but since
-                # `get_sliding_sync_rooms_for_user_from_membership_snapshots` is cached, it
-                # can't return a mutable value like a `dict`. We make the copy to get a
-                # mutable dict that we can change. We try to only make a copy when necessary
-                # (if we actually need to change something) as in most cases, the logic
-                # doesn't need to run.
-                room_membership_for_user_map = dict(room_membership_for_user_map)
                 room_membership_for_user_map.update(self_leave_room_membership_for_user_map)

             # Remove invites from ignored users
             ignored_users = await self.store.ignored_users(user_id)
             invite_config = await self.store.get_invite_config_for_user(user_id)
             if ignored_users:
-                # FIXME: It would be nice to avoid this copy but since
-                # `get_sliding_sync_rooms_for_user_from_membership_snapshots` is cached, it
-                # can't return a mutable value like a `dict`. We make the copy to get a
-                # mutable dict that we can change. We try to only make a copy when necessary
-                # (if we actually need to change something) as in most cases, the logic
-                # doesn't need to run.
-                room_membership_for_user_map = dict(room_membership_for_user_map)
                 # Make a copy so we don't run into an error: `dictionary changed size during
                 # iteration`, when we remove items
                 for room_id in list(room_membership_for_user_map.keys()):
@@ -316,13 +306,6 @@ class SlidingSyncRoomLists:
             sync_config.user, room_membership_for_user_map, to_token=to_token
         )
         if changes:
-            # FIXME: It would be nice to avoid this copy but since
-            # `get_sliding_sync_rooms_for_user_from_membership_snapshots` is cached, it
-            # can't return a mutable value like a `dict`. We make the copy to get a
-            # mutable dict that we can change. We try to only make a copy when necessary
-            # (if we actually need to change something) as in most cases, the logic
-            # doesn't need to run.
-            room_membership_for_user_map = dict(room_membership_for_user_map)
             for room_id, change in changes.items():
                 if change is None:
                     # Remove rooms that the user joined after the `to_token`
@@ -364,13 +347,6 @@ class SlidingSyncRoomLists:
             newly_left_room_map.keys() - room_membership_for_user_map.keys()
         )
         if missing_newly_left_rooms:
-            # FIXME: It would be nice to avoid this copy but since
-            # `get_sliding_sync_rooms_for_user_from_membership_snapshots` is cached, it
-            # can't return a mutable value like a `dict`. We make the copy to get a
-            # mutable dict that we can change. We try to only make a copy when necessary
-            # (if we actually need to change something) as in most cases, the logic
-            # doesn't need to run.
-            room_membership_for_user_map = dict(room_membership_for_user_map)
             for room_id in missing_newly_left_rooms:
                 newly_left_room_for_user = newly_left_room_map[room_id]
                 # This should be a given
@@ -461,6 +437,10 @@ class SlidingSyncRoomLists:
             else:
                 room_membership_for_user_map.pop(room_id, None)

+        # Remove any rooms that we globally exclude from sync.
+        for room_id in self.rooms_to_exclude_globally:
+            room_membership_for_user_map.pop(room_id, None)
+
         dm_room_ids = await self._get_dm_rooms_for_user(user_id)

         if sync_config.lists:
@@ -577,14 +557,6 @@ class SlidingSyncRoomLists:

         if sync_config.room_subscriptions:
             with start_active_span("assemble_room_subscriptions"):
-                # FIXME: It would be nice to avoid this copy but since
-                # `get_sliding_sync_rooms_for_user_from_membership_snapshots` is cached, it
-                # can't return a mutable value like a `dict`. We make the copy to get a
-                # mutable dict that we can change. We try to only make a copy when necessary
-                # (if we actually need to change something) as in most cases, the logic
-                # doesn't need to run.
-                room_membership_for_user_map = dict(room_membership_for_user_map)
-
                 # Find which rooms are partially stated and may need to be filtered out
                 # depending on the `required_state` requested (see below).
                 partial_state_rooms = await self.store.get_partial_rooms()
synapse/handlers/sso.py

@@ -1230,12 +1230,16 @@ class SsoHandler:
             if expected_user_id is not None and user_id != expected_user_id:
                 logger.error(
                     "Received a logout notification from SSO provider "
-                    f"{auth_provider_id!r} for the user {expected_user_id!r}, but with "
-                    f"a session ID ({auth_provider_session_id!r}) which belongs to "
-                    f"{user_id!r}. This may happen when the SSO provider user mapper "
+                    "%r for the user %r, but with "
+                    "a session ID (%r) which belongs to "
+                    "%r. This may happen when the SSO provider user mapper "
                     "uses something else than the standard attribute as mapping ID. "
                     "For OIDC providers, set `backchannel_logout_ignore_sub` to `true` "
-                    "in the provider config if that is the case."
+                    "in the provider config if that is the case.",
+                    auth_provider_id,
+                    expected_user_id,
+                    auth_provider_session_id,
+                    user_id,
                 )
                 continue

synapse/handlers/stats.py

@@ -36,6 +36,7 @@ from synapse.metrics import event_processing_positions
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage.databases.main.state_deltas import StateDelta
 from synapse.types import JsonDict
+from synapse.util.events import get_plain_text_topic_from_event_content

 if TYPE_CHECKING:
     from synapse.server import HomeServer
@@ -299,7 +300,9 @@ class StatsHandler:
                 elif delta.event_type == EventTypes.Name:
                     room_state["name"] = event_content.get("name")
                 elif delta.event_type == EventTypes.Topic:
-                    room_state["topic"] = event_content.get("topic")
+                    room_state["topic"] = get_plain_text_topic_from_event_content(
+                        event_content
+                    )
                 elif delta.event_type == EventTypes.RoomAvatar:
                     room_state["avatar"] = event_content.get("url")
                 elif delta.event_type == EventTypes.CanonicalAlias:
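`get_plain_text_topic_from_event_content` isn't shown in this diff; given the content shape built in room.py above, a plausible sketch of such a helper (assumed behaviour, not the real implementation):

    from typing import Any, Mapping, Optional

    def extract_plain_text_topic(content: Mapping[str, Any]) -> Optional[str]:
        # Prefer the extensible m.topic -> m.text representation, picking the
        # first entry that is plain text (no mimetype means text/plain).
        m_topic = content.get("m.topic")
        if isinstance(m_topic, dict):
            for block in m_topic.get("m.text", []):
                if isinstance(block, dict) and block.get(
                    "mimetype", "text/plain"
                ) == "text/plain":
                    body = block.get("body")
                    if isinstance(body, str):
                        return body
        # Fall back to the legacy plain-text field.
        topic = content.get("topic")
        return topic if isinstance(topic, str) else None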
Some files were not shown because too many files have changed in this diff.