Compare commits

..

2 Commits

Author | SHA1 | Message | Date
Erik Johnston | cfa5eca168 | Newsfile | 2024-05-22 15:02:31 +01:00
Erik Johnston | 56e6abfecc | Allow loading modules in Docker image | 2024-05-22 15:01:09 +01:00

    This is done by mounting a `/modules` directory and installing packages
    into that.
510 changed files with 10717 additions and 67317 deletions

View File

@@ -36,11 +36,11 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
# First calculate the various trial jobs.
#
# For PRs, we only run each type of test with the oldest Python version supported (which
# is Python 3.9 right now)
# is Python 3.8 right now)
trial_sqlite_tests = [
{
"python-version": "3.9",
"python-version": "3.8",
"database": "sqlite",
"extras": "all",
}
@@ -53,14 +53,14 @@ if not IS_PR:
"database": "sqlite",
"extras": "all",
}
for version in ("3.10", "3.11", "3.12", "3.13")
for version in ("3.9", "3.10", "3.11", "3.12")
)
trial_postgres_tests = [
{
"python-version": "3.9",
"python-version": "3.8",
"database": "postgres",
"postgres-version": "13",
"postgres-version": "11",
"extras": "all",
}
]
@@ -68,16 +68,16 @@ trial_postgres_tests = [
if not IS_PR:
trial_postgres_tests.append(
{
"python-version": "3.13",
"python-version": "3.12",
"database": "postgres",
"postgres-version": "17",
"postgres-version": "16",
"extras": "all",
}
)
trial_no_extra_tests = [
{
"python-version": "3.9",
"python-version": "3.8",
"database": "sqlite",
"extras": "",
}
@@ -99,24 +99,24 @@ set_output("trial_test_matrix", test_matrix)
# First calculate the various sytest jobs.
#
# For each type of test we only run on bullseye on PRs
# For each type of test we only run on focal on PRs
sytest_tests = [
{
"sytest-tag": "bullseye",
"sytest-tag": "focal",
},
{
"sytest-tag": "bullseye",
"sytest-tag": "focal",
"postgres": "postgres",
},
{
"sytest-tag": "bullseye",
"sytest-tag": "focal",
"postgres": "multi-postgres",
"workers": "workers",
},
{
"sytest-tag": "bullseye",
"sytest-tag": "focal",
"postgres": "multi-postgres",
"workers": "workers",
"reactor": "asyncio",
@@ -127,11 +127,11 @@ if not IS_PR:
sytest_tests.extend(
[
{
"sytest-tag": "bullseye",
"sytest-tag": "focal",
"reactor": "asyncio",
},
{
"sytest-tag": "bullseye",
"sytest-tag": "focal",
"postgres": "postgres",
"reactor": "asyncio",
},
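This script is computing CI matrices: each list of job dicts above (`trial_sqlite_tests`, `trial_postgres_tests`, `trial_no_extra_tests`, and the `sytest_tests` list) is serialised to JSON and exposed to GitHub Actions as a step output, as the hunk header's `set_output("trial_test_matrix", test_matrix)` shows. A rough sketch of that plumbing, assuming `set_output` appends to the file GitHub names in `$GITHUB_OUTPUT`:

```python
import json
import os

def set_output(key: str, value: str) -> None:
    # GitHub Actions reads step outputs from the file named in $GITHUB_OUTPUT.
    with open(os.environ["GITHUB_OUTPUT"], "a") as f:
        f.write(f"{key}={value}\n")

# Concatenate the job lists defined above and hand them to the workflow,
# where `strategy.matrix` consumes the JSON.
test_matrix = json.dumps(
    trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests
)
set_output("trial_test_matrix", test_matrix)
```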

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env bash
# this script is run by GitHub Actions in a plain `jammy` container; it
# this script is run by GitHub Actions in a plain `focal` container; it
# - installs the minimal system requirements, and poetry;
# - patches the project definition file to refer to old versions only;
# - creates a venv with these old versions using poetry; and finally

View File

@@ -2,4 +2,4 @@
(using a matrix.org account if necessary). We do not use GitHub issues for
support.
**If you want to report a security issue** please see https://element.io/security/security-disclosure-policy
**If you want to report a security issue** please see https://matrix.org/security-disclosure-policy/

View File

@@ -7,7 +7,7 @@ body:
**THIS IS NOT A SUPPORT CHANNEL!**
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**, please ask in **[#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org)** (using a matrix.org account if necessary).
If you want to report a security issue, please see https://element.io/security/security-disclosure-policy
If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/
This is a bug report form. By following the instructions below and completing the sections with your information, you will help the us to get all the necessary data to fix your issue.

View File

@@ -14,7 +14,7 @@ permissions:
id-token: write # needed for signing the images with GitHub OIDC Token
jobs:
build:
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
steps:
- name: Set up QEMU
id: qemu
@@ -30,7 +30,7 @@ jobs:
run: docker buildx inspect
- name: Install Cosign
uses: sigstore/cosign-installer@v3.7.0
uses: sigstore/cosign-installer@v3.5.0
- name: Checkout repository
uses: actions/checkout@v4
@@ -72,7 +72,7 @@ jobs:
- name: Build and push all platforms
id: build-and-push
uses: docker/build-push-action@v6
uses: docker/build-push-action@v5
with:
push: true
labels: |

View File

@@ -14,7 +14,7 @@ jobs:
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
- name: 📥 Download artifact
uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
uses: dawidd6/action-download-artifact@09f2f74827fd3a8607589e5ad7f9398816f540fe # v3.1.4
with:
workflow: docs-pr.yaml
run_id: ${{ github.event.workflow_run.id }}

View File

@@ -29,13 +29,17 @@ jobs:
with:
install-project: "false"
- name: Run ruff check
- name: Import order (isort)
continue-on-error: true
run: poetry run ruff check --fix .
run: poetry run isort .
- name: Run ruff format
- name: Code style (black)
continue-on-error: true
run: poetry run ruff format --quiet .
run: poetry run black .
- name: Semantic checks (ruff)
continue-on-error: true
run: poetry run ruff --fix .
- run: cargo clippy --all-features --fix -- -D warnings
continue-on-error: true
@@ -45,4 +49,4 @@ jobs:
- uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "Attempt to fix linting"
commit_message: "Attempt to fix linting"

View File

@@ -132,9 +132,9 @@ jobs:
fail-fast: false
matrix:
include:
- sytest-tag: bullseye
- sytest-tag: focal
- sytest-tag: bullseye
- sytest-tag: focal
postgres: postgres
workers: workers
redis: redis

View File

@@ -91,19 +91,10 @@ jobs:
rm -rf /tmp/.buildx-cache
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
- name: Artifact name
id: artifact-name
# We can't have colons in the upload name of the artifact, so we convert
# e.g. `debian:sid` to `sid`.
env:
DISTRO: ${{ matrix.distro }}
run: |
echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT"
- name: Upload debs as artifacts
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
with:
name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
name: debs
path: debs/*
build-wheels:
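The `Artifact name` step shown above leans on shell parameter expansion: `${DISTRO#*:}` removes the shortest prefix matching `*:`, so `debian:sid` becomes `sid` and the upload name contains no colon. The same transform in Python, purely for illustration:

```python
def artifact_name(distro: str) -> str:
    # Equivalent of the shell expansion ${DISTRO#*:}: drop everything up to
    # and including the first colon; strings without a colon pass through.
    _, sep, tail = distro.partition(":")
    return tail if sep else distro

assert artifact_name("debian:sid") == "sid"
assert artifact_name("sid") == "sid"
```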
@@ -111,7 +102,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-22.04, macos-13]
os: [ubuntu-20.04, macos-11]
arch: [x86_64, aarch64]
# is_pr is a flag used to exclude certain jobs from the matrix on PRs.
# It is not read by the rest of the workflow.
@@ -121,9 +112,9 @@ jobs:
exclude:
# Don't build macos wheels on PR CI.
- is_pr: true
os: "macos-13"
os: "macos-11"
# Don't build aarch64 wheels on mac.
- os: "macos-13"
- os: "macos-11"
arch: aarch64
# Don't build aarch64 wheels on PR CI.
- is_pr: true
@@ -139,7 +130,7 @@ jobs:
python-version: "3.x"
- name: Install cibuildwheel
run: python -m pip install cibuildwheel==2.19.1
run: python -m pip install cibuildwheel==2.16.2
- name: Set up QEMU to emulate aarch64
if: matrix.arch == 'aarch64'
@@ -153,7 +144,7 @@ jobs:
- name: Only build a single wheel on PR
if: startsWith(github.ref, 'refs/pull/')
run: echo "CIBW_BUILD="cp39-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
run: echo "CIBW_BUILD="cp38-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
- name: Build wheels
run: python -m cibuildwheel --output-dir wheelhouse
@@ -165,9 +156,9 @@ jobs:
CARGO_NET_GIT_FETCH_WITH_CLI: true
CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
with:
name: Wheel-${{ matrix.os }}-${{ matrix.arch }}
name: Wheel
path: ./wheelhouse/*.whl
build-sdist:
@@ -186,7 +177,7 @@ jobs:
- name: Build sdist
run: python -m build --sdist
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
with:
name: Sdist
path: dist/*.tar.gz
@@ -203,23 +194,17 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download all workflow run artifacts
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3 # Don't upgrade to v4, it should match upload-artifact
- name: Build a tarball for the debs
# We need to merge all the debs uploads into one folder, then compress
# that.
run: |
mkdir debs
mv debs*/* debs/
tar -cvJf debs.tar.xz debs
run: tar -cvJf debs.tar.xz debs
- name: Attach to release
# Pinned to work around https://github.com/softprops/action-gh-release/issues/445
uses: softprops/action-gh-release@v0.1.15
uses: softprops/action-gh-release@a929a66f232c1b11af63782948aa2210f981808a # PR#109
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
files: |
Sdist/*
Wheel*/*
Wheel/*
debs.tar.xz
# if it's not already published, keep the release as a draft.
draft: true
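The multi-line variant of the `Build a tarball for the debs` step above first merges every per-distro `debs-*` artifact directory into a single `debs/` folder, then compresses it. The same logic as those three shell commands, rendered in Python for illustration:

```python
import glob
import shutil
import tarfile
from pathlib import Path

Path("debs").mkdir(exist_ok=True)         # mkdir debs
for deb in glob.glob("debs-*/*"):         # mv debs*/* debs/
    shutil.move(deb, "debs")
with tarfile.open("debs.tar.xz", "w:xz") as tar:
    tar.add("debs")                       # tar -cvJf debs.tar.xz debs
```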

View File

@@ -21,7 +21,6 @@ jobs:
trial: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.trial }}
integration: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.integration }}
linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
linting_readme: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting_readme }}
steps:
- uses: dorny/paths-filter@v3
id: filter
@@ -74,9 +73,6 @@ jobs:
- 'poetry.lock'
- '.github/workflows/tests.yml'
linting_readme:
- 'README.rst'
check-sampleconfig:
runs-on: ubuntu-latest
needs: changes
@@ -131,11 +127,15 @@ jobs:
with:
install-project: "false"
- name: Run ruff check
run: poetry run ruff check --output-format=github .
- name: Import order (isort)
run: poetry run isort --check --diff .
- name: Run ruff format
run: poetry run ruff format --check .
- name: Code style (black)
run: poetry run black --check --diff .
- name: Semantic checks (ruff)
# --quiet suppresses the update check.
run: poetry run ruff --quiet .
lint-mypy:
runs-on: ubuntu-latest
@@ -269,20 +269,6 @@ jobs:
- run: cargo fmt --check
# This is to detect issues with the rst file, which can otherwise cause issues
# when uploading packages to PyPi.
lint-readme:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.linting_readme == 'true' }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.x"
- run: "pip install rstcheck"
- run: "rstcheck --report-level=WARNING README.rst"
# Dummy step to gate other tests on without repeating the whole list
linting-done:
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
@@ -298,10 +284,9 @@ jobs:
- lint-clippy
- lint-clippy-nightly
- lint-rustfmt
- lint-readme
runs-on: ubuntu-latest
steps:
- uses: matrix-org/done-action@v3
- uses: matrix-org/done-action@v2
with:
needs: ${{ toJSON(needs) }}
@@ -316,7 +301,6 @@ jobs:
lint-clippy
lint-clippy-nightly
lint-rustfmt
lint-readme
calculate-test-jobs:
@@ -397,7 +381,7 @@ jobs:
needs:
- linting-done
- changes
runs-on: ubuntu-22.04
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v4
@@ -409,12 +393,12 @@ jobs:
# their build dependencies
- run: |
sudo apt-get -qq update
sudo apt-get -qq install build-essential libffi-dev python3-dev \
sudo apt-get -qq install build-essential libffi-dev python-dev \
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
- uses: actions/setup-python@v5
with:
python-version: '3.9'
python-version: '3.8'
- name: Prepare old deps
if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
@@ -458,7 +442,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["pypy-3.9"]
python-version: ["pypy-3.8"]
extras: ["all"]
steps:
@@ -495,9 +479,6 @@ jobs:
volumes:
- ${{ github.workspace }}:/src
env:
# If this is a pull request to a release branch, use that branch as default branch for sytest, else use develop
# This works because the release script always create a branch on the sytest repo with the same name as the release branch
SYTEST_DEFAULT_BRANCH: ${{ startsWith(github.base_ref, 'release-') && github.base_ref || 'develop' }}
SYTEST_BRANCH: ${{ github.head_ref }}
POSTGRES: ${{ matrix.job.postgres && 1}}
MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') || '' }}
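The `SYTEST_DEFAULT_BRANCH` expression above encodes the fallback its comment describes: a PR against a `release-*` branch tests against the same-named sytest branch, and everything else uses `develop`. The GitHub Actions `&& … || …` idiom reads like a ternary; in Python terms (illustrative only):

```python
def sytest_default_branch(base_ref: str) -> str:
    # Mirrors: startsWith(github.base_ref, 'release-') && github.base_ref || 'develop'
    return base_ref if base_ref.startswith("release-") else "develop"

assert sytest_default_branch("release-v1.109") == "release-v1.109"
assert sytest_default_branch("develop") == "develop"
```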
@@ -580,11 +561,11 @@ jobs:
strategy:
matrix:
include:
- python-version: "3.9"
postgres-version: "13"
- python-version: "3.8"
postgres-version: "11"
- python-version: "3.13"
postgres-version: "17"
- python-version: "3.11"
postgres-version: "15"
services:
postgres:
@@ -733,7 +714,7 @@ jobs:
- linting-done
runs-on: ubuntu-latest
steps:
- uses: matrix-org/done-action@v3
- uses: matrix-org/done-action@v2
with:
needs: ${{ toJSON(needs) }}

View File

@@ -99,11 +99,11 @@ jobs:
if: needs.check_repo.outputs.should_run_workflow == 'true'
runs-on: ubuntu-latest
container:
# We're using debian:bullseye because it uses Python 3.9 which is our minimum supported Python version.
# We're using ubuntu:focal because it uses Python 3.8 which is our minimum supported Python version.
# This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
# they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
# version, assuming that any incompatibilities on newer versions would also be present on the oldest.
image: matrixdotorg/sytest-synapse:bullseye
image: matrixdotorg/sytest-synapse:focal
volumes:
- ${{ github.workspace }}:/src

CHANGES.md (2808 changed lines)

File diff suppressed because it is too large.

Cargo.lock (generated, 224 changed lines)
View File

@@ -13,9 +13,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.95"
version = "1.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
[[package]]
name = "arc-swap"
@@ -35,6 +35,12 @@ version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
name = "bitflags"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1"
[[package]]
name = "blake2"
version = "0.10.6"
@@ -61,9 +67,9 @@ checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
[[package]]
name = "bytes"
version = "1.9.0"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9"
[[package]]
name = "cfg-if"
@@ -124,8 +130,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
dependencies = [
"cfg-if",
"js-sys",
"libc",
"wasi",
"wasm-bindgen",
]
[[package]]
@@ -154,9 +162,9 @@ dependencies = [
[[package]]
name = "heck"
version = "0.5.0"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
[[package]]
name = "hex"
@@ -166,9 +174,9 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "http"
version = "1.2.0"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea"
checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258"
dependencies = [
"bytes",
"fnv",
@@ -204,9 +212,9 @@ dependencies = [
[[package]]
name = "lazy_static"
version = "1.5.0"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
@@ -215,10 +223,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346"
[[package]]
name = "log"
version = "0.4.25"
name = "lock_api"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f"
checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17"
dependencies = [
"autocfg",
"scopeguard",
]
[[package]]
name = "log"
version = "0.4.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"
[[package]]
name = "memchr"
@@ -247,6 +265,29 @@ version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
[[package]]
name = "parking_lot"
version = "0.12.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb"
dependencies = [
"lock_api",
"parking_lot_core",
]
[[package]]
name = "parking_lot_core"
version = "0.9.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"smallvec",
"windows-targets",
]
[[package]]
name = "portable-atomic"
version = "1.6.0"
@@ -261,25 +302,25 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "proc-macro2"
version = "1.0.89"
version = "1.0.82"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e"
checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b"
dependencies = [
"unicode-ident",
]
[[package]]
name = "pyo3"
version = "0.23.4"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57fe09249128b3173d092de9523eaa75136bf7ba85e0d69eca241c7939c933cc"
checksum = "a5e00b96a521718e08e03b1a622f01c8a8deb50719335de3f60b3b3950f069d8"
dependencies = [
"anyhow",
"cfg-if",
"indoc",
"libc",
"memoffset",
"once_cell",
"parking_lot",
"portable-atomic",
"pyo3-build-config",
"pyo3-ffi",
@@ -289,9 +330,9 @@ dependencies = [
[[package]]
name = "pyo3-build-config"
version = "0.23.4"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1cd3927b5a78757a0d71aa9dff669f903b1eb64b54142a9bd9f757f8fde65fd7"
checksum = "7883df5835fafdad87c0d888b266c8ec0f4c9ca48a5bed6bbb592e8dedee1b50"
dependencies = [
"once_cell",
"target-lexicon",
@@ -299,9 +340,9 @@ dependencies = [
[[package]]
name = "pyo3-ffi"
version = "0.23.4"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dab6bb2102bd8f991e7749f130a70d05dd557613e39ed2deeee8e9ca0c4d548d"
checksum = "01be5843dc60b916ab4dad1dca6d20b9b4e6ddc8e15f50c47fe6d85f1fb97403"
dependencies = [
"libc",
"pyo3-build-config",
@@ -309,9 +350,9 @@ dependencies = [
[[package]]
name = "pyo3-log"
version = "0.12.0"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3eb421dc86d38d08e04b927b02424db480be71b777fa3a56f32e2f2a3a1a3b08"
checksum = "2af49834b8d2ecd555177e63b273b708dea75150abc6f5341d0a6e1a9623976c"
dependencies = [
"arc-swap",
"log",
@@ -320,9 +361,9 @@ dependencies = [
[[package]]
name = "pyo3-macros"
version = "0.23.4"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91871864b353fd5ffcb3f91f2f703a22a9797c91b9ab497b1acac7b07ae509c7"
checksum = "77b34069fc0682e11b31dbd10321cbf94808394c56fd996796ce45217dfac53c"
dependencies = [
"proc-macro2",
"pyo3-macros-backend",
@@ -332,9 +373,9 @@ dependencies = [
[[package]]
name = "pyo3-macros-backend"
version = "0.23.4"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43abc3b80bc20f3facd86cd3c60beed58c3e2aa26213f3cda368de39c60a27e4"
checksum = "08260721f32db5e1a5beae69a55553f56b99bd0e1c3e6e0a5e8851a9d0f5a85c"
dependencies = [
"heck",
"proc-macro2",
@@ -345,9 +386,9 @@ dependencies = [
[[package]]
name = "pythonize"
version = "0.23.0"
version = "0.21.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91a6ee7a084f913f98d70cdc3ebec07e852b735ae3059a1500db2661265da9ff"
checksum = "9d0664248812c38cc55a4ed07f88e4df516ce82604b93b1ffdc041aa77a6cb3c"
dependencies = [
"pyo3",
"serde",
@@ -393,10 +434,19 @@ dependencies = [
]
[[package]]
name = "regex"
version = "1.11.1"
name = "redox_syscall"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e"
dependencies = [
"bitflags",
]
[[package]]
name = "regex"
version = "1.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c"
dependencies = [
"aho-corasick",
"memchr",
@@ -406,9 +456,9 @@ dependencies = [
[[package]]
name = "regex-automata"
version = "0.4.8"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3"
checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea"
dependencies = [
"aho-corasick",
"memchr",
@@ -417,9 +467,9 @@ dependencies = [
[[package]]
name = "regex-syntax"
version = "0.8.5"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56"
[[package]]
name = "ryu"
@@ -428,19 +478,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
[[package]]
name = "serde"
version = "1.0.217"
name = "scopeguard"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "serde"
version = "1.0.202"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "226b61a0d411b2ba5ff6d7f73a476ac4f8bb900373459cd00fab8512828ba395"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.217"
version = "1.0.202"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0"
checksum = "6048858004bcff69094cd972ed40a32500f153bd3be9f716b2eed2e8217c4838"
dependencies = [
"proc-macro2",
"quote",
@@ -449,12 +505,11 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.137"
version = "1.0.117"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "930cfb6e6abf99298aaad7d29abbef7a9999a9a8806a40088f55f0dcec03146b"
checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3"
dependencies = [
"itoa",
"memchr",
"ryu",
"serde",
]
@@ -481,6 +536,12 @@ dependencies = [
"digest",
]
[[package]]
name = "smallvec"
version = "1.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
[[package]]
name = "subtle"
version = "2.5.0"
@@ -489,9 +550,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
[[package]]
name = "syn"
version = "2.0.85"
version = "2.0.61"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5023162dfcd14ef8f32034d8bcd4cc5ddc61ef7a247c024a33e24e1f24d21b56"
checksum = "c993ed8ccba56ae856363b1845da7266a7cb78e1d146c8a32d54b45a8b831fc9"
dependencies = [
"proc-macro2",
"quote",
@@ -536,10 +597,11 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
[[package]]
name = "ulid"
version = "1.1.4"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f294bff79170ed1c5633812aff1e565c35d993a36e757f9bc0accf5eec4e6045"
checksum = "34778c17965aa2a08913b57e1f34db9b4a63f5de31768b55bf20d2795f921259"
dependencies = [
"getrandom",
"rand",
"web-time",
]
@@ -631,3 +693,67 @@ dependencies = [
"js-sys",
"wasm-bindgen",
]
[[package]]
name = "windows-targets"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"
[[package]]
name = "windows_i686_gnu"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"
[[package]]
name = "windows_i686_msvc"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0"

View File

@@ -1,34 +1,21 @@
.. image:: ./docs/element_logo_white_bg.svg
:height: 60px
=========================================================================
Synapse |support| |development| |documentation| |license| |pypi| |python|
=========================================================================
**Element Synapse - Matrix homeserver implementation**
Synapse is an open-source `Matrix <https://matrix.org/>`_ homeserver written and
maintained by the Matrix.org Foundation. We began rapid development in 2014,
reaching v1.0.0 in 2019. Development on Synapse and the Matrix protocol itself continues
in earnest today.
|support| |development| |documentation| |license| |pypi| |python|
Synapse is an open source `Matrix <https://matrix.org>`__ homeserver
implementation, written and maintained by `Element <https://element.io>`_.
`Matrix <https://github.com/matrix-org>`__ is the open standard for
secure and interoperable real time communications. You can directly run
and manage the source code in this repository, available under an AGPL
license. There is no support provided from Element unless you have a
subscription.
Subscription alternative
========================
Alternatively, for those that need an enterprise-ready solution, Element
Server Suite (ESS) is `available as a subscription <https://element.io/pricing>`_.
ESS builds on Synapse to offer a complete Matrix-based backend including the full
`Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
giving admins the power to easily manage an organization-wide
deployment. It includes advanced identity management, auditing,
moderation and data retention options as well as Long Term Support and
SLAs. ESS can be used to support any Matrix-based frontend client.
Briefly, Matrix is an open standard for communications on the internet, supporting
federation, encryption and VoIP. Matrix.org has more to say about the `goals of the
Matrix project <https://matrix.org/docs/guides/introduction>`_, and the `formal specification
<https://spec.matrix.org/>`_ describes the technical details.
.. contents::
🛠️ Installing and configuration
===============================
Installing and configuration
============================
The Synapse documentation describes `how to install Synapse <https://element-hq.github.io/synapse/latest/setup/installation.html>`_. We recommend using
`Docker images <https://element-hq.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
@@ -118,8 +105,8 @@ Following this advice ensures that even if an XSS is found in Synapse, the
impact to other applications will be minimal.
🧪 Testing a new installation
=============================
Testing a new installation
==========================
The easiest way to try out your new Synapse installation is by connecting to it
from a web client.
@@ -158,7 +145,7 @@ it:
We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to
the public internet. Without it, anyone can freely register accounts on your homeserver.
This can be exploited by attackers to create spambots targeting the rest of the Matrix
This can be exploited by attackers to create spambots targetting the rest of the Matrix
federation.
Your new user name will be formed partly from the ``server_name``, and partly
@@ -172,20 +159,8 @@ the form of::
As when logging in, you will need to specify a "Custom server". Specify your
desired ``localpart`` in the 'User name' box.
🎯 Troubleshooting and support
==============================
🚀 Professional support
-----------------------
Enterprise quality support for Synapse including SLAs is available as part of an
`Element Server Suite (ESS) <https://element.io/pricing>`_ subscription.
If you are an existing ESS subscriber then you can raise a `support request <https://ems.element.io/support>`_
and access the `knowledge base <https://ems-docs.element.io>`_.
🤝 Community support
--------------------
Troubleshooting and support
===========================
The `Admin FAQ <https://element-hq.github.io/synapse/latest/usage/administration/admin_faq.html>`_
includes tips on dealing with some common problems. For more details, see
@@ -201,8 +176,8 @@ issues for support requests, only for bug reports and feature requests.
.. |docs| replace:: ``docs``
.. _docs: docs
🪪 Identity Servers
===================
Identity Servers
================
Identity servers have the job of mapping email addresses and other 3rd Party
IDs (3PIDs) to Matrix user IDs, as well as verifying the ownership of 3PIDs
@@ -231,8 +206,8 @@ an email address with your account, or send an invite to another user via their
email address.
🛠️ Development
==============
Development
===========
We welcome contributions to Synapse from the community!
The best place to get started is our
@@ -250,8 +225,8 @@ Alongside all that, join our developer community on Matrix:
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
.. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
:alt: (get community support in #synapse:matrix.org)
.. |support| image:: https://img.shields.io/matrix/synapse:matrix.org?label=support&logo=matrix
:alt: (get support on #synapse:matrix.org)
:target: https://matrix.to/#/#synapse:matrix.org
.. |development| image:: https://img.shields.io/matrix/synapse-dev:matrix.org?label=development&logo=matrix

View File

@@ -1,10 +1,8 @@
# A build script for poetry that adds the rust extension.
import itertools
import os
from typing import Any, Dict
from packaging.specifiers import SpecifierSet
from setuptools_rust import Binding, RustExtension
@@ -16,8 +14,6 @@ def build(setup_kwargs: Dict[str, Any]) -> None:
target="synapse.synapse_rust",
path=cargo_toml_path,
binding=Binding.PyO3,
# This flag is a no-op in the latest versions. Instead, we need to
# specify this in the `bdist_wheel` config below.
py_limited_api=True,
# We force always building in release mode, as we can't tell the
# difference between using `poetry` in development vs production.
@@ -25,18 +21,3 @@ def build(setup_kwargs: Dict[str, Any]) -> None:
)
setup_kwargs.setdefault("rust_extensions", []).append(extension)
setup_kwargs["zip_safe"] = False
# We lookup the minimum supported python version by looking at
# `python_requires` (e.g. ">=3.9.0,<4.0.0") and finding the first python
# version that matches. We then convert that into the `py_limited_api` form,
# e.g. cp39 for python 3.9.
py_limited_api: str
python_bounds = SpecifierSet(setup_kwargs["python_requires"])
for minor_version in itertools.count(start=8):
if f"3.{minor_version}.0" in python_bounds:
py_limited_api = f"cp3{minor_version}"
break
setup_kwargs.setdefault("options", {}).setdefault("bdist_wheel", {})[
"py_limited_api"
] = py_limited_api
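The `py_limited_api` computation here works because `packaging`'s `SpecifierSet` supports membership tests against version strings, so counting minor versions upwards finds the oldest Python satisfying `python_requires`, which then becomes the abi3 wheel tag. Condensed into a runnable snippet:

```python
import itertools

from packaging.specifiers import SpecifierSet

python_bounds = SpecifierSet(">=3.9.0,<4.0.0")  # example python_requires value
for minor_version in itertools.count(start=8):
    if f"3.{minor_version}.0" in python_bounds:
        print(f"cp3{minor_version}")  # -> cp39: the py_limited_api tag
        break
```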

View File

@@ -0,0 +1 @@
Add the ability to auto-accept invites on the behalf of users. See the [`auto_accept_invites`](https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#auto-accept-invites) config option for details.

changelog.d/17204.doc (new file, 1 line)
View File

@@ -0,0 +1 @@
Update OIDC documentation: by default Matrix doesn't query userinfo endpoint, then claims should be put on id_token.

changelog.d/17211.misc (new file, 1 line)
View File

@@ -0,0 +1 @@
Reduce work of calculating outbound device lists updates.

changelog.d/17216.misc (new file, 1 line)
View File

@@ -0,0 +1 @@
Improve performance of calculating device lists changes in `/sync`.

changelog.d/17228.docker (new file, 1 line)
View File

@@ -0,0 +1 @@
Support loading pluggable modules from Docker images.

View File

@@ -21,8 +21,7 @@
#
#
"""Starts a synapse client console."""
""" Starts a synapse client console. """
import argparse
import binascii
import cmd
@@ -245,7 +244,7 @@ class SynapseCmd(cmd.Cmd):
if "flows" not in json_res:
print("Failed to find any login flows.")
return False
defer.returnValue(False)
flow = json_res["flows"][0] # assume first is the one we want.
if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow:
@@ -254,8 +253,8 @@ class SynapseCmd(cmd.Cmd):
"Unable to login via the command line client. Please visit "
"%s to login." % fallback_url
)
return False
return True
defer.returnValue(False)
defer.returnValue(True)
def do_emailrequest(self, line):
"""Requests the association of a third party identifier

View File

@@ -78,7 +78,7 @@ class TwistedHttpClient(HttpClient):
url, data, headers_dict={"Content-Type": ["application/json"]}
)
body = yield readBody(response)
return response.code, body
defer.returnValue((response.code, body))
@defer.inlineCallbacks
def get_json(self, url, args=None):
@@ -88,7 +88,7 @@ class TwistedHttpClient(HttpClient):
url = "%s?%s" % (url, qs)
response = yield self._create_get_request(url)
body = yield readBody(response)
return json.loads(body)
defer.returnValue(json.loads(body))
def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
"""Wrapper of _create_request to issue a PUT request"""
@@ -134,7 +134,7 @@ class TwistedHttpClient(HttpClient):
response = yield self._create_request(method, url)
body = yield readBody(response)
return json.loads(body)
defer.returnValue(json.loads(body))
@defer.inlineCallbacks
def _create_request(
@@ -173,7 +173,7 @@ class TwistedHttpClient(HttpClient):
if self.verbose:
print("Status %s %s" % (response.code, response.phrase))
print(pformat(list(response.headers.getAllRawHeaders())))
return response
defer.returnValue(response)
def sleep(self, seconds):
d = defer.Deferred()
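The change running through these two client scripts is mechanical: inside a Twisted `@defer.inlineCallbacks` generator, Python 3 allows a plain `return value`, making the Python 2 era `defer.returnValue(value)` unnecessary. The two spellings behave identically; a minimal sketch:

```python
from twisted.internet import defer

@defer.inlineCallbacks
def fetch_code_old_style(client, url):
    response = yield client.get(url)
    defer.returnValue(response.code)  # Python 2: generators couldn't return values

@defer.inlineCallbacks
def fetch_code_new_style(client, url):
    response = yield client.get(url)
    return response.code  # Python 3: a plain return works
```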

View File

@@ -30,6 +30,3 @@ docker-compose up -d
### More information
For more information on required environment variables and mounts, see the main docker documentation at [/docker/README.md](../../docker/README.md)
**For a more comprehensive Docker Compose example showcasing a full Matrix 2.0 stack, please see
https://github.com/element-hq/element-docker-demo**

View File

@@ -51,7 +51,7 @@ services:
- traefik.http.routers.https-synapse.tls.certResolver=le-ssl
db:
image: docker.io/postgres:15-alpine
image: docker.io/postgres:12-alpine
# Change that password, of course!
environment:
- POSTGRES_USER=synapse

View File

@@ -8,9 +8,6 @@ All examples and snippets assume that your Synapse service is called `synapse` i
An example Docker Compose file can be found [here](docker-compose.yaml).
**For a more comprehensive Docker Compose example, showcasing a full Matrix 2.0 stack (originally based on this
docker-compose.yaml), please see https://github.com/element-hq/element-docker-demo**
## Worker Service Examples in Docker Compose
In order to start the Synapse container as a worker, you must specify an `entrypoint` that loads both the `homeserver.yaml` and the configuration for the worker (`synapse-generic-worker-1.yaml` in the example below). You must also include the worker type in the environment variable `SYNAPSE_WORKER` or alternatively pass `-m synapse.app.generic_worker` as part of the `entrypoint` after `"/start.py", "run"`).

View File

@@ -20,8 +20,8 @@
#
import argparse
import cgi
import datetime
import html
import json
import urllib.request
from typing import List
@@ -85,7 +85,7 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
"name": name,
"type": pdu.get("pdu_type"),
"state_key": pdu.get("state_key"),
"content": html.escape(json.dumps(pdu.get("content")), quote=True),
"content": cgi.escape(json.dumps(pdu.get("content")), quote=True),
"time": t,
"depth": pdu.get("depth"),
}
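One side of this hunk uses `cgi.escape`, which was deprecated in Python 3.2 and removed in 3.8; `html.escape` is the stdlib replacement and, unlike `cgi.escape`, escapes quotes by default, so the explicit `quote=True` keeps the behaviour obvious. For example:

```python
import html

# Escapes &, <, > unconditionally, plus " and ' when quote=True (the default).
print(html.escape('{"content": "<b>hi</b>"}', quote=True))
# -> {&quot;content&quot;: &quot;&lt;b&gt;hi&lt;/b&gt;&quot;}
```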

debian/changelog (vendored, 277 changed lines)
View File

@@ -1,280 +1,3 @@
matrix-synapse-py3 (1.124.0~rc1) stable; urgency=medium
* New Synapse release 1.124.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Feb 2025 11:53:05 +0000
matrix-synapse-py3 (1.123.0) stable; urgency=medium
* New Synapse release 1.123.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Jan 2025 08:37:34 -0700
matrix-synapse-py3 (1.123.0~rc1) stable; urgency=medium
* New Synapse release 1.123.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Jan 2025 14:39:57 +0100
matrix-synapse-py3 (1.122.0) stable; urgency=medium
* New Synapse release 1.122.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 14 Jan 2025 14:14:14 +0000
matrix-synapse-py3 (1.122.0~rc1) stable; urgency=medium
* New Synapse release 1.122.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Jan 2025 14:06:19 +0000
matrix-synapse-py3 (1.121.1) stable; urgency=medium
* New Synapse release 1.121.1.
-- Synapse Packaging team <packages@matrix.org> Wed, 11 Dec 2024 18:24:48 +0000
matrix-synapse-py3 (1.121.0) stable; urgency=medium
* New Synapse release 1.121.0.
-- Synapse Packaging team <packages@matrix.org> Wed, 11 Dec 2024 13:12:30 +0100
matrix-synapse-py3 (1.121.0~rc1) stable; urgency=medium
* New Synapse release 1.121.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 04 Dec 2024 14:47:23 +0000
matrix-synapse-py3 (1.120.2) stable; urgency=medium
* New synapse release 1.120.2.
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Dec 2024 15:43:37 +0000
matrix-synapse-py3 (1.120.1) stable; urgency=medium
* New synapse release 1.120.1.
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Dec 2024 09:07:57 +0000
matrix-synapse-py3 (1.120.0) stable; urgency=medium
* New synapse release 1.120.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Nov 2024 13:10:23 +0000
matrix-synapse-py3 (1.120.0~rc1) stable; urgency=medium
* New Synapse release 1.120.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 20 Nov 2024 15:02:21 +0000
matrix-synapse-py3 (1.119.0) stable; urgency=medium
* New Synapse release 1.119.0.
-- Synapse Packaging team <packages@matrix.org> Wed, 13 Nov 2024 13:57:51 +0000
matrix-synapse-py3 (1.119.0~rc2) stable; urgency=medium
* New Synapse release 1.119.0rc2.
-- Synapse Packaging team <packages@matrix.org> Mon, 11 Nov 2024 14:33:02 +0000
matrix-synapse-py3 (1.119.0~rc1) stable; urgency=medium
* New Synapse release 1.119.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 06 Nov 2024 08:59:43 -0700
matrix-synapse-py3 (1.118.0) stable; urgency=medium
* New Synapse release 1.118.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 29 Oct 2024 15:29:53 +0100
matrix-synapse-py3 (1.118.0~rc1) stable; urgency=medium
* New Synapse release 1.118.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 22 Oct 2024 11:48:14 +0100
matrix-synapse-py3 (1.117.0) stable; urgency=medium
* New Synapse release 1.117.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 15 Oct 2024 10:46:30 +0100
matrix-synapse-py3 (1.117.0~rc1) stable; urgency=medium
* New Synapse release 1.117.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 08 Oct 2024 14:37:11 +0100
matrix-synapse-py3 (1.116.0) stable; urgency=medium
* New Synapse release 1.116.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Oct 2024 11:14:07 +0100
matrix-synapse-py3 (1.116.0~rc2) stable; urgency=medium
* New synapse release 1.116.0rc2.
-- Synapse Packaging team <packages@matrix.org> Thu, 26 Sep 2024 13:28:43 +0000
matrix-synapse-py3 (1.116.0~rc1) stable; urgency=medium
* New synapse release 1.116.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 25 Sep 2024 09:34:07 +0000
matrix-synapse-py3 (1.115.0) stable; urgency=medium
* New Synapse release 1.115.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 17 Sep 2024 14:32:10 +0100
matrix-synapse-py3 (1.115.0~rc2) stable; urgency=medium
* New Synapse release 1.115.0rc2.
-- Synapse Packaging team <packages@matrix.org> Thu, 12 Sep 2024 11:10:15 +0100
matrix-synapse-py3 (1.115.0~rc1) stable; urgency=medium
* New Synapse release 1.115.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 10 Sep 2024 08:39:09 -0600
matrix-synapse-py3 (1.114.0) stable; urgency=medium
* New Synapse release 1.114.0.
-- Synapse Packaging team <packages@matrix.org> Mon, 02 Sep 2024 15:14:53 +0100
matrix-synapse-py3 (1.114.0~rc3) stable; urgency=medium
* New Synapse release 1.114.0rc3.
-- Synapse Packaging team <packages@matrix.org> Fri, 30 Aug 2024 16:38:05 +0100
matrix-synapse-py3 (1.114.0~rc2) stable; urgency=medium
* New Synapse release 1.114.0rc2.
-- Synapse Packaging team <packages@matrix.org> Fri, 30 Aug 2024 15:35:13 +0100
matrix-synapse-py3 (1.114.0~rc1) stable; urgency=medium
* New synapse release 1.114.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 20 Aug 2024 12:55:28 +0000
matrix-synapse-py3 (1.113.0) stable; urgency=medium
* New Synapse release 1.113.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 13 Aug 2024 14:36:56 +0100
matrix-synapse-py3 (1.113.0~rc1) stable; urgency=medium
* New Synapse release 1.113.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 06 Aug 2024 12:23:23 +0100
matrix-synapse-py3 (1.112.0) stable; urgency=medium
* New Synapse release 1.112.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 30 Jul 2024 17:15:48 +0100
matrix-synapse-py3 (1.112.0~rc1) stable; urgency=medium
* New Synapse release 1.112.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 23 Jul 2024 08:58:55 -0600
matrix-synapse-py3 (1.111.1) stable; urgency=medium
* New Synapse release 1.111.1.
-- Synapse Packaging team <packages@matrix.org> Tue, 30 Jul 2024 16:13:52 +0100
matrix-synapse-py3 (1.111.0) stable; urgency=medium
* New Synapse release 1.111.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 16 Jul 2024 12:42:46 +0200
matrix-synapse-py3 (1.111.0~rc2) stable; urgency=medium
* New synapse release 1.111.0rc2.
-- Synapse Packaging team <packages@matrix.org> Wed, 10 Jul 2024 08:46:54 +0000
matrix-synapse-py3 (1.111.0~rc1) stable; urgency=medium
* New synapse release 1.111.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 09 Jul 2024 09:49:25 +0000
matrix-synapse-py3 (1.110.0) stable; urgency=medium
* New Synapse release 1.110.0.
-- Synapse Packaging team <packages@matrix.org> Wed, 03 Jul 2024 09:08:59 -0600
matrix-synapse-py3 (1.110.0~rc3) stable; urgency=medium
* New Synapse release 1.110.0rc3.
-- Synapse Packaging team <packages@matrix.org> Tue, 02 Jul 2024 08:28:56 -0600
matrix-synapse-py3 (1.110.0~rc2) stable; urgency=medium
* New Synapse release 1.110.0rc2.
-- Synapse Packaging team <packages@matrix.org> Wed, 26 Jun 2024 18:14:48 +0200
matrix-synapse-py3 (1.110.0~rc1) stable; urgency=medium
* `register_new_matrix_user` now supports a --password-file and a --exists-ok flag.
* New Synapse release 1.110.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 26 Jun 2024 14:07:56 +0200
matrix-synapse-py3 (1.109.0) stable; urgency=medium
* New synapse release 1.109.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Jun 2024 09:45:15 +0000
matrix-synapse-py3 (1.109.0~rc3) stable; urgency=medium
* New synapse release 1.109.0rc3.
-- Synapse Packaging team <packages@matrix.org> Mon, 17 Jun 2024 12:05:24 +0000
matrix-synapse-py3 (1.109.0~rc2) stable; urgency=medium
* New synapse release 1.109.0rc2.
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Jun 2024 13:20:17 +0000
matrix-synapse-py3 (1.109.0~rc1) stable; urgency=medium
* New Synapse release 1.109.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Jun 2024 09:42:46 +0100
matrix-synapse-py3 (1.108.0) stable; urgency=medium
* New Synapse release 1.108.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 28 May 2024 11:54:22 +0100
matrix-synapse-py3 (1.108.0~rc1) stable; urgency=medium
* New Synapse release 1.108.0rc1.

View File

@@ -1,13 +1,10 @@
.\" generated with Ronn-NG/v0.10.1
.\" http://github.com/apjanke/ronn-ng/tree/0.10.1
.TH "HASH_PASSWORD" "1" "August 2024" ""
.\" generated with Ronn-NG/v0.8.0
.\" http://github.com/apjanke/ronn-ng/tree/0.8.0
.TH "HASH_PASSWORD" "1" "July 2021" "" ""
.SH "NAME"
\fBhash_password\fR \- Calculate the hash of a new password, so that passwords can be reset
.SH "SYNOPSIS"
.TS
allbox;
\fBhash_password\fR [\fB\-p\fR \fB\-\-password\fR [password]] [\fB\-c\fR \fB\-\-config\fR \fIfile\fR]
.TE
\fBhash_password\fR [\fB\-p\fR|\fB\-\-password\fR [password]] [\fB\-c\fR|\fB\-\-config\fR \fIfile\fR]
.SH "DESCRIPTION"
\fBhash_password\fR calculates the hash of a supplied password using bcrypt\.
.P
@@ -23,7 +20,7 @@ bcrypt_rounds: 17 password_config: pepper: "random hashing pepper"
.SH "OPTIONS"
.TP
\fB\-p\fR, \fB\-\-password\fR
Read the password form the command line if [password] is supplied, or from \fBSTDIN\fR\. If not, prompt the user and read the password from the tty prompt\. It is not recommended to type the password on the command line directly\. Use the STDIN instead\.
Read the password form the command line if [password] is supplied\. If not, prompt the user and read the password form the \fBSTDIN\fR\. It is not recommended to type the password on the command line directly\. Use the STDIN instead\.
.TP
\fB\-c\fR, \fB\-\-config\fR
Read the supplied YAML \fIfile\fR containing the options \fBbcrypt_rounds\fR and the \fBpassword_config\fR section containing the \fBpepper\fR value\.
@@ -36,17 +33,7 @@ $2b$12$VJNqWQYfsWTEwcELfoSi4Oa8eA17movHqqi8\.X8fWFpum7SxZ9MFe
.fi
.IP "" 0
.P
Hash from the stdin:
.IP "" 4
.nf
$ cat password_file | hash_password
Password:
Confirm password:
$2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX\.rcuAbM8ErLoUhybG
.fi
.IP "" 0
.P
Hash from the prompt:
Hash from the STDIN:
.IP "" 4
.nf
$ hash_password
@@ -66,6 +53,6 @@ $2b$12$CwI\.wBNr\.w3kmiUlV3T5s\.GT2wH7uebDCovDrCOh18dFedlANK99O
.fi
.IP "" 0
.SH "COPYRIGHT"
This man page was written by Rahul De «rahulde@swecha\.net» for Debian GNU/Linux distribution\.
This man page was written by Rahul De <\fI\%mailto:rahulde@swecha\.net\fR> for Debian GNU/Linux distribution\.
.SH "SEE ALSO"
synctl(1), synapse_port_db(1), register_new_matrix_user(1), synapse_review_recent_signups(1)

View File

@@ -1,182 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta http-equiv='content-type' content='text/html;charset=utf-8'>
<meta name='generator' content='Ronn-NG/v0.10.1 (http://github.com/apjanke/ronn-ng/tree/0.10.1)'>
<title>hash_password(1) - Calculate the hash of a new password, so that passwords can be reset</title>
<style type='text/css' media='all'>
/* style: man */
body#manpage {margin:0}
.mp {max-width:100ex;padding:0 9ex 1ex 4ex}
.mp p,.mp pre,.mp ul,.mp ol,.mp dl {margin:0 0 20px 0}
.mp h2 {margin:10px 0 0 0}
.mp > p,.mp > pre,.mp > ul,.mp > ol,.mp > dl {margin-left:8ex}
.mp h3 {margin:0 0 0 4ex}
.mp dt {margin:0;clear:left}
.mp dt.flush {float:left;width:8ex}
.mp dd {margin:0 0 0 9ex}
.mp h1,.mp h2,.mp h3,.mp h4 {clear:left}
.mp pre {margin-bottom:20px}
.mp pre+h2,.mp pre+h3 {margin-top:22px}
.mp h2+pre,.mp h3+pre {margin-top:5px}
.mp img {display:block;margin:auto}
.mp h1.man-title {display:none}
.mp,.mp code,.mp pre,.mp tt,.mp kbd,.mp samp,.mp h3,.mp h4 {font-family:monospace;font-size:14px;line-height:1.42857142857143}
.mp h2 {font-size:16px;line-height:1.25}
.mp h1 {font-size:20px;line-height:2}
.mp {text-align:justify;background:#fff}
.mp,.mp code,.mp pre,.mp pre code,.mp tt,.mp kbd,.mp samp {color:#131211}
.mp h1,.mp h2,.mp h3,.mp h4 {color:#030201}
.mp u {text-decoration:underline}
.mp code,.mp strong,.mp b {font-weight:bold;color:#131211}
.mp em,.mp var {font-style:italic;color:#232221;text-decoration:none}
.mp a,.mp a:link,.mp a:hover,.mp a code,.mp a pre,.mp a tt,.mp a kbd,.mp a samp {color:#0000ff}
.mp b.man-ref {font-weight:normal;color:#434241}
.mp pre {padding:0 4ex}
.mp pre code {font-weight:normal;color:#434241}
.mp h2+pre,h3+pre {padding-left:0}
ol.man-decor,ol.man-decor li {margin:3px 0 10px 0;padding:0;float:left;width:33%;list-style-type:none;text-transform:uppercase;color:#999;letter-spacing:1px}
ol.man-decor {width:100%}
ol.man-decor li.tl {text-align:left}
ol.man-decor li.tc {text-align:center;letter-spacing:4px}
ol.man-decor li.tr {text-align:right;float:right}
</style>
</head>
<!--
The following styles are deprecated and will be removed at some point:
div#man, div#man ol.man, div#man ol.head, div#man ol.man.
The .man-page, .man-decor, .man-head, .man-foot, .man-title, and
.man-navigation should be used instead.
-->
<body id='manpage'>
<div class='mp' id='man'>
<div class='man-navigation' style='display:none'>
<a href="#NAME">NAME</a>
<a href="#SYNOPSIS">SYNOPSIS</a>
<a href="#DESCRIPTION">DESCRIPTION</a>
<a href="#FILES">FILES</a>
<a href="#OPTIONS">OPTIONS</a>
<a href="#EXAMPLES">EXAMPLES</a>
<a href="#COPYRIGHT">COPYRIGHT</a>
<a href="#SEE-ALSO">SEE ALSO</a>
</div>
<ol class='man-decor man-head man head'>
<li class='tl'>hash_password(1)</li>
<li class='tc'></li>
<li class='tr'>hash_password(1)</li>
</ol>
<h2 id="NAME">NAME</h2>
<p class="man-name">
<code>hash_password</code> - <span class="man-whatis">Calculate the hash of a new password, so that passwords can be reset</span>
</p>
<h2 id="SYNOPSIS">SYNOPSIS</h2>
<table>
<tbody>
<tr>
<td>
<code>hash_password</code> [<code>-p</code>
</td>
<td>
<code>--password</code> [password]] [<code>-c</code>
</td>
<td>
<code>--config</code> <var>file</var>]</td>
</tr>
</tbody>
</table>
<h2 id="DESCRIPTION">DESCRIPTION</h2>
<p><strong>hash_password</strong> calculates the hash of a supplied password using bcrypt.</p>
<p><code>hash_password</code> takes a password as an parameter either on the command line
or the <code>STDIN</code> if not supplied.</p>
<p>It accepts an YAML file which can be used to specify parameters like the
number of rounds for bcrypt and password_config section having the pepper
value used for the hashing. By default <code>bcrypt_rounds</code> is set to <strong>12</strong>.</p>
<p>The hashed password is written on the <code>STDOUT</code>.</p>
<h2 id="FILES">FILES</h2>
<p>A sample YAML file accepted by <code>hash_password</code> is described below:</p>
<p>bcrypt_rounds: 17
password_config:
pepper: "random hashing pepper"</p>
<h2 id="OPTIONS">OPTIONS</h2>
<dl>
<dt>
<code>-p</code>, <code>--password</code>
</dt>
<dd>Read the password form the command line if [password] is supplied, or from <code>STDIN</code>.
If not, prompt the user and read the password from the tty prompt.
It is not recommended to type the password on the command line
directly. Use the STDIN instead.</dd>
<dt>
<code>-c</code>, <code>--config</code>
</dt>
<dd>Read the supplied YAML <var>file</var> containing the options <code>bcrypt_rounds</code>
and the <code>password_config</code> section containing the <code>pepper</code> value.</dd>
</dl>
<h2 id="EXAMPLES">EXAMPLES</h2>
<p>Hash from the command line:</p>
<pre><code>$ hash_password -p "p@ssw0rd"
$2b$12$VJNqWQYfsWTEwcELfoSi4Oa8eA17movHqqi8.X8fWFpum7SxZ9MFe
</code></pre>
<p>Hash from the stdin:</p>
<pre><code>$ cat password_file | hash_password
Password:
Confirm password:
$2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX.rcuAbM8ErLoUhybG
</code></pre>
<p>Hash from the prompt:</p>
<pre><code>$ hash_password
Password:
Confirm password:
$2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX.rcuAbM8ErLoUhybG
</code></pre>
<p>Using a config file:</p>
<pre><code>$ hash_password -c config.yml
Password:
Confirm password:
$2b$12$CwI.wBNr.w3kmiUlV3T5s.GT2wH7uebDCovDrCOh18dFedlANK99O
</code></pre>
<h2 id="COPYRIGHT">COPYRIGHT</h2>
<p>This man page was written by Rahul De «rahulde@swecha.net»
for Debian GNU/Linux distribution.</p>
<h2 id="SEE-ALSO">SEE ALSO</h2>
<p><span class="man-ref">synctl<span class="s">(1)</span></span>, <span class="man-ref">synapse_port_db<span class="s">(1)</span></span>, <span class="man-ref">register_new_matrix_user<span class="s">(1)</span></span>, <span class="man-ref">synapse_review_recent_signups<span class="s">(1)</span></span></p>
<ol class='man-decor man-foot man foot'>
<li class='tl'></li>
<li class='tc'>August 2024</li>
<li class='tr'>hash_password(1)</li>
</ol>
</div>
</body>
</html>

View File

@@ -29,8 +29,8 @@ A sample YAML file accepted by `hash_password` is described below:
## OPTIONS
* `-p`, `--password`:
Read the password form the command line if [password] is supplied, or from `STDIN`.
If not, prompt the user and read the password from the tty prompt.
Read the password form the command line if [password] is supplied.
If not, prompt the user and read the password form the `STDIN`.
It is not recommended to type the password on the command line
directly. Use the STDIN instead.
@@ -45,14 +45,7 @@ Hash from the command line:
$ hash_password -p "p@ssw0rd"
$2b$12$VJNqWQYfsWTEwcELfoSi4Oa8eA17movHqqi8.X8fWFpum7SxZ9MFe
Hash from the stdin:
$ cat password_file | hash_password
Password:
Confirm password:
$2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX.rcuAbM8ErLoUhybG
Hash from the prompt:
Hash from the STDIN:
$ hash_password
Password:
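Both versions of these pages describe the same core operation: a bcrypt hash with a configurable `bcrypt_rounds` (default 12) and an optional pepper from `password_config`. A minimal sketch, assuming the `bcrypt` package; the exact way Synapse combines password and pepper is not shown in these pages:

```python
import bcrypt

def hash_password(password: str, pepper: str = "", bcrypt_rounds: int = 12) -> str:
    # Hypothetical pepper handling: appended to the password before hashing.
    digest = bcrypt.hashpw(
        (password + pepper).encode("utf-8"),
        bcrypt.gensalt(rounds=bcrypt_rounds),
    )
    return digest.decode("ascii")
```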

View File

@@ -31,12 +31,8 @@ A sample YAML file accepted by `register_new_matrix_user` is described below:
Local part of the new user. Will prompt if omitted.
* `-p`, `--password`:
New password for user. Will prompt if this option and `--password-file` are omitted.
Supplying the password on the command line is not recommended.
* `--password-file`:
File containing the new password for user. If set, overrides `--password`.
This is a more secure alternative to specifying the password on the command line.
New password for user. Will prompt if omitted. Supplying the password
on the command line is not recommended. Use the STDIN instead.
* `-a`, `--admin`:
Register new user as an admin. Will prompt if omitted.
@@ -48,9 +44,6 @@ A sample YAML file accepted by `register_new_matrix_user` is described below:
Shared secret as defined in server config file. This is an optional
parameter as it can be also supplied via the YAML file.
* `--exists-ok`:
Do not fail if the user already exists. The user account will be not updated in this case.
* `server_url`:
URL of the home server. Defaults to 'https://localhost:8448'.

debian/templates (vendored, 2 changed lines)
View File

@@ -5,7 +5,7 @@ _Description: Name of the server:
servers via federation. This is normally the public hostname of the
server running synapse, but can be different if you set up delegation.
Please refer to the delegation documentation in this case:
https://element-hq.github.io/synapse/latest/delegate.html.
https://github.com/element-hq/synapse/blob/master/docs/delegate.md.
Template: matrix-synapse/report-stats
Type: boolean

View File

@@ -20,14 +20,14 @@
# `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs in
# in `poetry export` in the past.
ARG PYTHON_VERSION=3.12
ARG PYTHON_VERSION=3.11
###
### Stage 0: generate requirements.txt
###
# We hardcode the use of Debian bookworm here because this could change upstream
# and other Dockerfiles used for testing are expecting bookworm.
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm AS requirements
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm as requirements
# RUN --mount is specific to buildkit and is documented at
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
@@ -87,7 +87,7 @@ RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
###
### Stage 1: builder
###
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm AS builder
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm as builder
# install the OS build deps
RUN \

View File

@@ -24,7 +24,7 @@ ARG distro=""
# https://launchpad.net/~jyrki-pulliainen/+archive/ubuntu/dh-virtualenv, but
# it's not obviously easier to use that than to build our own.)
FROM docker.io/library/${distro} AS builder
FROM docker.io/library/${distro} as builder
RUN apt-get update -qq -o Acquire::Languages=none
RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
@@ -73,8 +73,6 @@ RUN apt-get update -qq -o Acquire::Languages=none \
curl \
debhelper \
devscripts \
# Required for building cffi from source.
libffi-dev \
libsystemd-dev \
lsb-release \
pkg-config \

View File

@@ -243,3 +243,26 @@ healthcheck:
Jemalloc is embedded in the image and will be used instead of the default allocator.
You can read about jemalloc by reading the Synapse
[Admin FAQ](https://element-hq.github.io/synapse/latest/usage/administration/admin_faq.html#help-synapse-is-slow-and-eats-all-my-ramcpu).
## Modules
Synapse supports loading additional modules, using the
[`modules`](https://element-hq.github.io/synapse/latest/modules/index.html)
config. Synapse will look for these modules in `/modules`.
To install a package, simply run:
```
pip install --target <module_directory> <package>
```
Where `<module_directory>` is the directory mounted to `/modules`, and
`<package>` is either the package name or a path to the package. See
`pip install` for more details.
**Note**: Packages already installed as part of Synapse cannot be overridden by
different versions of the package in `/modules`, e.g. if the Synapse version
uses Twisted 24.3.0 then installing Twisted 23.10.0 in `/modules` won't have any
effect. This can cause issues if the required version of a package is different
between Synapse and the module being installed.
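As a worked example of the flow above (a sketch: the host paths and module name are placeholders, and the image tag is assumed from the rest of this README):

```sh
# Install the module into a directory on the host...
pip install --target /srv/synapse-modules my-synapse-module

# ...then mount that directory at /modules when starting the container.
docker run -d --name synapse \
    -v /srv/synapse-data:/data \
    -v /srv/synapse-modules:/modules \
    matrixdotorg/synapse:latest
```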

View File

@@ -11,9 +11,6 @@ DIST=$(cut -d ':' -f2 <<< "${distro:?}")
cp -aT /synapse/source /synapse/build
cd /synapse/build
# Delete any existing `.so` files to ensure a clean build.
rm -f /synapse/build/synapse/*.so
# if this is a prerelease, set the Section accordingly.
#
# When the package is later added to the package repo, reprepro will use the

View File

@@ -7,7 +7,6 @@
#}
## Server ##
public_baseurl: http://127.0.0.1:8008/
report_stats: False
trusted_key_servers: []
enable_registration: true
@@ -85,14 +84,6 @@ rc_invites:
per_user:
per_second: 1000
burst_count: 1000
per_issuer:
per_second: 1000
burst_count: 1000
rc_presence:
per_user:
per_second: 9999
burst_count: 9999
federation_rr_transactions_per_room_per_second: 9999
@@ -113,16 +104,8 @@ experimental_features:
msc3967_enabled: true
# Expose a room summary for public rooms
msc3266_enabled: true
# Send to-device messages to application services
msc2409_to_device_messages_enabled: true
# Allow application services to masquerade devices
msc3202_device_masquerading: true
# Sending device list changes, one-time key counts and fallback key usage to application services
msc3202_transaction_extensions: true
# Proxy OTK claim requests to exclusive ASes
msc3983_appservice_otk_claims: true
# Proxy key queries to exclusive ASes
msc3984_appservice_key_query: true
msc4115_membership_on_events: true
server_notices:
system_mxid_localpart: _server
@@ -130,9 +113,6 @@ server_notices:
system_mxid_avatar_url: ""
room_name: "Server Alert"
# Enable delayed events (msc4140)
max_event_delay_duration: 24h
# Disable sync cache so that initial `/sync` requests are up-to-date.
caches:

View File

@@ -38,13 +38,10 @@ server {
{% if using_unix_sockets %}
proxy_pass http://unix:/run/main_public.sock;
{% else %}
# note: do not add a path (even a single /) after the port in `proxy_pass`,
# otherwise nginx will canonicalise the URI and cause signature verification
# errors.
proxy_pass http://localhost:8080;
{% endif %}
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $host:$server_port;
proxy_set_header Host $host;
}
}

View File

@@ -176,6 +176,7 @@ app_service_config_files:
{% endif %}
macaroon_secret_key: "{{ SYNAPSE_MACAROON_SECRET_KEY }}"
expire_access_token: False
## Signing Keys ##

View File

@@ -117,7 +117,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
},
"media_repository": {
"app": "synapse.app.generic_worker",
"listener_resources": ["media", "client"],
"listener_resources": ["media"],
"endpoint_patterns": [
"^/_matrix/media/",
"^/_synapse/admin/v1/purge_media_cache$",
@@ -125,8 +125,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
"^/_synapse/admin/v1/user/.*/media.*$",
"^/_synapse/admin/v1/media/.*$",
"^/_synapse/admin/v1/quarantine_media/.*$",
"^/_matrix/client/v1/media/.*$",
"^/_matrix/federation/v1/media/.*$",
],
# The first configured media worker will run the media background jobs
"shared_extra_conf": {

View File

@@ -269,6 +269,15 @@ running with 'migrate_config'. See the README for more details.
args += ["--config-path", config_path]
# Add the `/modules` directory to the Python search path, which allows users
# to add custom modules.
#
# We want to add the directory *last* so that nothing can overwrite the
# existing package versions. Therefore we load the current path and append
# `/modules` to it.
path = ":".join(sys.path)
environ["PYTHONPATH"] = f"{path}:/modules"
log("Starting synapse with args " + " ".join(args))
args = [sys.executable] + args

View File

@@ -54,7 +54,6 @@
- [Using `synctl` with Workers](synctl_workers.md)
- [Systemd](systemd-with-workers/README.md)
- [Administration](usage/administration/README.md)
- [Backups](usage/administration/backups.md)
- [Admin API](usage/administration/admin_api/README.md)
- [Account Validity](admin_api/account_validity.md)
- [Background Updates](usage/administration/admin_api/background_updates.md)

View File

@@ -60,11 +60,10 @@ paginate through.
anything other than the return value of `next_token` from a previous call. Defaults to `0`.
* `dir`: string - Direction of event report order. Whether to fetch the most recent
first (`b`) or the oldest first (`f`). Defaults to `b`.
* `user_id`: optional string - Filter by the user ID of the reporter. This is the user who reported the event
and wrote the reason.
* `room_id`: optional string - Filter by room id.
* `event_sender_user_id`: optional string - Filter by the sender of the reported event. This is the user who
the report was made against.
* `user_id`: string - Is optional and filters to only return users with user IDs that
contain this value. This is the user who reported the event and wrote the reason.
* `room_id`: string - Is optional and filters to only return rooms with room IDs that
contain this value.
**Response**

View File

@@ -1,18 +1,21 @@
# Experimental Features API
This API allows a server administrator to enable or disable some experimental features on a per-user
basis. The currently supported features are:
- [MSC3881](https://github.com/matrix-org/matrix-spec-proposals/pull/3881): enable remotely toggling push notifications
for another client
- [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575): enable experimental sliding sync support
- [MSC4222](https://github.com/matrix-org/matrix-spec-proposals/pull/4222): adding `state_after` to sync v2
basis. The currently supported features are:
- [MSC3026](https://github.com/matrix-org/matrix-spec-proposals/pull/3026): busy
presence state enabled
- [MSC3881](https://github.com/matrix-org/matrix-spec-proposals/pull/3881): enable remotely toggling push notifications
for another client
- [MSC3967](https://github.com/matrix-org/matrix-spec-proposals/pull/3967): do not require
UIA when first uploading cross-signing keys.
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).
## Enabling/Disabling Features
This API allows a server administrator to enable experimental features for a given user. The request must
This API allows a server administrator to enable experimental features for a given user. The request must
provide a body containing the user id and listing the features to enable/disable in the following format:
```json
{
@@ -32,7 +35,7 @@ PUT /_synapse/admin/v1/experimental_features/<user_id>
```
## Listing Enabled Features
To list which features are enabled/disabled for a given user send a request to the following API:
```
@@ -49,4 +52,4 @@ user like so:
"msc3967": false
}
}
```
```
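For illustration, hypothetical `curl` calls against these endpoints (host and token are placeholders; the body shape follows the format shown above):

```sh
# Enable remotely toggling push notifications (MSC3881) for a user.
curl -X PUT \
    -H "Authorization: Bearer <admin_access_token>" \
    -d '{"features": {"msc3881": true}}' \
    'http://localhost:8008/_synapse/admin/v1/experimental_features/@user:example.com'

# List which features are enabled/disabled for that user.
curl -H "Authorization: Bearer <admin_access_token>" \
    'http://localhost:8008/_synapse/admin/v1/experimental_features/@user:example.com'
```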

View File

@@ -36,10 +36,6 @@ The following query parameters are available:
- the room's name,
- the local part of the room's canonical alias, or
- the complete (local and server part) room's id (case sensitive).
* `public_rooms` - Optional flag to filter public rooms. If `true`, only public rooms are queried. If `false`, public rooms are excluded from
the query. When the flag is absent (the default), **both** public and non-public rooms are included in the search results.
* `empty_rooms` - Optional flag to filter empty rooms. A room is empty if joined_members is zero. If `true`, only empty rooms are queried. If `false`, empty rooms are excluded from
the query. When the flag is absent (the default), **both** empty and non-empty rooms are included in the search results.
Defaults to no filtering.
@@ -385,13 +381,6 @@ The API is:
GET /_synapse/admin/v1/rooms/<room_id>/state
```
**Parameters**
The following query parameter is available:
* `type` - The type of room state event to filter by, eg "m.room.create". If provided, only state events
of this type will be returned (regardless of their `state_key` value).
A response body like the following is returned:
```json

View File

@@ -40,7 +40,6 @@ It returns a JSON body like the following:
"erased": false,
"shadow_banned": 0,
"creation_ts": 1560432506,
"last_seen_ts": 1732919539393,
"appservice_id": null,
"consent_server_notice_sent": null,
"consent_version": null,
@@ -56,8 +55,7 @@ It returns a JSON body like the following:
}
],
"user_type": null,
"locked": false,
"suspended": false
"locked": false
}
```
@@ -478,9 +476,9 @@ with a body of:
}
```
## List joined rooms of a user
## List room memberships of a user
Gets a list of all `room_id` that a specific `user_id` is joined to and is a member of (participating in).
Gets a list of all `room_id` that a specific `user_id` is a member of.
The API is:
@@ -517,73 +515,6 @@ The following fields are returned in the JSON response body:
- `joined_rooms` - An array of `room_id`.
- `total` - Number of rooms.
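For illustration, a hypothetical request against this endpoint (host and token are placeholders):

```sh
curl -H "Authorization: Bearer <admin_access_token>" \
    'http://localhost:8008/_synapse/admin/v1/users/@user:server.com/joined_rooms'
```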
## Get the number of invites sent by the user
Fetches the number of invites sent by the provided user ID across all rooms
after the given timestamp.
```
GET /_synapse/admin/v1/users/$user_id/sent_invite_count
```
**Parameters**
The following parameters should be set in the URL:
* `user_id`: fully qualified: for example, `@user:server.com`
The following should be set as query parameters in the URL:
* `from_ts`: int, required. A timestamp in ms from the unix epoch. Only
invites sent at or after the provided timestamp will be returned.
This works by comparing the provided timestamp to the `received_ts`
column in the `events` table.
Note: https://currentmillis.com/ is a useful tool for converting dates
into timestamps and vice versa.
A response body like the following is returned:
```json
{
"invite_count": 30
}
```
_Added in Synapse 1.122.0_
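For illustration, a hypothetical request (host, token, and timestamp are placeholders):

```sh
curl -H "Authorization: Bearer <admin_access_token>" \
    'http://localhost:8008/_synapse/admin/v1/users/@user:server.com/sent_invite_count?from_ts=1700000000000'
```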
## Get the cumulative number of rooms a user has joined after a given timestamp
Fetches the number of rooms that the user joined after the given timestamp, even
if they have subsequently left/been banned from those rooms.
```
GET /_synapse/admin/v1/users/$user_id/cumulative_joined_room_count
```
**Parameters**
The following parameters should be set in the URL:
* `user_id`: fully qualified: for example, `@user:server.com`
The following should be set as query parameters in the URL:
* `from_ts`: int, required. A timestamp in ms from the unix epoch. Only
room joins at or after the provided timestamp will be counted.
This works by comparing the provided timestamp to the `received_ts`
column in the `events` table.
Note: https://currentmillis.com/ is a useful tool for converting dates
into timestamps and vice versa.
A response body like the following is returned:
```json
{
"cumulative_joined_room_count": 30
}
```
_Added in Synapse 1.122.0_
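Likewise, a hypothetical request for this endpoint (host, token, and timestamp are placeholders):

```sh
curl -H "Authorization: Bearer <admin_access_token>" \
    'http://localhost:8008/_synapse/admin/v1/users/@user:server.com/cumulative_joined_room_count?from_ts=1700000000000'
```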
## Account Data
Gets information about account data for a specific `user_id`.
@@ -1430,88 +1361,3 @@ Returns a `404` HTTP status code if no user was found, with a response body like
```
_Added in Synapse 1.72.0._
## Redact all the events of a user
This endpoint allows an admin to redact the events of a given user. There are no restrictions on redactions for a
local user. By default, we puppet the user who sent the message to redact it themselves. Redactions for non-local users are issued using the admin user, and will fail in rooms where the admin user is not an admin or does not have the power level required to issue redactions.
The API is
```
POST /_synapse/admin/v1/user/$user_id/redact
{
"rooms": ["!roomid1", "!roomid2"]
}
```
If an empty list is provided as the value for `rooms`, all events in all the rooms the user is a member of will be redacted;
otherwise, all the events in the rooms provided in the request will be redacted.
The API starts the redaction process running, and returns immediately with a JSON body with
a redact id which can be used to query the status of the redaction process:
```json
{
"redact_id": "<opaque id>"
}
```
**Parameters**
The following parameters should be set in the URL:
- `user_id` - The fully qualified MXID of the user: for example, `@user:server.com`.
The following JSON body parameter must be provided:
- `rooms` - A list of rooms to redact the user's events in. If an empty list is provided, all events in all rooms
the user is a member of will be redacted
_Added in Synapse 1.116.0._
The following JSON body parameters are optional:
- `reason` - Reason the redaction is being requested, e.g. "spam", "abuse", etc. This will be included in each redaction event, and be visible to users.
- `limit` - a limit on the number of the user's events to search when looking for ones that can be redacted (events are redacted newest to oldest) in each room; defaults to 1000 if not provided
## Check the status of a redaction process
It is possible to query the status of the background task for redacting a user's events.
The status can be queried up to 24 hours after completion of the task,
or until Synapse is restarted (whichever happens first).
The API is:
```
GET /_synapse/admin/v1/user/redact_status/$redact_id
```
A response body like the following is returned:
```
{
"status": "active",
"failed_redactions": [],
}
```
**Parameters**
The following parameters should be set in the URL:
* `redact_id` - string - The ID for this redaction process, provided when the redaction was requested.
**Response**
The following fields are returned in the JSON response body:
- `status` - string - one of scheduled/active/completed/failed, indicating the status of the redaction job
- `failed_redactions` - dictionary - the keys of the dict are event ids the process was unable to redact, if any, and the values are
the corresponding error that caused the redaction to fail
_Added in Synapse 1.116.0._
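Putting the two endpoints together, a hypothetical end-to-end flow (host, token, and room IDs are placeholders):

```sh
# Start redacting the user's events in two rooms...
curl -X POST \
    -H "Authorization: Bearer <admin_access_token>" \
    -d '{"rooms": ["!roomid1", "!roomid2"]}' \
    'http://localhost:8008/_synapse/admin/v1/user/@user:server.com/redact'

# ...then poll the status using the redact_id from the response.
curl -H "Authorization: Bearer <admin_access_token>" \
    'http://localhost:8008/_synapse/admin/v1/user/redact_status/<redact_id>'
```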

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -8,7 +8,9 @@ errors in code.
The necessary tools are:
- [ruff](https://github.com/charliermarsh/ruff), which can spot common errors and enforce a consistent style; and
- [black](https://black.readthedocs.io/en/stable/), a source code formatter;
- [isort](https://pycqa.github.io/isort/), which organises each file's imports;
- [ruff](https://github.com/charliermarsh/ruff), which can spot common errors; and
- [mypy](https://mypy.readthedocs.io/en/stable/), a type checker.
See [the contributing guide](development/contributing_guide.md#run-the-linters) for instructions

View File

@@ -322,7 +322,7 @@ The following command will let you run the integration test with the most common
configuration:
```sh
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bullseye
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:focal
```
(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)
@@ -449,9 +449,9 @@ For example, a fix in PR #1234 would have its changelog entry in
> The security levels of Florbs are now validated when received
> via the `/federation/florb` endpoint. Contributed by Jane Matrix.
If there are multiple pull requests involved in a single bugfix/feature/etc, then the
content for each `changelog.d` file and file extension should be the same. Towncrier
will merge the matching files together into a single changelog entry when we come to
If there are multiple pull requests involved in a single bugfix/feature/etc,
then the content for each `changelog.d` file should be the same. Towncrier will
merge the matching files together into a single changelog entry when we come to
release.
### How do I know what to call the changelog file before I create the PR?

View File

@@ -21,10 +21,8 @@ incrementing integer, but backfilled events start with `stream_ordering=-1` and
---
- Incremental `/sync?since=xxx` returns things in the order they arrive at the server
(`stream_ordering`).
- Initial `/sync`, `/messages` (and `/backfill` in the federation API) return them in
the order determined by the event graph `(topological_ordering, stream_ordering)`.
- `/sync` returns things in the order they arrive at the server (`stream_ordering`).
- `/messages` (and `/backfill` in the federation API) return them in the order determined by the event graph `(topological_ordering, stream_ordering)`.
The general idea is that, if you're following a room in real-time (i.e.
`/sync`), you probably want to see the messages as they arrive at your server,

View File

@@ -1,94 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="41.440346mm"
height="10.383124mm"
viewBox="0 0 41.440346 10.383125"
version="1.1"
id="svg1"
xml:space="preserve"
sodipodi:docname="element_logo_white_bg.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg"><sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
bordercolor="#000000"
borderopacity="0.25"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="mm"
showgrid="false"
inkscape:export-bgcolor="#ffffffff" /><defs
id="defs1" /><g
id="layer1"
transform="translate(-84.803844,-143.2075)"
inkscape:export-filename="element_logo_white_bg.svg"
inkscape:export-xdpi="96"
inkscape:export-ydpi="96"><g
style="fill:none"
id="g1"
transform="matrix(0.26458333,0,0,0.26458333,85.841658,144.26667)"><rect
style="display:inline;fill:#ffffff;fill-opacity:1;stroke:#ffffff;stroke-width:1.31041;stroke-dasharray:none;stroke-opacity:1"
id="rect20"
width="155.31451"
height="37.932892"
x="-3.2672384"
y="-3.3479743"
rx="3.3718522"
ry="3.7915266"
transform="translate(-2.1259843e-6)"
inkscape:label="rect20"
inkscape:export-filename="rect20.svg"
inkscape:export-xdpi="96"
inkscape:export-ydpi="96" /><path
fill-rule="evenodd"
clip-rule="evenodd"
d="M 16,32 C 24.8366,32 32,24.8366 32,16 32,7.16344 24.8366,0 16,0 7.16344,0 0,7.16344 0,16 0,24.8366 7.16344,32 16,32 Z"
fill="#0dbd8b"
id="path1" /><path
fill-rule="evenodd"
clip-rule="evenodd"
d="m 13.0756,7.455 c 0,-0.64584 0.5247,-1.1694 1.1719,-1.1694 4.3864,0 7.9423,3.54853 7.9423,7.9259 0,0.6458 -0.5246,1.1694 -1.1718,1.1694 -0.6472,0 -1.1719,-0.5236 -1.1719,-1.1694 0,-3.0857 -2.5066,-5.58711 -5.5986,-5.58711 -0.6472,0 -1.1719,-0.52355 -1.1719,-1.16939 z"
fill="#ffffff"
id="path2" /><path
fill-rule="evenodd"
clip-rule="evenodd"
d="m 24.5424,13.042 c 0.6472,0 1.1719,0.5235 1.1719,1.1694 0,4.3773 -3.5559,7.9258 -7.9424,7.9258 -0.6472,0 -1.1718,-0.5235 -1.1718,-1.1693 0,-0.6459 0.5246,-1.1694 1.1718,-1.1694 3.0921,0 5.5987,-2.5015 5.5987,-5.5871 0,-0.6459 0.5247,-1.1694 1.1718,-1.1694 z"
fill="#ffffff"
id="path3" /><path
fill-rule="evenodd"
clip-rule="evenodd"
d="m 18.9446,24.5446 c 0,0.6459 -0.5247,1.1694 -1.1718,1.1694 -4.3865,0 -7.94239,-3.5485 -7.94239,-7.9258 0,-0.6459 0.52469,-1.1694 1.17179,-1.1694 0.6472,0 1.1719,0.5235 1.1719,1.1694 0,3.0856 2.5066,5.587 5.5987,5.587 0.6471,0 1.1718,0.5236 1.1718,1.1694 z"
fill="#ffffff"
id="path4" /><path
fill-rule="evenodd"
clip-rule="evenodd"
d="m 7.45823,18.9576 c -0.64718,0 -1.17183,-0.5235 -1.17183,-1.1694 0,-4.3773 3.55591,-7.92581 7.9423,-7.92581 0.6472,0 1.1719,0.52351 1.1719,1.16941 0,0.6458 -0.5247,1.1694 -1.1719,1.1694 -3.092,0 -5.59864,2.5014 -5.59864,5.587 0,0.6459 -0.52465,1.1694 -1.17183,1.1694 z"
fill="#ffffff"
id="path5" /><path
d="M 56.2856,18.1428 H 44.9998 c 0.1334,1.181 0.5619,2.1238 1.2858,2.8286 0.7238,0.6857 1.6761,1.0286 2.8571,1.0286 0.7809,0 1.4857,-0.1905 2.1143,-0.5715 0.6286,-0.3809 1.0762,-0.8952 1.3428,-1.5428 h 3.4286 c -0.4571,1.5047 -1.3143,2.7238 -2.5714,3.6571 -1.2381,0.9143 -2.7048,1.3715 -4.4,1.3715 -2.2095,0 -4,-0.7334 -5.3714,-2.2 -1.3524,-1.4667 -2.0286,-3.3239 -2.0286,-5.5715 0,-2.1905 0.6857,-4.0285 2.0571,-5.5143 1.3715,-1.4857 3.1429,-2.22853 5.3143,-2.22853 2.1714,0 3.9238,0.73333 5.2572,2.20003 1.3523,1.4476 2.0285,3.2762 2.0285,5.4857 z m -7.2572,-5.9714 c -1.0667,0 -1.9524,0.3143 -2.6571,0.9429 -0.7048,0.6285 -1.1429,1.4666 -1.3143,2.5142 h 7.8857 c -0.1524,-1.0476 -0.5714,-1.8857 -1.2571,-2.5142 -0.6858,-0.6286 -1.5715,-0.9429 -2.6572,-0.9429 z"
fill="#000000"
id="path6" /><path
d="M 58.6539,20.1428 V 3.14282 h 3.4 V 20.2 c 0,0.7619 0.419,1.1428 1.2571,1.1428 l 0.6,-0.0285 v 3.2285 c -0.3238,0.0572 -0.6667,0.0857 -1.0286,0.0857 -1.4666,0 -2.5428,-0.3714 -3.2285,-1.1142 -0.6667,-0.7429 -1,-1.8667 -1,-3.3715 z"
fill="#000000"
id="path7" /><path
d="M 79.7454,18.1428 H 68.4597 c 0.1333,1.181 0.5619,2.1238 1.2857,2.8286 0.7238,0.6857 1.6762,1.0286 2.8571,1.0286 0.781,0 1.4857,-0.1905 2.1143,-0.5715 0.6286,-0.3809 1.0762,-0.8952 1.3429,-1.5428 h 3.4285 c -0.4571,1.5047 -1.3143,2.7238 -2.5714,3.6571 -1.2381,0.9143 -2.7048,1.3715 -4.4,1.3715 -2.2095,0 -4,-0.7334 -5.3714,-2.2 -1.3524,-1.4667 -2.0286,-3.3239 -2.0286,-5.5715 0,-2.1905 0.6857,-4.0285 2.0571,-5.5143 1.3715,-1.4857 3.1429,-2.22853 5.3143,-2.22853 2.1715,0 3.9238,0.73333 5.2572,2.20003 1.3524,1.4476 2.0285,3.2762 2.0285,5.4857 z m -7.2572,-5.9714 c -1.0666,0 -1.9524,0.3143 -2.6571,0.9429 -0.7048,0.6285 -1.1429,1.4666 -1.3143,2.5142 h 7.8857 c -0.1524,-1.0476 -0.5714,-1.8857 -1.2571,-2.5142 -0.6857,-0.6286 -1.5715,-0.9429 -2.6572,-0.9429 z"
fill="#000000"
id="path8" /><path
d="m 95.0851,16.0571 v 8.5143 h -3.4 v -8.8857 c 0,-2.2476 -0.9333,-3.3714 -2.8,-3.3714 -1.0095,0 -1.819,0.3238 -2.4286,0.9714 -0.5904,0.6476 -0.8857,1.5333 -0.8857,2.6571 v 8.6286 h -3.4 V 9.74282 h 3.1429 v 1.97148 c 0.3619,-0.6667 0.9143,-1.2191 1.6571,-1.6572 0.7429,-0.43809 1.6667,-0.65713 2.7714,-0.65713 2.0572,0 3.5429,0.78093 4.4572,2.34283 1.2571,-1.5619 2.9333,-2.34283 5.0286,-2.34283 1.733,0 3.067,0.54285 4,1.62853 0.933,1.0667 1.4,2.4762 1.4,4.2286 v 9.3143 h -3.4 v -8.8857 c 0,-2.2476 -0.933,-3.3714 -2.8,-3.3714 -1.0286,0 -1.8477,0.3333 -2.4572,1 -0.5905,0.6476 -0.8857,1.5619 -0.8857,2.7428 z"
fill="#000000"
id="path9" /><path
d="m 121.537,18.1428 h -11.286 c 0.133,1.181 0.562,2.1238 1.286,2.8286 0.723,0.6857 1.676,1.0286 2.857,1.0286 0.781,0 1.486,-0.1905 2.114,-0.5715 0.629,-0.3809 1.076,-0.8952 1.343,-1.5428 h 3.429 c -0.458,1.5047 -1.315,2.7238 -2.572,3.6571 -1.238,0.9143 -2.705,1.3715 -4.4,1.3715 -2.209,0 -4,-0.7334 -5.371,-2.2 -1.353,-1.4667 -2.029,-3.3239 -2.029,-5.5715 0,-2.1905 0.686,-4.0285 2.057,-5.5143 1.372,-1.4857 3.143,-2.22853 5.315,-2.22853 2.171,0 3.923,0.73333 5.257,2.20003 1.352,1.4476 2.028,3.2762 2.028,5.4857 z m -7.257,-5.9714 c -1.067,0 -1.953,0.3143 -2.658,0.9429 -0.704,0.6285 -1.142,1.4666 -1.314,2.5142 h 7.886 c -0.153,-1.0476 -0.572,-1.8857 -1.257,-2.5142 -0.686,-0.6286 -1.572,-0.9429 -2.657,-0.9429 z"
fill="#000000"
id="path10" /><path
d="m 127.105,9.74282 v 1.97148 c 0.343,-0.6477 0.905,-1.1905 1.686,-1.6286 0.8,-0.45716 1.762,-0.68573 2.885,-0.68573 1.753,0 3.105,0.53333 4.058,1.60003 0.971,1.0666 1.457,2.4857 1.457,4.2571 v 9.3143 h -3.4 v -8.8857 c 0,-1.0476 -0.248,-1.8667 -0.743,-2.4572 -0.476,-0.6095 -1.21,-0.9142 -2.2,-0.9142 -1.086,0 -1.943,0.3238 -2.572,0.9714 -0.609,0.6476 -0.914,1.5428 -0.914,2.6857 v 8.6 h -3.4 V 9.74282 Z"
fill="#000000"
id="path11" /><path
d="m 147.12,21.5428 v 2.9429 c -0.419,0.1143 -1.009,0.1714 -1.771,0.1714 -2.895,0 -4.343,-1.4571 -4.343,-4.3714 v -7.8286 h -2.257 V 9.74282 h 2.257 V 5.88568 h 3.4 v 3.85714 h 2.772 v 2.71428 h -2.772 v 7.4857 c 0,1.1619 0.552,1.7429 1.657,1.7429 z"
fill="#000000"
id="path12" /></g></g></svg>


View File

@@ -76,9 +76,8 @@ _Changed in Synapse v1.62.0: `synapse.module_api.NOT_SPAM` and `synapse.module_a
async def user_may_invite(inviter: str, invitee: str, room_id: str) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
```
Called when processing an invitation, both when one is created locally or when
receiving an invite over federation. Both inviter and invitee are represented by
their Matrix user ID (e.g. `@alice:example.com`).
Called when processing an invitation. Both inviter and invitee are
represented by their Matrix user ID (e.g. `@alice:example.com`).
The callback must return one of:
@@ -113,9 +112,7 @@ async def user_may_send_3pid_invite(
```
Called when processing an invitation using a third-party identifier (also called a 3PID,
e.g. an email address or a phone number). It is only called when a 3PID invite is created
locally - not when one is received in a room over federation. If the 3PID is already associated
with a Matrix ID, the spam check will go through the `user_may_invite` callback instead.
e.g. an email address or a phone number).
The inviter is represented by their Matrix user ID (e.g. `@alice:example.com`), and the
invitee is represented by its medium (e.g. "email") and its address
@@ -245,7 +242,7 @@ this callback.
_First introduced in Synapse v1.37.0_
```python
async def check_username_for_spam(user_profile: synapse.module_api.UserProfile, requester_id: str) -> bool
async def check_username_for_spam(user_profile: synapse.module_api.UserProfile) -> bool
```
Called when computing search results in the user directory. The module must return a
@@ -264,8 +261,6 @@ The profile is represented as a dictionary with the following keys:
The module is given a copy of the original dictionary, so modifying it from within the
module cannot modify a user's profile when included in user directory search results.
The requester_id parameter is the ID of the user that called the user directory API.
If multiple modules implement this callback, they will be considered in order. If a
callback returns `False`, Synapse falls through to the next one. The value of the first
callback that does not return `False` will be used. If this happens, Synapse will not call

View File

@@ -336,36 +336,6 @@ but it has a `response_types_supported` which excludes "code" (which we rely on,
is even mentioned in their [documentation](https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow#login)),
so we have to disable discovery and configure the URIs manually.
### Forgejo
Forgejo is a fork of Gitea that can act as an OAuth2 provider.
The implementation of OAuth2 is improved compared to Gitea, as it provides a correctly defined `subject_claim` and `scopes`.
Synapse config:
```yaml
oidc_providers:
- idp_id: forgejo
idp_name: Forgejo
discover: false
issuer: "https://your-forgejo.com/"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
client_auth_method: client_secret_post
scopes: ["openid", "profile", "email", "groups"]
authorization_endpoint: "https://your-forgejo.com/login/oauth/authorize"
token_endpoint: "https://your-forgejo.com/login/oauth/access_token"
userinfo_endpoint: "https://your-forgejo.com/api/v1/user"
user_mapping_provider:
config:
subject_claim: "sub"
picture_claim: "picture"
localpart_template: "{{ user.preferred_username }}"
display_name_template: "{{ user.name }}"
email_template: "{{ user.email }}"
```
### GitHub
[GitHub][github-idp] is a bit special as it is not an OpenID Connect compliant provider, but

View File

@@ -100,10 +100,6 @@ database:
keepalives_count: 3
```
## Backups
Don't forget to [back up](./usage/administration/backups.md#database) your database!
## Tuning Postgres
The default settings should be fine for most deployments. For larger
@@ -246,11 +242,12 @@ host all all ::1/128 ident
### Fixing incorrect `COLLATE` or `CTYPE`
Synapse will refuse to start when using a database with incorrect values of
`COLLATE` and `CTYPE` unless the config flag `allow_unsafe_locale`, found in the
`database` section of the config, is set to true. Using different locales can
cause issues if the locale library is updated from underneath the database, or
if a different version of the locale is used on any replicas.
Synapse will refuse to set up a new database if it has the wrong values of
`COLLATE` and `CTYPE` set. Synapse will also refuse to start an existing database with incorrect values
of `COLLATE` and `CTYPE` unless the config flag `allow_unsafe_locale`, found in the
`database` section of the config, is set to true. Using different locales can cause issues if the locale library is updated from
underneath the database, or if a different version of the locale is used on any
replicas.
If you have a database with an unsafe locale, the safest way to fix the issue is to dump the database and recreate it with
the correct locale parameter (as shown above). It is also possible to change the
@@ -259,3 +256,13 @@ however extreme care must be taken to avoid database corruption.
Note that the above may fail with an error about duplicate rows if corruption
has already occurred, and such duplicate rows will need to be manually removed.
### Fixing inconsistent sequences error
Synapse uses Postgres sequences to generate IDs for various tables. A sequence
and associated table can get out of sync if, for example, Synapse has been
downgraded and then upgraded again.
To fix the issue shut down Synapse (including any and all workers) and run the
SQL command included in the error message. Once done Synapse should start
successfully.
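For example (a sketch only: the actual statement comes from the error message, and the database name and user are assumptions):

```sh
# Stop Synapse and all workers first, then run the SQL printed in the
# error message against the Synapse database.
sudo -u postgres psql synapse -c "$SQL_FROM_ERROR_MESSAGE"
```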

View File

@@ -74,7 +74,7 @@ server {
proxy_pass http://localhost:8008;
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $host:$server_port;
proxy_set_header Host $host;
# Nginx by default only allows file uploads up to 1M in size
# Increase client_max_body_size to match max_upload_size defined in homeserver.yaml

View File

@@ -67,7 +67,7 @@ in Synapse can be deactivated.
**NOTE**: This has an impact on security and is for testing purposes only!
To deactivate the certificate validation, the following setting must be added to
your [homeserver.yaml](../usage/configuration/homeserver_sample_config.md).
your [homserver.yaml](../usage/configuration/homeserver_sample_config.md).
```yaml
use_insecure_ssl_client_just_for_testing_do_not_use: true

View File

@@ -52,6 +52,8 @@ architecture via <https://packages.matrix.org/debian/>.
To install the latest release:
TODO UPDATE ALL THIS
```sh
sudo apt install -y lsb-release wget apt-transport-https
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
@@ -157,7 +159,7 @@ sudo pip install py-bcrypt
#### Alpine Linux
Jahway603 maintains [Synapse packages for Alpine Linux](https://pkgs.alpinelinux.org/packages?name=synapse&branch=edge) in the community repository. Install with:
6543 maintains [Synapse packages for Alpine Linux](https://pkgs.alpinelinux.org/packages?name=synapse&branch=edge) in the community repository. Install with:
```sh
sudo apk add synapse
@@ -208,7 +210,7 @@ When following this route please make sure that the [Platform-specific prerequis
System requirements:
- POSIX-compliant system (tested on Linux & OS X)
- Python 3.9 or later, up to Python 3.13.
- Python 3.8 or later, up to Python 3.11.
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
If building on an uncommon architecture for which pre-built wheels are
@@ -307,62 +309,7 @@ sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
libwebp-devel libxml2-devel libxslt-devel libpq-devel \
python3-virtualenv libffi-devel openssl-devel python3-devel \
libicu-devel
sudo dnf group install "Development Tools"
```
##### Red Hat Enterprise Linux / Rocky Linux
*Note: The term "RHEL" below refers to both Red Hat Enterprise Linux and Rocky Linux. The distributions are 1:1 binary compatible.*
It's recommended to use the latest Python versions.
RHEL 8 in particular ships with Python 3.6 by default, which is EOL and therefore no longer supported by Synapse. RHEL 9 ships with Python 3.9, which is still supported by the Python core team as of this writing. However, newer Python versions provide significant performance improvements and are available in the official distributions' repositories. Therefore it's recommended to use them.
Python 3.11 and 3.12 are available for both RHEL 8 and 9.
These commands should be run as root user.
RHEL 8
```bash
# Enable PowerTools repository
dnf config-manager --set-enabled powertools
```
RHEL 9
```bash
# Enable CodeReady Linux Builder repository
crb enable
```
Install new version of Python. You only need one of these:
```bash
# Python 3.11
dnf install python3.11 python3.11-devel
```
```bash
# Python 3.12
dnf install python3.12 python3.12-devel
```
Finally, install common prerequisites
```bash
dnf install libicu libicu-devel libpq5 libpq5-devel lz4 pkgconf
dnf group install "Development Tools"
```
###### Using venv module instead of virtualenv command
It's recommended to use the Python venv module directly rather than the virtualenv command.
* On RHEL 9, virtualenv is only available on [EPEL](https://docs.fedoraproject.org/en-US/epel/).
* On RHEL 8, virtualenv is based on Python 3.6. It does not support creating 3.11/3.12 virtual environments.
Here's an example of creating a Python 3.12 virtual environment and installing Synapse from PyPI.
```bash
mkdir -p ~/synapse
# To use Python 3.11, simply use the command "python3.11" instead.
python3.12 -m venv ~/synapse/env
source ~/synapse/env/bin/activate
pip install --upgrade pip
pip install --upgrade setuptools
pip install matrix-synapse
sudo dnf groupinstall "Development Tools"
```
##### macOS
@@ -656,10 +603,6 @@ This also requires the optional `lxml` python dependency to be installed. This
in turn requires the `libxml2` library to be available - on Debian/Ubuntu this
means `apt-get install libxml2-dev`, or equivalent for your OS.
### Backups
Don't forget to take [backups](../usage/administration/backups.md) of your new server!
### Troubleshooting Installation
`pip` seems to leak *lots* of memory during installation. For instance, a Linux

View File

@@ -72,8 +72,8 @@ class ExampleSpamChecker:
async def user_may_publish_room(self, userid, room_id):
return True # allow publishing of all rooms
async def check_username_for_spam(self, user_profile, requester_id):
return False # allow all usernames regardless of requester
async def check_username_for_spam(self, user_profile):
return False # allow all usernames
async def check_registration_for_spam(
self,

View File

@@ -117,73 +117,6 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).
# Upgrading to v1.122.0
## Dropping support for PostgreSQL 11 and 12
In line with our [deprecation policy](deprecation_policy.md), we've dropped
support for PostgreSQL 11 and 12, as they are no longer supported upstream.
This release of Synapse requires PostgreSQL 13+.
# Upgrading to v1.120.0
## Removal of experimental MSC3886 feature
[MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886)
has been closed (and will not enter the Matrix spec). As such, we are
removing the experimental support for it in this release.
The `experimental_features.msc3886_endpoint` configuration option has
been removed.
## Authenticated media is now enforced by default
The [`enable_authenticated_media`] configuration option now defaults to true.
This means that clients and remote (federated) homeservers now need to use
the authenticated media endpoints in order to download media from your
homeserver.
As an exception, existing media that was stored on the server prior to
this option changing to `true` will still be accessible over the
unauthenticated endpoints.
The matrix.org homeserver has already been running with this option enabled
since September 2024, so most common clients and homeservers should already
be compatible.
With that said, administrators who wish to disable this feature for broader
compatibility can still do so by manually configuring
`enable_authenticated_media: False`.
[`enable_authenticated_media`]: usage/configuration/config_documentation.md#enable_authenticated_media
# Upgrading to v1.119.0
## Minimum supported Python version
The minimum supported Python version has been increased from v3.8 to v3.9.
You will need Python 3.9+ to run Synapse v1.119.0 (due out Nov 7th, 2024).
If you use current versions of the Matrix.org-distributed Docker images, no action is required.
Please note that support for Ubuntu `focal` was dropped as well since it uses Python 3.8.
# Upgrading to v1.111.0
## New worker endpoints for authenticated client and federation media
[Media repository workers](./workers.md#synapseappmedia_repository) handling
Media APIs can now handle the following endpoint patterns:
```
^/_matrix/client/v1/media/.*$
^/_matrix/federation/v1/media/.*$
```
Please update your reverse proxy configuration.
# Upgrading to v1.106.0
## Minimum supported Rust version

View File

@@ -255,8 +255,6 @@ line to `/etc/default/matrix-synapse`:
LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.2
*Note*: You may need to set `PYTHONMALLOC=malloc` to ensure that `jemalloc` can accurately calculate memory usage. By default, Python uses its internal small-object allocator, which may interfere with jemalloc's ability to track memory consumption correctly. This could prevent the [cache_autotuning](../configuration/config_documentation.md#caches-and-associated-values) feature from functioning as expected, as the Python allocator may not reach the memory threshold set by `max_cache_memory_usage`, thus not triggering the cache eviction process.
This made a significant difference on Python 2.7 - it's unclear how
much of an improvement it provides on Python 3.x.

View File

@@ -1,125 +0,0 @@
# How to back up a Synapse homeserver
It is critical to maintain good backups of your server, to guard against
hardware failure as well as potential corruption due to bugs or administrator
error.
This page documents the things you will need to consider backing up as part of
a Synapse installation.
## Configuration files
Keep a copy of your configuration file (`homeserver.yaml`), as well as any
auxiliary config files it refers to such as the
[`log_config`](../configuration/config_documentation.md#log_config) file,
[`app_service_config_files`](../configuration/config_documentation.md#app_service_config_files).
Often, all such config files will be kept in a single directory such as
`/etc/synapse`, which will make this easier.
## Server signing key
Your server has a [signing
key](../configuration/config_documentation.md#signing_key_path) which it uses
to sign events and outgoing federation requests. It is easiest to back it up
with your configuration files, but an alternative is to have Synapse create a
new signing key if you have to restore.
If you do decide to replace the signing key, you should add the old *public*
key to
[`old_signing_keys`](../configuration/config_documentation.md#old_signing_keys).
## Database
Synapse's support for SQLite is only suitable for testing purposes, so for the
purposes of this document, we'll assume you are using
[PostgreSQL](../../postgres.md).
A full discussion of backup strategies for PostgreSQL is out of scope for this
document; see the [PostgreSQL
documentation](https://www.postgresql.org/docs/current/backup.html) for
detailed information.
### Synapse-specific details
* Be very careful not to restore into a database that already has tables
present. At best, this will error; at worst, it will lead to subtle database
inconsistencies.
* The `e2e_one_time_keys_json` table should **not** be backed up, or if it is
backed up, should be
[`TRUNCATE`d](https://www.postgresql.org/docs/current/sql-truncate.html)
after restoring the database before Synapse is started.
[Background: restoring the database to an older backup can cause
used one-time-keys to be re-issued, causing subsequent [message decryption
errors](https://github.com/element-hq/element-meta/issues/2155). Clearing
all one-time-keys from the database ensures that this cannot happen, and
will prompt clients to generate and upload new one-time-keys.]
### Quick and easy database backup and restore
Typically, the easiest solution is to use `pg_dump` to take a copy of the whole
database. We recommend `pg_dump`'s custom dump format, as it produces
significantly smaller backup files.
```shell
sudo -u postgres pg_dump -Fc --exclude-table-data e2e_one_time_keys_json synapse > synapse.dump
```
There is no need to stop Postgres or Synapse while `pg_dump` is running: it
will take a consistent snapshot of the database.
To restore, you will need to recreate the database as described in [Using
Postgres](../../postgres.md#set-up-database),
then load the dump into it with `pg_restore`:
```shell
sudo -u postgres createdb --encoding=UTF8 --locale=C --template=template0 --owner=synapse_user synapse
sudo -u postgres pg_restore -d synapse < synapse.dump
```
(If you forgot to exclude `e2e_one_time_keys_json` during `pg_dump`, remember
to connect to the new database and `TRUNCATE e2e_one_time_keys_json;` before
starting Synapse.)
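For reference, that cleanup step could look like this (the database name is an assumption):

```sh
sudo -u postgres psql synapse -c 'TRUNCATE e2e_one_time_keys_json;'
```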
To reiterate: do **not** restore a dump over an existing database.
Again, if you plan to run your homeserver at any sort of production level, we
recommend studying the PostgreSQL documentation on backup options.
## Media store
Synapse keeps a copy of media uploaded by users, including avatars and message
attachments, in its [Media
store](../configuration/config_documentation.md#media-store).
It is a directory on the local disk, containing the following directories:
* `local_content`: this is content uploaded by your local users. As a general
rule, you should back this up: it may represent the only copy of those
media files anywhere in the federation, and if they are lost, users will
see errors when viewing user or room avatars, and messages with attachments.
* `local_thumbnails`: "thumbnails" of images uploaded by your users. If
[`dynamic_thumbnails`](../configuration/config_documentation.md#dynamic_thumbnails)
is enabled, these will be regenerated if they are removed from the disk, and
there is therefore no need to back them up.
If `dynamic_thumbnails` is *not* enabled (the default): although this can
theoretically be regenerated from `local_content`, there is no tooling to do
so. We recommend that these are backed up too.
* `remote_content`: this is a cache of content that was uploaded by a user on
another server, and has since been requested by a user on your own server.
Typically there is no need to back up this directory: if a file in this directory
is removed, Synapse will attempt to fetch it again from the remote
server.
* `remote_thumbnails`: thumbnails of images uploaded by users on other
servers. As with `remote_content`, there is normally no need to back this
up.
* `url_cache`, `url_cache_thumbnails`: temporary caches of files downloaded
by the [URL previews](../../setup/installation.md#url-previews) feature.
These do not need to be backed up.

View File

@@ -246,7 +246,6 @@ Example configuration:
```yaml
presence:
enabled: false
include_offline_users_on_sync: false
```
`enabled` can also be set to a special value of "untracked" which ignores updates
@@ -255,10 +254,6 @@ received via clients and federation, while still accepting updates from the
*The "untracked" option was added in Synapse 1.96.0.*
When clients perform an initial or `full_state` sync, presence results for offline users are
not included by default. Setting `include_offline_users_on_sync` to `true` will always include
offline users in the results. Defaults to false.
---
### `require_auth_for_profile_requests`
@@ -509,8 +504,7 @@ Unix socket support (_Added in Synapse 1.89.0_):
Valid resource names are:
* `client`: the client-server API (/_matrix/client). Also implies `media` and `static`.
If configuring the main process, the Synapse Admin API (/_synapse/admin) is also implied.
* `client`: the client-server API (/_matrix/client), and the synapse admin API (/_synapse/admin). Also implies `media` and `static`.
* `consent`: user consent forms (/_matrix/consent). See [here](../../consent_tracking.md) for more.
@@ -673,9 +667,8 @@ This setting has the following sub-options:
TLS via STARTTLS *if the SMTP server supports it*. If this option is set,
Synapse will refuse to connect unless the server supports STARTTLS.
* `enable_tls`: By default, if the server supports TLS, it will be used, and the server
must present a certificate that is valid for `tlsname`. If this option
must present a certificate that is valid for 'smtp_host'. If this option
is set to false, TLS will not be used.
* `tlsname`: The domain name the SMTP server's TLS certificate must be valid for, defaulting to `smtp_host`.
* `notif_from`: defines the "From" address to use when sending emails.
It must be set if email sending is enabled. The placeholder '%(app)s' will be replaced by the application name,
which is normally set in `app_name`, but may be overridden by the
@@ -742,7 +735,6 @@ email:
force_tls: true
require_transport_security: true
enable_tls: false
tlsname: mail.server.example.com
notif_from: "Your Friendly %(app)s homeserver <noreply@example.com>"
app_name: my_branded_matrix_server
enable_notifs: true
@@ -763,19 +755,6 @@ email:
password_reset: "[%(server_name)s] Password reset"
email_validation: "[%(server_name)s] Validate your email"
```
---
### `max_event_delay_duration`
The maximum allowed duration by which sent events can be delayed, as per
[MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140).
Must be a positive value if set.
Defaults to no duration (`null`), which disallows sending delayed events.
Example configuration:
```yaml
max_event_delay_duration: 24h
```
## Homeserver blocking
Useful options for Synapse admins.
@@ -1436,7 +1415,7 @@ number of entries that can be stored.
Please see the [Config Conventions](#config-conventions) for information on how to specify memory size and cache expiry
durations.
* `max_cache_memory_usage` sets a ceiling on how much memory the cache can use before caches begin to be continuously evicted.
They will continue to be evicted until the memory usage drops below the `target_cache_memory_usage`, set in
They will continue to be evicted until the memory usage drops below the `target_memory_usage`, set in
the setting below, or until the `min_cache_ttl` is hit. There is no default value for this option.
* `target_cache_memory_usage` sets a rough target for the desired memory usage of the caches. There is no default value
for this option.
@@ -1780,9 +1759,8 @@ rc_3pid_validation:
### `rc_invites`
This option sets ratelimiting how often invites can be sent in a room or to a
specific user. `per_room` defaults to `per_second: 0.3`, `burst_count: 10`,
`per_user` defaults to `per_second: 0.003`, `burst_count: 5`, and `per_issuer`
defaults to `per_second: 0.3`, `burst_count: 10`.
specific user. `per_room` defaults to `per_second: 0.3`, `burst_count: 10` and
`per_user` defaults to `per_second: 0.003`, `burst_count: 5`.
Client requests that invite user(s) when [creating a
room](https://spec.matrix.org/v1.2/client-server-api/#post_matrixclientv3createroom)
@@ -1868,27 +1846,6 @@ rc_federation:
concurrent: 5
```
---
### `rc_presence`
This option sets ratelimiting for presence.
The `rc_presence.per_user` option sets rate limits on how often a specific
user's presence updates are evaluated. Ratelimited presence updates sent via sync are
ignored, and no error is returned to the client.
This option also sets the rate limit for the
[`PUT /_matrix/client/v3/presence/{userId}/status`](https://spec.matrix.org/latest/client-server-api/#put_matrixclientv3presenceuseridstatus)
endpoint.
`per_user` defaults to `per_second: 0.1`, `burst_count: 1`.
Example configuration:
```yaml
rc_presence:
per_user:
per_second: 0.05
burst_count: 0.5
```
---
### `federation_rr_transactions_per_room_per_second`
Sets outgoing federation transaction frequency for sending read-receipts,
@@ -1905,39 +1862,6 @@ federation_rr_transactions_per_room_per_second: 40
## Media Store
Config options related to Synapse's media store.
---
### `enable_authenticated_media`
When set to true, all subsequent media uploads will be marked as authenticated, and will not be available over legacy
unauthenticated media endpoints (`/_matrix/media/(r0|v3|v1)/download` and `/_matrix/media/(r0|v3|v1)/thumbnail`) - requests for authenticated media over these endpoints will result in a 404. All media, including authenticated media, will be available over the authenticated media endpoints `_matrix/client/v1/media/download` and `_matrix/client/v1/media/thumbnail`. Media uploaded prior to setting this option to true will still be available over the legacy endpoints. Note if the setting is switched to false
after enabling, media marked as authenticated will be available over legacy endpoints. Defaults to true (previously false). In a future release of Synapse, this option will be removed and become always-on.
In all cases, authenticated requests to download media will succeed, but for unauthenticated requests, this
case-by-case breakdown describes whether media downloads are permitted:
* `enable_authenticated_media = False`:
* unauthenticated client or homeserver requesting local media: allowed
* unauthenticated client or homeserver requesting remote media: allowed as long as the media is in the cache,
or as long as the remote homeserver does not require authentication to retrieve the media
* `enable_authenticated_media = True`:
* unauthenticated client or homeserver requesting local media:
allowed if the media was stored on the server whilst `enable_authenticated_media` was `False` (or in a previous Synapse version where this option did not exist);
otherwise denied.
* unauthenticated client or homeserver requesting remote media: the same as for local media;
allowed if the media was stored on the server whilst `enable_authenticated_media` was `False` (or in a previous Synapse version where this option did not exist);
otherwise denied.
It is especially notable that media downloaded before this option existed (in older Synapse versions), or whilst this option was set to `False`,
will perpetually be available over the legacy, unauthenticated endpoint, even after this option is set to `True`.
This is for backwards compatibility with older clients and homeservers that do not yet support requesting authenticated media;
those older clients or homeservers will not be cut off from media they can already see.
_Changed in Synapse 1.120:_ This option now defaults to `True` when not set, whereas before this version it defaulted to `False`.
Example configuration:
```yaml
enable_authenticated_media: false
```
---
### `enable_media_repo`
@@ -2022,24 +1946,6 @@ Example configuration:
max_image_pixels: 35M
```
---
### `remote_media_download_burst_count`
Remote media downloads are ratelimited using a [leaky bucket algorithm](https://en.wikipedia.org/wiki/Leaky_bucket), where a given "bucket" is keyed to the IP address of the requester when requesting remote media downloads. This configuration option sets the size of the bucket against which the size in bytes of downloads is penalized - if the bucket is full, i.e. a given number of bytes have already been downloaded, further downloads will be denied until the bucket drains. Defaults to 500MiB. See also `remote_media_download_per_second` which determines the rate at which the "bucket" is emptied and thus has available space to authorize new requests.
Example configuration:
```yaml
remote_media_download_burst_count: 200M
```
---
### `remote_media_download_per_second`
Works in conjunction with `remote_media_download_burst_count` to ratelimit remote media downloads - this configuration option determines the rate at which the "bucket" (see above) leaks in bytes per second. As requests are made to download remote media, the size of those requests in bytes is added to the bucket, and once the bucket has reached its capacity, no more requests will be allowed until a number of bytes has "drained" from the bucket. This setting determines the rate at which bytes drain from the bucket, with the practical effect that the larger the number, the faster the bucket leaks, allowing for more bytes downloaded over a shorter period of time. Defaults to 87KiB per second. See also `remote_media_download_burst_count`.
Example configuration:
```yaml
remote_media_download_per_second: 40K
```
---
### `prevent_media_downloads_from`
A list of domains to never download media from. Media from these
@@ -2051,10 +1957,9 @@ This will not prevent the listed domains from accessing media themselves.
It simply prevents users on this server from downloading media originating
from the listed servers.
This will have no effect on media originating from the local server. This only
affects media downloaded from other Matrix servers, to control URL previews see
[`url_preview_ip_range_blacklist`](#url_preview_ip_range_blacklist) or
[`url_preview_url_blacklist`](#url_preview_url_blacklist).
This will have no effect on media originating from the local server.
This only affects media downloaded from other Matrix servers, to
block domains from URL previews see [`url_preview_url_blacklist`](#url_preview_url_blacklist).
Defaults to an empty list (nothing blocked).
@@ -2206,14 +2111,12 @@ url_preview_ip_range_whitelist:
---
### `url_preview_url_blacklist`
Optional list of URL matches that the URL preview spider is denied from
accessing. This is a usability feature, not a security one. You should use
`url_preview_ip_range_blacklist` in preference to this, otherwise someone could
define a public DNS entry that points to a private IP address and circumvent
the blacklist. Applications that perform redirects or serve different content
when detecting that Synapse is accessing them can also bypass the blacklist.
This is more useful if you know there is an entire shape of URL that you
do not want Synapse to preview.
Optional list of URL matches that the URL preview spider is
denied from accessing. You should use `url_preview_ip_range_blacklist`
in preference to this, otherwise someone could define a public DNS
entry that points to a private IP address and circumvent the blacklist.
This is more useful if you know there is an entire shape of URL that
you know that will never want synapse to try to spider.
Each list entry is a dictionary of url component attributes as returned
by urlparse.urlsplit as applied to the absolute form of the URL. See
@@ -2372,22 +2275,6 @@ Example configuration:
```yaml
turn_shared_secret: "YOUR_SHARED_SECRET"
```
---
### `turn_shared_secret_path`
An alternative to [`turn_shared_secret`](#turn_shared_secret):
allows the shared secret to be specified in an external file.
The file should be a plain text file, containing only the shared secret.
Synapse reads the shared secret from the given file once at startup.
Example configuration:
```yaml
turn_shared_secret_path: /path/to/secrets/file
```
_Added in Synapse 1.116.0._
---
### `turn_username` and `turn_password`
@@ -2460,7 +2347,7 @@ enable_registration_without_verification: true
---
### `registrations_require_3pid`
If this is set, users must provide all of the specified types of [3PID](https://spec.matrix.org/latest/appendices/#3pid-types) when registering an account.
If this is set, users must provide all of the specified types of 3PID when registering an account.
Note that [`enable_registration`](#enable_registration) must also be set to allow account registration.
@@ -2485,9 +2372,6 @@ disable_msisdn_registration: true
Mandate that users are only allowed to associate certain formats of
3PIDs with accounts on this server, as specified by the `medium` and `pattern` sub-options.
`pattern` is a [Perl-like regular expression](https://docs.python.org/3/library/re.html#module-re).
More information about 3PIDs, allowed `medium` types and their `address` syntax can be found [in the Matrix spec](https://spec.matrix.org/latest/appendices/#3pid-types).
Example configuration:
```yaml
@@ -2497,7 +2381,7 @@ allowed_local_3pids:
- medium: email
pattern: '^[^@]+@vector\.im$'
- medium: msisdn
pattern: '^44\d{10}$'
pattern: '\+44'
```
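For a quick sense of what the two msisdn patterns above accept, checked here with Python's `re` module (illustrative only; this is not how Synapse applies the patterns internally):
```python
import re

strict = re.compile(r"^44\d{10}$")  # anchored: exactly '44' followed by ten digits
loose = re.compile(r"\+44")         # unanchored: anything containing '+44'

print(bool(strict.match("447700900123")))         # True
print(bool(strict.match("+447700900123")))        # False: leading '+' rejected
print(bool(loose.search("nonsense +44 string")))  # True: far too permissive
```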
---
### `enable_3pid_lookup`
@@ -2816,7 +2700,7 @@ Example configuration:
session_lifetime: 24h
```
---
### `refreshable_access_token_lifetime`
### `refresh_access_token_lifetime`
Time that an access token remains valid for, if the session is using refresh tokens.
@@ -3114,22 +2998,6 @@ Example configuration:
```yaml
macaroon_secret_key: <PRIVATE STRING>
```
---
### `macaroon_secret_key_path`
An alternative to [`macaroon_secret_key`](#macaroon_secret_key):
allows the secret key to be specified in an external file.
The file should be a plain text file, containing only the secret key.
Synapse reads the secret key from the given file once at startup.
Example configuration:
```yaml
macaroon_secret_key_path: /path/to/secrets/file
```
_Added in Synapse 1.121.0._
---
### `form_secret`
@@ -3168,15 +3036,6 @@ it was last used.
It is possible to build an entry from an old `signing.key` file using the
`export_signing_key` script which is provided with synapse.
If you have lost the private key file, you can ask another server you trust to
tell you the public keys it has seen from your server. To fetch the keys from
`matrix.org`, try something like:
```
curl https://matrix-federation.matrix.org/_matrix/key/v2/query/myserver.example.com |
jq '.server_keys | map(.verify_keys) | add'
```
Example configuration:
```yaml
old_signing_keys:
@@ -3401,8 +3260,8 @@ saml2_config:
contact_person:
- given_name: Bob
sur_name: "the Sysadmin"
email_address: ["admin@example.com"]
contact_type: technical
email_address": ["admin@example.com"]
contact_type": technical
saml_session_lifetime: 5m
@@ -3791,8 +3650,6 @@ Additional sub-options for this setting include:
Required if `enabled` is set to true.
* `subject_claim`: Name of the claim containing a unique identifier for the user.
Optional, defaults to `sub`.
* `display_name_claim`: Name of the claim containing the display name for the user. Optional.
If provided, the display name will be set to the value of this claim upon first login.
* `issuer`: The issuer to validate the "iss" claim against. Optional. If provided the
"iss" claim will be required and validated for all JSON web tokens.
* `audiences`: A list of audiences to validate the "aud" claim against. Optional.
@@ -3807,7 +3664,6 @@ jwt_config:
secret: "provided-by-your-issuer"
algorithm: "provided-by-your-issuer"
subject_claim: "name_of_claim"
display_name_claim: "name_of_claim"
issuer: "provided-by-your-issuer"
audiences:
- "provided-by-your-issuer"
@@ -3932,8 +3788,7 @@ This setting defines options related to the user directory.
This option has the following sub-options:
* `enabled`: Defines whether users can search the user directory. If false then
empty responses are returned to all queries. Defaults to true.
* `search_all_users`: Defines whether to search all users visible to your homeserver at the time the search is performed.
If set to true, will return all users known to the homeserver matching the search query.
* `search_all_users`: Defines whether to search all users visible to your HS at the time the search is performed. If set to true, will return all users who share a room with the user from the homeserver.
If false, search results will only contain users
visible in public rooms and users sharing a room with the requester.
Defaults to false.
@@ -4256,38 +4111,6 @@ default_power_level_content_override:
trusted_private_chat: null
public_chat: null
```
The default power levels for each preset are:
```yaml
"m.room.name": 50
"m.room.power_levels": 100
"m.room.history_visibility": 100
"m.room.canonical_alias": 50
"m.room.avatar": 50
"m.room.tombstone": 100
"m.room.server_acl": 100
"m.room.encryption": 100
```
So a complete example that maintains the default power levels for a preset
while also setting the power level for a new key is:
```yaml
default_power_level_content_override:
private_chat:
events:
"com.example.foo": 0
"m.room.name": 50
"m.room.power_levels": 100
"m.room.history_visibility": 100
"m.room.canonical_alias": 50
"m.room.avatar": 50
"m.room.tombstone": 100
"m.room.server_acl": 100
"m.room.encryption": 100
trusted_private_chat: null
public_chat: null
```
---
### `forget_rooms_on_leave`
@@ -4309,7 +4132,7 @@ By default, no room is excluded.
Example configuration:
```yaml
exclude_rooms_from_sync:
- "!foo:example.com"
- !foo:example.com
```
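The quoting shown above matters because in YAML a bare leading `!` introduces a tag rather than a string. A quick check with PyYAML, which Synapse uses to parse its configuration (the `rooms` key here is purely illustrative):
```python
import yaml

# Quoted, the room ID is an ordinary string:
print(yaml.safe_load('rooms:\n  - "!foo:example.com"'))
# -> {'rooms': ['!foo:example.com']}

# Unquoted, '!foo:example.com' is read as a YAML tag and fails to load:
try:
    yaml.safe_load("rooms:\n  - !foo:example.com")
except yaml.YAMLError as exc:
    print(type(exc).__name__)  # ConstructorError
```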
---
@@ -4442,12 +4265,6 @@ a `federation_sender_instances` map. Doing so will remove handling of this funct
the main process. Multiple workers can be added to this map, in which case the work is
balanced across them.
The way that the load balancing works is that any outbound federation request will be assigned
to a federation sender worker based on the hash of the destination server name. This
means that all requests being sent to the same destination will be processed by the same
worker instance. Multiple `federation_sender_instances` are useful if the server
federates with many other servers.
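As an illustrative sketch of destination-hash assignment (the worker names are placeholders, and this is not necessarily the exact hash Synapse uses):
```python
import hashlib

workers = ["sender1", "sender2", "sender3"]  # hypothetical worker_name values

def worker_for(destination: str) -> str:
    # Hash the destination server name onto a worker index, so the same
    # destination is always handled by the same sender instance.
    digest = hashlib.sha256(destination.encode()).digest()
    return workers[int.from_bytes(digest[:8], "big") % len(workers)]

print(worker_for("matrix.org"))  # stable for a fixed worker list
```
Note that the assignment depends on the worker list, which is one reason the configuration must be kept identical across all federation sender workers, as described below.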
This configuration setting must be shared between all workers handling federation
sending, and if changed all federation sender workers must be stopped at the same time
and then started, to ensure that all instances are running with the same config (otherwise
@@ -4486,10 +4303,6 @@ instance_map:
worker1:
host: localhost
port: 8034
other:
host: localhost
port: 8035
tls: true
```
Example configuration (#2, for UNIX sockets):
```yaml
@@ -4600,9 +4413,6 @@ This setting has the following sub-options:
* `path`: The full path to a local Unix socket file. **If this is used, `host` and
`port` are ignored.** Defaults to `/tmp/redis.sock`
* `password`: Optional password if configured on the Redis instance.
* `password_path`: Alternative to `password`, reading the password from an
external file. The file should be a plain text file, containing only the
password. Synapse reads the password from the given file once at startup.
* `dbid`: Optional Redis dbid, if you need to connect to a specific Redis logical database.
* `use_tls`: Whether to use a TLS connection. Defaults to false.
* `certificate_file`: Optional path to the certificate file
@@ -4616,16 +4426,13 @@ This setting has the following sub-options:
_Changed in Synapse 1.85.0: Added path option to use a local Unix socket_
_Changed in Synapse 1.116.0: Added password\_path_
Example configuration:
```yaml
redis:
enabled: true
host: localhost
port: 6379
password_path: <path_to_the_password_file>
# OR password: <secret_password>
password: <secret_password>
dbid: <dbid>
#use_tls: True
#certificate_file: <path_to_the_certificate_file>
@@ -4803,9 +4610,7 @@ This setting has the following sub-options:
* `only_for_direct_messages`: Whether invites should be automatically accepted for all room types, or only
for direct messages. Defaults to false.
* `only_from_local_users`: Whether to only automatically accept invites from users on this homeserver. Defaults to false.
* `worker_to_run_on`: Which worker to run this module on. This must match
the "worker_name". If not set or `null`, invites will be accepted on the
main process.
* `worker_to_run_on`: Which worker to run this module on. This must match the "worker_name".
NOTE: Care should be taken not to enable this setting if the `synapse_auto_accept_invite` module is enabled and installed.
The two modules will compete to perform the same task and may result in undesired behaviour. For example, multiple join
View File
@@ -62,6 +62,6 @@ following documentation:
## Reporting a security vulnerability
If you've found a security issue in Synapse or any other Element project,
please report it to us in accordance with our [Security Disclosure
Policy](https://element.io/security/security-disclosure-policy). Thank you!
If you've found a security issue in Synapse or any other Matrix.org Foundation
project, please report it to us in accordance with our [Security Disclosure
Policy](https://www.matrix.org/security-disclosure-policy/). Thank you!

View File

@@ -177,11 +177,11 @@ The following applies to Synapse installations that have been installed from sou
You can start the main Synapse process with Poetry by running the following command:
```console
poetry run synapse_homeserver --config-path [your homeserver.yaml]
poetry run synapse_homeserver --config-file [your homeserver.yaml]
```
For worker setups, you can run the following command
```console
poetry run synapse_worker --config-path [your homeserver.yaml] --config-path [your worker.yaml]
poetry run synapse_worker --config-file [your homeserver.yaml] --config-file [your worker.yaml]
```
## Available worker applications
@@ -273,20 +273,23 @@ information.
^/_matrix/client/(api/v1|r0|v3|unstable)/knock/
^/_matrix/client/(api/v1|r0|v3|unstable)/profile/
# Account data requests
^/_matrix/client/(r0|v3|unstable)/.*/tags
^/_matrix/client/(r0|v3|unstable)/.*/account_data
# Receipts requests
^/_matrix/client/(r0|v3|unstable)/rooms/.*/receipt
^/_matrix/client/(r0|v3|unstable)/rooms/.*/read_markers
# Presence requests
^/_matrix/client/(api/v1|r0|v3|unstable)/presence/
# User directory search requests
^/_matrix/client/(r0|v3|unstable)/user_directory/search$
Additionally, the following REST endpoints can be handled for GET requests:
^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/
^/_matrix/client/unstable/org.matrix.msc4140/delayed_events
# Account data requests
^/_matrix/client/(r0|v3|unstable)/.*/tags
^/_matrix/client/(r0|v3|unstable)/.*/account_data
# Presence requests
^/_matrix/client/(api/v1|r0|v3|unstable)/presence/
Pagination requests can also be handled, but all requests for a given
room must be routed to the same instance. Additionally, care must be taken to
@@ -736,8 +739,6 @@ An example for a federation sender instance:
Handles the media repository. It can handle all endpoints starting with:
/_matrix/media/
/_matrix/client/v1/media/
/_matrix/federation/v1/media/
... and the following regular expressions matching media-specific administration APIs:
56
flake.lock generated
View File
@@ -56,6 +56,24 @@
"type": "github"
}
},
"flake-utils_2": {
"inputs": {
"systems": "systems_2"
},
"locked": {
"lastModified": 1681202837,
"narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "cfacdce06f30d2b68473a46042957675eebb3401",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"gitignore": {
"inputs": {
"nixpkgs": [
@@ -168,27 +186,27 @@
},
"nixpkgs_2": {
"locked": {
"lastModified": 1729265718,
"narHash": "sha256-4HQI+6LsO3kpWTYuVGIzhJs1cetFcwT7quWCk/6rqeo=",
"lastModified": 1690535733,
"narHash": "sha256-WgjUPscQOw3cB8yySDGlyzo6cZNihnRzUwE9kadv/5I=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "ccc0c2126893dd20963580b6478d1a10a4512185",
"rev": "8cacc05fbfffeaab910e8c2c9e2a7c6b32ce881a",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"ref": "master",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_3": {
"locked": {
"lastModified": 1728538411,
"narHash": "sha256-f0SBJz1eZ2yOuKUr5CA9BHULGXVSn6miBuUWdTyhUhU=",
"lastModified": 1681358109,
"narHash": "sha256-eKyxW4OohHQx9Urxi7TQlFBTDWII+F+x2hklDOQPB50=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "b69de56fac8c2b6f8fd27f2eca01dcda8e0a4221",
"rev": "96ba1c52e54e74c3197f4d43026b3f3d92e83ff9",
"type": "github"
},
"original": {
@@ -231,19 +249,20 @@
"devenv": "devenv",
"nixpkgs": "nixpkgs_2",
"rust-overlay": "rust-overlay",
"systems": "systems_2"
"systems": "systems_3"
}
},
"rust-overlay": {
"inputs": {
"flake-utils": "flake-utils_2",
"nixpkgs": "nixpkgs_3"
},
"locked": {
"lastModified": 1731897198,
"narHash": "sha256-Ou7vLETSKwmE/HRQz4cImXXJBr/k9gp4J4z/PF8LzTE=",
"lastModified": 1693966243,
"narHash": "sha256-a2CA1aMIPE67JWSVIGoGtD3EGlFdK9+OlJQs0FOWCKY=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "0be641045af6d8666c11c2c40e45ffc9667839b5",
"rev": "a8b4bb4cbb744baaabc3e69099f352f99164e2c1",
"type": "github"
},
"original": {
@@ -281,6 +300,21 @@
"repo": "default",
"type": "github"
}
},
"systems_3": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
View File
@@ -3,13 +3,13 @@
# (https://github.com/matrix-org/complement) Matrix homeserver test suites are also
# installed automatically.
#
# You must have already installed Nix (https://nixos.org/download/) on your system to use this.
# Nix can be installed on any Linux distribution or MacOS; NixOS is not required.
# Windows is not directly supported, but Nix can be installed inside of WSL2 or even Docker
# You must have already installed Nix (https://nixos.org) on your system to use this.
# Nix can be installed on Linux or MacOS; NixOS is not required. Windows is not
# directly supported, but Nix can be installed inside of WSL2 or even Docker
# containers. Please refer to https://nixos.org/download for details.
#
# You must also enable support for flakes in Nix. See the following for how to
# do so permanently: https://wiki.nixos.org/wiki/Flakes#Other_Distros,_without_Home-Manager
# do so permanently: https://nixos.wiki/wiki/Flakes#Enable_flakes
#
# Be warned: you'll need over 3.75 GB of free space to download all the dependencies.
#
@@ -20,7 +20,7 @@
# locally from "services", such as PostgreSQL and Redis.
#
# You should now be dropped into a new shell with all programs and dependencies
# available to you!
# availabile to you!
#
# You can start up pre-configured local Synapse, PostgreSQL and Redis instances by
# running: `devenv up`. To stop them, use Ctrl-C.
@@ -39,9 +39,9 @@
{
inputs = {
# Use the rolling/unstable branch of nixpkgs. Used to fetch the latest
# Use the master/unstable branch of nixpkgs. Used to fetch the latest
# available versions of packages.
nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
nixpkgs.url = "github:NixOS/nixpkgs/master";
# Output a development shell for x86_64/aarch64 Linux/Darwin (MacOS).
systems.url = "github:nix-systems/default";
# A development environment manager built on Nix. See https://devenv.sh.
@@ -50,7 +50,7 @@
rust-overlay.url = "github:oxalica/rust-overlay";
};
outputs = { nixpkgs, devenv, systems, rust-overlay, ... } @ inputs:
outputs = { self, nixpkgs, devenv, systems, rust-overlay, ... } @ inputs:
let
forEachSystem = nixpkgs.lib.genAttrs (import systems);
in {
@@ -82,7 +82,7 @@
#
# NOTE: We currently need to set the Rust version unnecessarily high
# in order to work around https://github.com/matrix-org/synapse/issues/15939
(rust-bin.stable."1.82.0".default.override {
(rust-bin.stable."1.71.1".default.override {
# Additionally install the "rust-src" extension to allow diving into the
# Rust source code in an IDE (rust-analyzer will also make use of it).
extensions = [ "rust-src" ];
@@ -126,7 +126,7 @@
# Automatically activate the poetry virtualenv upon entering the shell.
languages.python.poetry.activate.enable = true;
# Install all extra Python dependencies; this is needed to run the unit
# tests and utilise all Synapse features.
# tests and utilitise all Synapse features.
languages.python.poetry.install.arguments = ["--extras all"];
# Install the 'matrix-synapse' package from the local checkout.
languages.python.poetry.install.installRootPackage = true;
@@ -163,8 +163,8 @@
# Create a postgres user called 'synapse_user' which has ownership
# over the 'synapse' database.
services.postgres.initialScript = ''
CREATE USER synapse_user;
ALTER DATABASE synapse OWNER TO synapse_user;
CREATE USER synapse_user;
ALTER DATABASE synapse OWNER TO synapse_user;
'';
# Redis is needed in order to run Synapse in worker mode.
@@ -205,7 +205,7 @@
# corresponding Nix packages on https://search.nixos.org/packages.
#
# This was done until `./install-deps.pl --dryrun` produced no output.
env.PERL5LIB = "${with pkgs.perl538Packages; makePerlPath [
env.PERL5LIB = "${with pkgs.perl536Packages; makePerlPath [
DBI
ClassMethodModifiers
CryptEd25519
View File
@@ -26,7 +26,7 @@ strict_equality = True
# Run mypy type checking with the minimum supported Python version to catch new usage
# that isn't backwards-compatible (types, overloads, etc).
python_version = 3.9
python_version = 3.8
files =
docker/,
@@ -96,6 +96,3 @@ ignore_missing_imports = True
# https://github.com/twisted/treq/pull/366
[mypy-treq.*]
ignore_missing_imports = True
[mypy-multipart.*]
ignore_missing_imports = True
2314
poetry.lock generated
File diff suppressed because it is too large
280
pylint.cfg Normal file
View File
@@ -0,0 +1,280 @@
[MASTER]
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Profiled execution.
profile=no
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Pickle collected data for later comparisons.
persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
[MESSAGES CONTROL]
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time. See also the "--disable" option for examples.
#enable=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=missing-docstring
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]".
files-output=no
# Tells whether to display a full report or only the messages
reports=yes
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Add a comment according to your evaluation note. This is used by the global
# evaluation report (RP0004).
comment=no
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
ignored-classes=SQLObject
# When zope mode is activated, add a predefined set of Zope acquired attributes
# to generated-members.
zope=no
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E0201 when accessed. Python regular
# expressions are accepted.
generated-members=REQUEST,acl_users,aq_parent
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=4
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the beginning of the name of dummy variables
# (i.e. not used).
dummy-variables-rgx=_$|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
[BASIC]
# Required attributes for module, separated by a comma
required-attributes=
# List of builtins function names that should not be used, separated by a comma
bad-functions=map,filter,apply,input
# Regular expression which should only match correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression which should only match correct module level names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression which should only match correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Regular expression which should only match correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct instance attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct variable names
variable-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct attribute names in class
# bodies
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression which should only match correct list comprehension /
# generator expression variable names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=__.*__
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=80
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
# List of optional constructs for which whitespace checking is disabled
no-space-check=trailing-comma,dict-separator
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,TERMIOS,Bastion,rexec
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
[CLASSES]
# List of interface methods to ignore, separated by a comma. This is used for
# instance to not check methods defines in Zope's Interface base class.
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
View File
@@ -34,11 +34,15 @@
name = "Internal Changes"
showcontent = true
[tool.black]
target-version = ['py38', 'py39', 'py310', 'py311']
# black ignores everything in .gitignore by default, see
# https://black.readthedocs.io/en/stable/usage_and_configuration/file_collection_and_discovery.html#gitignore
# Use `extend-exclude` if you want to exclude something in addition to this.
[tool.ruff]
line-length = 88
target-version = "py39"
[tool.ruff.lint]
# See https://beta.ruff.rs/docs/rules/#error-e
# for error codes. The ones we ignore are:
# E501: Line too long (black enforces this for us)
@@ -58,8 +62,6 @@ select = [
"W",
# pyflakes
"F",
# isort
"I001",
# flake8-bugbear
"B0",
# flake8-comprehensions
@@ -76,20 +78,17 @@ select = [
"EXE",
]
[tool.ruff.lint.isort]
combine-as-imports = true
section-order = ["future", "standard-library", "third-party", "twisted", "first-party", "testing", "local-folder"]
known-first-party = ["synapse"]
[tool.ruff.lint.isort.sections]
twisted = ["twisted", "OpenSSL"]
testing = ["tests"]
[tool.ruff.format]
quote-style = "double"
indent-style = "space"
skip-magic-trailing-comma = false
line-ending = "auto"
[tool.isort]
line_length = 88
sections = ["FUTURE", "STDLIB", "THIRDPARTY", "TWISTED", "FIRSTPARTY", "TESTS", "LOCALFOLDER"]
default_section = "THIRDPARTY"
known_first_party = ["synapse"]
known_tests = ["tests"]
known_twisted = ["twisted", "OpenSSL"]
multi_line_output = 3
include_trailing_comma = true
combine_as_imports = true
skip_gitignore = true
[tool.maturin]
manifest-path = "rust/Cargo.toml"
@@ -97,7 +96,7 @@ module-name = "synapse.synapse_rust"
[tool.poetry]
name = "matrix-synapse"
version = "1.124.0rc1"
version = "1.108.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later"
@@ -155,7 +154,7 @@ synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
update_synapse_database = "synapse._scripts.update_synapse_database:main"
[tool.poetry.dependencies]
python = "^3.9.0"
python = "^3.8.0"
# Mandatory Dependencies
# ----------------------
@@ -178,7 +177,7 @@ Twisted = {extras = ["tls"], version = ">=18.9.0"}
treq = ">=15.1"
# Twisted has required pyopenssl 16.0 since about Twisted 16.6.
pyOpenSSL = ">=16.0.0"
PyYAML = ">=5.3"
PyYAML = ">=3.13"
pyasn1 = ">=0.1.9"
pyasn1-modules = ">=0.0.7"
bcrypt = ">=3.1.7"
@@ -201,8 +200,10 @@ netaddr = ">=0.7.18"
# add a lower bound to the Jinja2 dependency.
Jinja2 = ">=3.0"
bleach = ">=1.4.3"
# We use `assert_never`, which were added in `typing-extensions` 4.1.
typing-extensions = ">=4.1"
# We use `ParamSpec` and `Concatenate`, which were added in `typing-extensions` 3.10.0.0.
# Additionally we need https://github.com/python/typing/pull/817 to allow types to be
# generic over ParamSpecs.
typing-extensions = ">=3.10.0.1"
# We enforce that we have a `cryptography` version that bundles an `openssl`
# with the latest security patches.
cryptography = ">=3.4.7"
@@ -225,8 +226,6 @@ pydantic = ">=1.7.4, <3"
# needed.
setuptools_rust = ">=1.3"
# This is used for parsing multipart responses
python-multipart = ">=0.0.9"
# Optional Dependencies
# ---------------------
@@ -241,7 +240,7 @@ authlib = { version = ">=0.15.1", optional = true }
# `contrib/systemd/log_config.yaml`.
# Note: systemd-python 231 appears to have been yanked from pypi
systemd-python = { version = ">=231", optional = true }
lxml = { version = ">=4.5.2", optional = true }
lxml = { version = ">=4.2.0", optional = true }
sentry-sdk = { version = ">=0.7.2", optional = true }
opentracing = { version = ">=2.2.0", optional = true }
jaeger-client = { version = ">=4.0.0", optional = true }
@@ -320,7 +319,9 @@ all = [
# failing on new releases. Keeping lower bounds loose here means that dependabot
# can bump versions without having to update the content-hash in the lockfile.
# This helps prevent merge conflicts when running a batch of dependabot updates.
ruff = "0.7.3"
isort = ">=5.10.1"
black = ">=22.7.0"
ruff = "0.3.7"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"
@@ -370,7 +371,7 @@ tomli = ">=1.2.3"
# runtime errors caused by build system changes.
# We are happy to raise these upper bounds upon request,
# provided we check that it's safe to do so (i.e. that CI passes).
requires = ["poetry-core>=1.1.0,<=1.9.1", "setuptools_rust>=1.3,<=1.10.2"]
requires = ["poetry-core>=1.1.0,<=1.9.0", "setuptools_rust>=1.3,<=1.8.1"]
build-backend = "poetry.core.masonry.api"
@@ -378,19 +379,16 @@ build-backend = "poetry.core.masonry.api"
# Skip unsupported platforms (by us or by Rust).
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
# We skip:
# - CPython 3.6, 3.7 and 3.8: EOLed
# - PyPy 3.7 and 3.8: we only support Python 3.9+
# - CPython 3.6 and 3.7: EOLed
# - PyPy 3.7: we only support Python 3.8+
# - musllinux i686: excluded to reduce number of wheels we build.
# c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
# - PyPy on Aarch64 and musllinux on aarch64: too slow to build.
# c.f. https://github.com/matrix-org/synapse/pull/14259
skip = "cp36* cp37* cp38* pp37* pp38* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64"
skip = "cp36* cp37* pp37* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64"
# We need a rust compiler.
#
# We temporarily pin Rust to 1.82.0 to work around
# https://github.com/element-hq/synapse/issues/17988
before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal"
# We need a rust compiler
before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y --profile minimal"
environment= { PATH = "$PATH:$HOME/.cargo/bin" }
# For some reason if we don't manually clean the build directory we
View File
@@ -30,14 +30,14 @@ http = "1.1.0"
lazy_static = "1.4.0"
log = "0.4.17"
mime = "0.3.17"
pyo3 = { version = "0.23.2", features = [
pyo3 = { version = "0.21.0", features = [
"macros",
"anyhow",
"abi3",
"abi3-py38",
] }
pyo3-log = "0.12.0"
pythonize = "0.23.0"
pyo3-log = "0.10.0"
pythonize = "0.21.0"
regex = "1.6.0"
sha2 = "0.10.8"
serde = { version = "1.0.144", features = ["derive"] }
View File
@@ -60,7 +60,6 @@ fn bench_match_exact(b: &mut Bencher) {
true,
vec![],
false,
false,
)
.unwrap();
@@ -106,7 +105,6 @@ fn bench_match_word(b: &mut Bencher) {
true,
vec![],
false,
false,
)
.unwrap();
@@ -152,7 +150,6 @@ fn bench_match_word_miss(b: &mut Bencher) {
true,
vec![],
false,
false,
)
.unwrap();
@@ -198,7 +195,6 @@ fn bench_eval_message(b: &mut Bencher) {
true,
vec![],
false,
false,
)
.unwrap();
@@ -209,7 +205,6 @@ fn bench_eval_message(b: &mut Bencher) {
false,
false,
false,
false,
);
b.iter(|| eval.run(&rules, Some("bob"), Some("person")));
View File
@@ -32,14 +32,14 @@ use crate::push::utils::{glob_to_regex, GlobMatchType};
/// Called when registering modules with python.
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
let child_module = PyModule::new(py, "acl")?;
let child_module = PyModule::new_bound(py, "acl")?;
child_module.add_class::<ServerAclEvaluator>()?;
m.add_submodule(&child_module)?;
// We need to manually add the module to sys.modules to make `from
// synapse.synapse_rust import acl` work.
py.import("sys")?
py.import_bound("sys")?
.getattr("modules")?
.set_item("synapse.synapse_rust.acl", child_module)?;
View File
@@ -1,107 +0,0 @@
/*
* This file is licensed under the Affero General Public License (AGPL) version 3.
*
* Copyright (C) 2024 New Vector, Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* See the GNU Affero General Public License for more details:
* <https://www.gnu.org/licenses/agpl-3.0.html>.
*/
use std::collections::HashMap;
use pyo3::{exceptions::PyValueError, pyfunction, PyResult};
use crate::{
identifier::UserID,
matrix_const::{
HISTORY_VISIBILITY_INVITED, HISTORY_VISIBILITY_JOINED, MEMBERSHIP_INVITE, MEMBERSHIP_JOIN,
},
};
#[pyfunction(name = "event_visible_to_server")]
pub fn event_visible_to_server_py(
sender: String,
target_server_name: String,
history_visibility: String,
erased_senders: HashMap<String, bool>,
partial_state_invisible: bool,
memberships: Vec<(String, String)>, // (state_key, membership)
) -> PyResult<bool> {
event_visible_to_server(
sender,
target_server_name,
history_visibility,
erased_senders,
partial_state_invisible,
memberships,
)
.map_err(|e| PyValueError::new_err(format!("{e}")))
}
/// Return whether the target server is allowed to see the event.
///
/// For a fully stated room, the target server is allowed to see an event E if:
/// - the state at E has world readable or shared history vis, OR
/// - the state at E says that the target server is in the room.
///
/// For a partially stated room, the target server is allowed to see E if:
/// - E was created by this homeserver, AND:
/// - the partial state at E has world readable or shared history vis, OR
/// - the partial state at E says that the target server is in the room.
pub fn event_visible_to_server(
sender: String,
target_server_name: String,
history_visibility: String,
erased_senders: HashMap<String, bool>,
partial_state_invisible: bool,
memberships: Vec<(String, String)>, // (state_key, membership)
) -> anyhow::Result<bool> {
if let Some(&erased) = erased_senders.get(&sender) {
if erased {
return Ok(false);
}
}
if partial_state_invisible {
return Ok(false);
}
if history_visibility != HISTORY_VISIBILITY_INVITED
&& history_visibility != HISTORY_VISIBILITY_JOINED
{
return Ok(true);
}
let mut visible = false;
for (state_key, membership) in memberships {
let state_key = UserID::try_from(state_key.as_ref())
.map_err(|e| anyhow::anyhow!(format!("invalid user_id ({state_key}): {e}")))?;
if state_key.server_name() != target_server_name {
return Err(anyhow::anyhow!(
"state_key.server_name ({}) does not match target_server_name ({target_server_name})",
state_key.server_name()
));
}
match membership.as_str() {
MEMBERSHIP_INVITE => {
if history_visibility == HISTORY_VISIBILITY_INVITED {
visible = true;
break;
}
}
MEMBERSHIP_JOIN => {
visible = true;
break;
}
_ => continue,
}
}
Ok(visible)
}
View File
@@ -41,11 +41,9 @@ use pyo3::{
pybacked::PyBackedStr,
pyclass, pymethods,
types::{PyAnyMethods, PyDict, PyDictMethods, PyString},
Bound, IntoPyObject, PyAny, PyObject, PyResult, Python,
Bound, IntoPy, PyAny, PyObject, PyResult, Python,
};
use crate::UnwrapInfallible;
/// Definitions of the various fields of the internal metadata.
#[derive(Clone)]
enum EventInternalMetadataData {
@@ -62,59 +60,31 @@ enum EventInternalMetadataData {
impl EventInternalMetadataData {
/// Convert the field to its name and python object.
fn to_python_pair<'a>(&self, py: Python<'a>) -> (&'a Bound<'a, PyString>, Bound<'a, PyAny>) {
fn to_python_pair<'a>(&self, py: Python<'a>) -> (&'a Bound<'a, PyString>, PyObject) {
match self {
EventInternalMetadataData::OutOfBandMembership(o) => (
pyo3::intern!(py, "out_of_band_membership"),
o.into_pyobject(py)
.unwrap_infallible()
.to_owned()
.into_any(),
),
EventInternalMetadataData::SendOnBehalfOf(o) => (
pyo3::intern!(py, "send_on_behalf_of"),
o.into_pyobject(py).unwrap_infallible().into_any(),
),
EventInternalMetadataData::RecheckRedaction(o) => (
pyo3::intern!(py, "recheck_redaction"),
o.into_pyobject(py)
.unwrap_infallible()
.to_owned()
.into_any(),
),
EventInternalMetadataData::SoftFailed(o) => (
pyo3::intern!(py, "soft_failed"),
o.into_pyobject(py)
.unwrap_infallible()
.to_owned()
.into_any(),
),
EventInternalMetadataData::ProactivelySend(o) => (
pyo3::intern!(py, "proactively_send"),
o.into_pyobject(py)
.unwrap_infallible()
.to_owned()
.into_any(),
),
EventInternalMetadataData::Redacted(o) => (
pyo3::intern!(py, "redacted"),
o.into_pyobject(py)
.unwrap_infallible()
.to_owned()
.into_any(),
),
EventInternalMetadataData::TxnId(o) => (
pyo3::intern!(py, "txn_id"),
o.into_pyobject(py).unwrap_infallible().into_any(),
),
EventInternalMetadataData::TokenId(o) => (
pyo3::intern!(py, "token_id"),
o.into_pyobject(py).unwrap_infallible().into_any(),
),
EventInternalMetadataData::DeviceId(o) => (
pyo3::intern!(py, "device_id"),
o.into_pyobject(py).unwrap_infallible().into_any(),
),
EventInternalMetadataData::OutOfBandMembership(o) => {
(pyo3::intern!(py, "out_of_band_membership"), o.into_py(py))
}
EventInternalMetadataData::SendOnBehalfOf(o) => {
(pyo3::intern!(py, "send_on_behalf_of"), o.into_py(py))
}
EventInternalMetadataData::RecheckRedaction(o) => {
(pyo3::intern!(py, "recheck_redaction"), o.into_py(py))
}
EventInternalMetadataData::SoftFailed(o) => {
(pyo3::intern!(py, "soft_failed"), o.into_py(py))
}
EventInternalMetadataData::ProactivelySend(o) => {
(pyo3::intern!(py, "proactively_send"), o.into_py(py))
}
EventInternalMetadataData::Redacted(o) => {
(pyo3::intern!(py, "redacted"), o.into_py(py))
}
EventInternalMetadataData::TxnId(o) => (pyo3::intern!(py, "txn_id"), o.into_py(py)),
EventInternalMetadataData::TokenId(o) => (pyo3::intern!(py, "token_id"), o.into_py(py)),
EventInternalMetadataData::DeviceId(o) => {
(pyo3::intern!(py, "device_id"), o.into_py(py))
}
}
}
@@ -234,8 +204,6 @@ pub struct EventInternalMetadata {
/// The stream ordering of this event. None, until it has been persisted.
#[pyo3(get, set)]
stream_ordering: Option<NonZeroI64>,
#[pyo3(get, set)]
instance_name: Option<String>,
/// whether this event is an outlier (ie, whether we have the state at that
/// point in the DAG)
@@ -264,7 +232,6 @@ impl EventInternalMetadata {
Ok(EventInternalMetadata {
data,
stream_ordering: None,
instance_name: None,
outlier: false,
})
}
@@ -277,7 +244,7 @@ impl EventInternalMetadata {
///
/// Note that `outlier` and `stream_ordering` are stored in separate columns so are not returned here.
fn get_dict(&self, py: Python<'_>) -> PyResult<PyObject> {
let dict = PyDict::new(py);
let dict = PyDict::new_bound(py);
for entry in &self.data {
let (key, value) = entry.to_python_pair(py);
View File
@@ -22,23 +22,21 @@
use pyo3::{
types::{PyAnyMethods, PyModule, PyModuleMethods},
wrap_pyfunction, Bound, PyResult, Python,
Bound, PyResult, Python,
};
pub mod filter;
mod internal_metadata;
/// Called when registering modules with python.
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
let child_module = PyModule::new(py, "events")?;
let child_module = PyModule::new_bound(py, "events")?;
child_module.add_class::<internal_metadata::EventInternalMetadata>()?;
child_module.add_function(wrap_pyfunction!(filter::event_visible_to_server_py, m)?)?;
m.add_submodule(&child_module)?;
// We need to manually add the module to sys.modules to make `from
// synapse.synapse_rust import events` work.
py.import("sys")?
py.import_bound("sys")?
.getattr("modules")?
.set_item("synapse.synapse_rust.events", child_module)?;
View File
@@ -70,7 +70,7 @@ pub fn http_request_from_twisted(request: &Bound<'_, PyAny>) -> PyResult<Request
let headers_iter = request
.getattr("requestHeaders")?
.call_method0("getAllRawHeaders")?
.try_iter()?;
.iter()?;
for header in headers_iter {
let header = header?;
View File
@@ -1,252 +0,0 @@
/*
* This file is licensed under the Affero General Public License (AGPL) version 3.
*
* Copyright (C) 2024 New Vector, Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* See the GNU Affero General Public License for more details:
* <https://www.gnu.org/licenses/agpl-3.0.html>.
*/
//! # Matrix Identifiers
//!
//! This module contains definitions and utilities for working with matrix identifiers.
use std::{fmt, ops::Deref};
/// Errors that can occur when parsing a matrix identifier.
#[derive(Clone, Debug, PartialEq)]
pub enum IdentifierError {
IncorrectSigil,
MissingColon,
}
impl fmt::Display for IdentifierError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self)
}
}
/// A Matrix user_id.
#[derive(Clone, Debug, PartialEq)]
pub struct UserID(String);
impl UserID {
/// Returns the `localpart` of the user_id.
pub fn localpart(&self) -> &str {
&self[1..self.colon_pos()]
}
/// Returns the `server_name` / `domain` of the user_id.
pub fn server_name(&self) -> &str {
&self[self.colon_pos() + 1..]
}
/// Returns the position of the ':' inside of the user_id.
/// Used when splitting the user_id into its respective parts.
fn colon_pos(&self) -> usize {
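// Cannot panic: the `TryFrom` constructors reject any string without a ':'.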
self.find(':').unwrap()
}
}
impl TryFrom<&str> for UserID {
type Error = IdentifierError;
/// Will try creating a `UserID` from the provided `&str`.
/// Can fail if the user_id is incorrectly formatted.
fn try_from(s: &str) -> Result<Self, Self::Error> {
if !s.starts_with('@') {
return Err(IdentifierError::IncorrectSigil);
}
if s.find(':').is_none() {
return Err(IdentifierError::MissingColon);
}
Ok(UserID(s.to_string()))
}
}
impl TryFrom<String> for UserID {
type Error = IdentifierError;
/// Will try creating a `UserID` from the provided `String`.
/// Can fail if the user_id is incorrectly formatted.
fn try_from(s: String) -> Result<Self, Self::Error> {
if !s.starts_with('@') {
return Err(IdentifierError::IncorrectSigil);
}
if s.find(':').is_none() {
return Err(IdentifierError::MissingColon);
}
Ok(UserID(s))
}
}
impl<'de> serde::Deserialize<'de> for UserID {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let s: String = serde::Deserialize::deserialize(deserializer)?;
UserID::try_from(s).map_err(serde::de::Error::custom)
}
}
impl Deref for UserID {
type Target = str;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl fmt::Display for UserID {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
/// A Matrix room_id.
#[derive(Clone, Debug, PartialEq)]
pub struct RoomID(String);
impl RoomID {
/// Returns the `localpart` of the room_id.
pub fn localpart(&self) -> &str {
&self[1..self.colon_pos()]
}
/// Returns the `server_name` / `domain` of the room_id.
pub fn server_name(&self) -> &str {
&self[self.colon_pos() + 1..]
}
/// Returns the position of the ':' inside of the room_id.
/// Used when splitting the room_id into its respective parts.
fn colon_pos(&self) -> usize {
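// Cannot panic: the `TryFrom` constructors reject any string without a ':'.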
self.find(':').unwrap()
}
}
impl TryFrom<&str> for RoomID {
type Error = IdentifierError;
/// Will try creating a `RoomID` from the provided `&str`.
/// Can fail if the room_id is incorrectly formatted.
fn try_from(s: &str) -> Result<Self, Self::Error> {
if !s.starts_with('!') {
return Err(IdentifierError::IncorrectSigil);
}
if s.find(':').is_none() {
return Err(IdentifierError::MissingColon);
}
Ok(RoomID(s.to_string()))
}
}
impl TryFrom<String> for RoomID {
type Error = IdentifierError;
/// Will try creating a `RoomID` from the provided `String`.
/// Can fail if the room_id is incorrectly formatted.
fn try_from(s: String) -> Result<Self, Self::Error> {
if !s.starts_with('!') {
return Err(IdentifierError::IncorrectSigil);
}
if s.find(':').is_none() {
return Err(IdentifierError::MissingColon);
}
Ok(RoomID(s))
}
}
impl<'de> serde::Deserialize<'de> for RoomID {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let s: String = serde::Deserialize::deserialize(deserializer)?;
RoomID::try_from(s).map_err(serde::de::Error::custom)
}
}
impl Deref for RoomID {
type Target = str;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl fmt::Display for RoomID {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
/// A Matrix event_id.
#[derive(Clone, Debug, PartialEq)]
pub struct EventID(String);
impl TryFrom<&str> for EventID {
type Error = IdentifierError;
/// Will try creating an `EventID` from the provided `&str`.
/// Can fail if the event_id is incorrectly formatted.
fn try_from(s: &str) -> Result<Self, Self::Error> {
if !s.starts_with('$') {
return Err(IdentifierError::IncorrectSigil);
}
Ok(EventID(s.to_string()))
}
}
impl TryFrom<String> for EventID {
type Error = IdentifierError;
/// Will try creating an `EventID` from the provided `String`.
/// Can fail if the event_id is incorrectly formatted.
fn try_from(s: String) -> Result<Self, Self::Error> {
if !s.starts_with('$') {
return Err(IdentifierError::IncorrectSigil);
}
Ok(EventID(s))
}
}
impl<'de> serde::Deserialize<'de> for EventID {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let s: String = serde::Deserialize::deserialize(deserializer)?;
EventID::try_from(s).map_err(serde::de::Error::custom)
}
}
impl Deref for EventID {
type Target = str;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl fmt::Display for EventID {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
View File
@@ -1,5 +1,3 @@
use std::convert::Infallible;
use lazy_static::lazy_static;
use pyo3::prelude::*;
use pyo3_log::ResetHandle;
@@ -8,8 +6,6 @@ pub mod acl;
pub mod errors;
pub mod events;
pub mod http;
pub mod identifier;
pub mod matrix_const;
pub mod push;
pub mod rendezvous;
@@ -54,16 +50,3 @@ fn synapse_rust(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
Ok(())
}
pub trait UnwrapInfallible<T> {
fn unwrap_infallible(self) -> T;
}
impl<T> UnwrapInfallible<T> for Result<T, Infallible> {
fn unwrap_infallible(self) -> T {
match self {
Ok(val) => val,
Err(never) => match never {},
}
}
}
View File
@@ -1,28 +0,0 @@
/*
* This file is licensed under the Affero General Public License (AGPL) version 3.
*
* Copyright (C) 2024 New Vector, Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* See the GNU Affero General Public License for more details:
* <https://www.gnu.org/licenses/agpl-3.0.html>.
*/
//! # Matrix Constants
//!
//! This module contains definitions for constant values described by the matrix specification.
pub const HISTORY_VISIBILITY_WORLD_READABLE: &str = "world_readable";
pub const HISTORY_VISIBILITY_SHARED: &str = "shared";
pub const HISTORY_VISIBILITY_INVITED: &str = "invited";
pub const HISTORY_VISIBILITY_JOINED: &str = "joined";
pub const MEMBERSHIP_BAN: &str = "ban";
pub const MEMBERSHIP_LEAVE: &str = "leave";
pub const MEMBERSHIP_KNOCK: &str = "knock";
pub const MEMBERSHIP_INVITE: &str = "invite";
pub const MEMBERSHIP_JOIN: &str = "join";
View File
@@ -81,7 +81,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
))]),
actions: Cow::Borrowed(&[Action::Notify]),
default: true,
default_enabled: true,
default_enabled: false,
},
PushRule {
rule_id: Cow::Borrowed("global/override/.m.rule.suppress_notices"),
View File
@@ -105,9 +105,6 @@ pub struct PushRuleEvaluator {
/// If MSC3931 (room version feature flags) is enabled. Usually controlled by the same
/// flag as MSC1767 (extensible events core).
msc3931_enabled: bool,
// If MSC4210 (remove legacy mentions) is enabled.
msc4210_enabled: bool,
}
#[pymethods]
@@ -125,7 +122,6 @@ impl PushRuleEvaluator {
related_event_match_enabled,
room_version_feature_flags,
msc3931_enabled,
msc4210_enabled,
))]
pub fn py_new(
flattened_keys: BTreeMap<String, JsonValue>,
@@ -137,7 +133,6 @@ impl PushRuleEvaluator {
related_event_match_enabled: bool,
room_version_feature_flags: Vec<String>,
msc3931_enabled: bool,
msc4210_enabled: bool,
) -> Result<Self, Error> {
let body = match flattened_keys.get("content.body") {
Some(JsonValue::Value(SimpleJsonValue::Str(s))) => s.clone().into_owned(),
@@ -155,7 +150,6 @@ impl PushRuleEvaluator {
related_event_match_enabled,
room_version_feature_flags,
msc3931_enabled,
msc4210_enabled,
})
}
@@ -167,7 +161,6 @@ impl PushRuleEvaluator {
///
/// Returns the set of actions, if any, that match (filtering out any
/// `dont_notify` and `coalesce` actions).
#[pyo3(signature = (push_rules, user_id=None, display_name=None))]
pub fn run(
&self,
push_rules: &FilteredPushRules,
@@ -183,8 +176,7 @@ impl PushRuleEvaluator {
// For backwards-compatibility the legacy mention rules are disabled
// if the event contains the 'm.mentions' property.
// Additionally, MSC4210 always disables the legacy rules.
if (self.has_mentions || self.msc4210_enabled)
if self.has_mentions
&& (rule_id == "global/override/.m.rule.contains_display_name"
|| rule_id == "global/content/.m.rule.contains_user_name"
|| rule_id == "global/override/.m.rule.roomnotif")
@@ -237,7 +229,6 @@ impl PushRuleEvaluator {
}
/// Check if the given condition matches.
#[pyo3(signature = (condition, user_id=None, display_name=None))]
fn matches(
&self,
condition: Condition,
@@ -535,7 +526,6 @@ fn push_rule_evaluator() {
true,
vec![],
true,
false,
)
.unwrap();
@@ -565,7 +555,6 @@ fn test_requires_room_version_supports_condition() {
false,
flags,
true,
false,
)
.unwrap();
@@ -593,7 +582,7 @@ fn test_requires_room_version_supports_condition() {
};
let rules = PushRules::new(vec![custom_rule]);
result = evaluator.run(
&FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false, false),
&FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false),
None,
None,
);
View File
@@ -65,8 +65,8 @@ use anyhow::{Context, Error};
use log::warn;
use pyo3::exceptions::PyTypeError;
use pyo3::prelude::*;
use pyo3::types::{PyBool, PyInt, PyList, PyString};
use pythonize::{depythonize, pythonize, PythonizeError};
use pyo3::types::{PyBool, PyList, PyLong, PyString};
use pythonize::{depythonize_bound, pythonize};
use serde::de::Error as _;
use serde::{Deserialize, Serialize};
use serde_json::Value;
@@ -79,7 +79,7 @@ pub mod utils;
/// Called when registering modules with python.
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
let child_module = PyModule::new(py, "push")?;
let child_module = PyModule::new_bound(py, "push")?;
child_module.add_class::<PushRule>()?;
child_module.add_class::<PushRules>()?;
child_module.add_class::<FilteredPushRules>()?;
@@ -90,7 +90,7 @@ pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()>
// We need to manually add the module to sys.modules to make `from
// synapse.synapse_rust import push` work.
py.import("sys")?
py.import_bound("sys")?
.getattr("modules")?
.set_item("synapse.synapse_rust.push", child_module)?;
@@ -182,16 +182,12 @@ pub enum Action {
Unknown(Value),
}
impl<'py> IntoPyObject<'py> for Action {
type Target = PyAny;
type Output = Bound<'py, Self::Target>;
type Error = PythonizeError;
fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
impl IntoPy<PyObject> for Action {
fn into_py(self, py: Python<'_>) -> PyObject {
// When we pass the `Action` struct to Python we want it to be converted
// to a dict. We use `pythonize`, which converts the struct using the
// `serde` serialization.
pythonize(py, &self)
pythonize(py, &self).expect("valid action")
}
}
@@ -274,13 +270,13 @@ pub enum SimpleJsonValue {
}
impl<'source> FromPyObject<'source> for SimpleJsonValue {
fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
fn extract(ob: &'source PyAny) -> PyResult<Self> {
if let Ok(s) = ob.downcast::<PyString>() {
Ok(SimpleJsonValue::Str(Cow::Owned(s.to_string())))
// A bool *is* an int, ensure we try bool first.
} else if let Ok(b) = ob.downcast::<PyBool>() {
Ok(SimpleJsonValue::Bool(b.extract()?))
} else if let Ok(i) = ob.downcast::<PyInt>() {
} else if let Ok(i) = ob.downcast::<PyLong>() {
Ok(SimpleJsonValue::Int(i.extract()?))
} else if ob.is_none() {
Ok(SimpleJsonValue::Null)
@@ -302,19 +298,15 @@ pub enum JsonValue {
}
impl<'source> FromPyObject<'source> for JsonValue {
fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
fn extract(ob: &'source PyAny) -> PyResult<Self> {
if let Ok(l) = ob.downcast::<PyList>() {
match l
.iter()
.map(|it| SimpleJsonValue::extract_bound(&it))
.collect()
{
match l.iter().map(SimpleJsonValue::extract).collect() {
Ok(a) => Ok(JsonValue::Array(a)),
Err(e) => Err(PyTypeError::new_err(format!(
"Can't convert to JsonValue::Array: {e}"
))),
}
} else if let Ok(v) = SimpleJsonValue::extract_bound(ob) {
} else if let Ok(v) = SimpleJsonValue::extract(ob) {
Ok(JsonValue::Value(v))
} else {
Err(PyTypeError::new_err(format!(
@@ -371,19 +363,15 @@ pub enum KnownCondition {
},
}
impl<'source> IntoPyObject<'source> for Condition {
type Target = PyAny;
type Output = Bound<'source, Self::Target>;
type Error = PythonizeError;
fn into_pyobject(self, py: Python<'source>) -> Result<Self::Output, Self::Error> {
pythonize(py, &self)
impl IntoPy<PyObject> for Condition {
fn into_py(self, py: Python<'_>) -> PyObject {
pythonize(py, &self).expect("valid condition")
}
}
impl<'source> FromPyObject<'source> for Condition {
fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
Ok(depythonize(ob)?)
Ok(depythonize_bound(ob.clone())?)
}
}
@@ -546,7 +534,6 @@ pub struct FilteredPushRules {
msc3381_polls_enabled: bool,
msc3664_enabled: bool,
msc4028_push_encrypted_events: bool,
msc4210_enabled: bool,
}
#[pymethods]
@@ -559,7 +546,6 @@ impl FilteredPushRules {
msc3381_polls_enabled: bool,
msc3664_enabled: bool,
msc4028_push_encrypted_events: bool,
msc4210_enabled: bool,
) -> Self {
Self {
push_rules,
@@ -568,7 +554,6 @@ impl FilteredPushRules {
msc3381_polls_enabled,
msc3664_enabled,
msc4028_push_encrypted_events,
msc4210_enabled,
}
}
@@ -611,14 +596,6 @@ impl FilteredPushRules {
return false;
}
if self.msc4210_enabled
&& (rule.rule_id == "global/override/.m.rule.contains_display_name"
|| rule.rule_id == "global/content/.m.rule.contains_user_name"
|| rule.rule_id == "global/override/.m.rule.roomnotif")
{
return false;
}
true
})
.map(|r| {
View File
@@ -23,6 +23,7 @@ use anyhow::bail;
use anyhow::Context;
use anyhow::Error;
use lazy_static::lazy_static;
use regex;
use regex::Regex;
use regex::RegexBuilder;
View File
@@ -29,7 +29,7 @@ use pyo3::{
exceptions::PyValueError,
pyclass, pymethods,
types::{PyAnyMethods, PyModule, PyModuleMethods},
Bound, IntoPyObject, Py, PyAny, PyObject, PyResult, Python,
Bound, Py, PyAny, PyObject, PyResult, Python, ToPyObject,
};
use ulid::Ulid;
@@ -37,7 +37,6 @@ use self::session::Session;
use crate::{
errors::{NotFoundError, SynapseError},
http::{http_request_from_twisted, http_response_to_twisted, HeaderMapPyExt},
UnwrapInfallible,
};
mod session;
@@ -126,11 +125,7 @@ impl RendezvousHandler {
let base = Uri::try_from(format!("{base}_synapse/client/rendezvous"))
.map_err(|_| PyValueError::new_err("Invalid base URI"))?;
let clock = homeserver
.call_method0("get_clock")?
.into_pyobject(py)
.unwrap_infallible()
.unbind();
let clock = homeserver.call_method0("get_clock")?.to_object(py);
// Construct a Python object so that we can get a reference to the
// evict method and schedule it to run.
@@ -293,13 +288,6 @@ impl RendezvousHandler {
let mut response = Response::new(Bytes::new());
*response.status_mut() = StatusCode::ACCEPTED;
prepare_headers(response.headers_mut(), session);
// Even though this isn't mandated by the MSC, we set a Content-Type on the response. It
// doesn't do any harm as the body is empty, but this helps escape a bug in some reverse
// proxy/cache setup which strips the ETag header if there is no Content-Type set.
// Specifically, we noticed this behaviour when placing Synapse behind Cloudflare.
response.headers_mut().typed_insert(ContentType::text());
http_response_to_twisted(twisted_request, response)?;
Ok(())
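
The effect of that removed workaround is observable from the outside. A hedged sketch for checking whether a proxy in front of the rendezvous endpoint preserves the ETag (the hostname is made up; the path is the rendezvous prefix used above):

    # Hedged sketch, assuming a Synapse reachable at this (made-up) host.
    import requests

    resp = requests.post(
        "https://synapse.example.test/_synapse/client/rendezvous", data=b"{}"
    )
    # With no Content-Type on the response, some proxy/cache setups
    # (observed with Cloudflare) strip the ETag the protocol relies on.
    print(resp.headers.get("ETag"), resp.headers.get("Content-Type"))
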
@@ -323,7 +311,7 @@ impl RendezvousHandler {
}
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
let child_module = PyModule::new(py, "rendezvous")?;
let child_module = PyModule::new_bound(py, "rendezvous")?;
child_module.add_class::<RendezvousHandler>()?;
@@ -331,7 +319,7 @@ pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()>
// We need to manually add the module to sys.modules to make `from
// synapse.synapse_rust import rendezvous` work.
py.import("sys")?
py.import_bound("sys")?
.getattr("modules")?
.set_item("synapse.synapse_rust.rendezvous", child_module)?;
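
The `sys.modules` dance generalises beyond pyo3: `from a.b import c` first imports the module named "a.b", so a submodule created at runtime must be registered under its dotted name. A self-contained sketch with invented module names:

    # Minimal sketch mirroring the pattern above, with stand-in names.
    import sys
    import types

    parent = types.ModuleType("fake_pkg")
    child = types.ModuleType("fake_pkg.sub")
    child.answer = 42
    parent.sub = child

    sys.modules["fake_pkg"] = parent
    sys.modules["fake_pkg.sub"] = child  # without this, the import below fails

    from fake_pkg.sub import answer
    print(answer)  # -> 42
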

View File

@@ -28,11 +28,12 @@ from typing import Collection, Optional, Sequence, Set
# example)
DISTS = (
"debian:bullseye", # (EOL ~2024-07) (our EOL forced by Python 3.9 is 2025-10-05)
"debian:bookworm", # (EOL 2026-06) (our EOL forced by Python 3.11 is 2027-10-24)
"debian:sid", # (rolling distro, no EOL)
"debian:bookworm", # (EOL not specified yet) (our EOL forced by Python 3.11 is 2027-10-24)
"debian:sid", # (EOL not specified yet) (our EOL forced by Python 3.11 is 2027-10-24)
"ubuntu:focal", # 20.04 LTS (EOL 2025-04) (our EOL forced by Python 3.8 is 2024-10-14)
"ubuntu:jammy", # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04)
"ubuntu:noble", # 24.04 LTS (EOL 2029-06)
"ubuntu:oracular", # 24.10 (EOL 2025-07)
"ubuntu:lunar", # 23.04 (EOL 2024-01) (our EOL forced by Python 3.11 is 2027-10-24)
"ubuntu:mantic", # 23.10 (EOL 2024-07) (our EOL forced by Python 3.11 is 2027-10-24)
"debian:trixie", # (EOL not specified yet)
)

View File

@@ -31,7 +31,6 @@ Pydantic does not yet offer a strict mode, but it is planned for pydantic v2. Se
until then, this script is a best effort to stop us from introducing type coercion bugs
(like the infamous stringy power levels fixed in room version 10).
"""
import argparse
import contextlib
import functools
@@ -45,6 +44,7 @@ import traceback
import unittest.mock
from contextlib import contextmanager
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
@@ -56,16 +56,29 @@ from typing import (
)
from parameterized import parameterized
from typing_extensions import ParamSpec
from synapse._pydantic_compat import (
BaseModel as PydanticBaseModel,
conbytes,
confloat,
conint,
constr,
get_args,
)
from synapse._pydantic_compat import HAS_PYDANTIC_V2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import (
BaseModel as PydanticBaseModel,
conbytes,
confloat,
conint,
constr,
)
from pydantic.v1.typing import get_args
else:
from pydantic import (
BaseModel as PydanticBaseModel,
conbytes,
confloat,
conint,
constr,
)
from pydantic.typing import get_args
from typing_extensions import ParamSpec
logger = logging.getLogger(__name__)
@@ -169,16 +182,22 @@ def monkeypatch_pydantic() -> Generator[None, None, None]:
# Most Synapse code ought to import the patched objects directly from
# `pydantic`. But we also patch their containing modules `pydantic.main` and
# `pydantic.types` for completeness.
patch_basemodel = unittest.mock.patch(
"synapse._pydantic_compat.BaseModel", new=PatchedBaseModel
patch_basemodel1 = unittest.mock.patch(
"pydantic.BaseModel", new=PatchedBaseModel
)
patches.enter_context(patch_basemodel)
patch_basemodel2 = unittest.mock.patch(
"pydantic.main.BaseModel", new=PatchedBaseModel
)
patches.enter_context(patch_basemodel1)
patches.enter_context(patch_basemodel2)
for factory in CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG:
wrapper: Callable = make_wrapper(factory)
patch = unittest.mock.patch(
f"synapse._pydantic_compat.{factory.__name__}", new=wrapper
patch1 = unittest.mock.patch(f"pydantic.{factory.__name__}", new=wrapper)
patch2 = unittest.mock.patch(
f"pydantic.types.{factory.__name__}", new=wrapper
)
patches.enter_context(patch)
patches.enter_context(patch1)
patches.enter_context(patch2)
yield
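
Patching under several import paths is necessary because `unittest.mock.patch` swaps a single attribute on a single module object; code that imported the name from a submodule still holds the original. A self-contained sketch with stand-in modules (not pydantic itself):

    import sys
    import types
    import unittest.mock

    pkg = types.ModuleType("pkg")            # stand-in for pydantic
    pkg_main = types.ModuleType("pkg.main")  # stand-in for pydantic.main

    def original() -> str:
        return "original"

    pkg.BaseModel = original
    pkg_main.BaseModel = original
    sys.modules["pkg"] = pkg
    sys.modules["pkg.main"] = pkg_main

    with unittest.mock.patch("pkg.BaseModel", new=lambda: "patched"):
        print(pkg.BaseModel())       # -> patched
        print(pkg_main.BaseModel())  # -> original; this path needs its own patch
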

View File

@@ -195,10 +195,6 @@ if [ -z "$skip_docker_build" ]; then
# Build the unified Complement image (from the worker Synapse image we just built).
echo_if_github "::group::Build Docker image: complement/Dockerfile"
$CONTAINER_RUNTIME build -t complement-synapse \
`# This is the tag we end up pushing to the registry (see` \
`# .github/workflows/push_complement_image.yml) so let's just label it now` \
`# so people can reference it by the same name locally.` \
-t ghcr.io/element-hq/synapse/complement-synapse \
-f "docker/complement/Dockerfile" "docker/complement"
echo_if_github "::endgroup::"
@@ -224,11 +220,10 @@ test_packages=(
./tests/msc3874
./tests/msc3890
./tests/msc3391
./tests/msc3757
./tests/msc3930
./tests/msc3902
./tests/msc3967
./tests/msc4140
./tests/msc4115
)
# Enable dirty runs, so tests will reuse the same container where possible.

View File

@@ -43,7 +43,7 @@ import argparse
import base64
import json
import sys
from typing import Any, Dict, Mapping, Optional, Tuple, Union
from typing import Any, Dict, Optional, Tuple
from urllib import parse as urlparse
import requests
@@ -75,7 +75,7 @@ def encode_canonical_json(value: object) -> bytes:
value,
# Encode code-points outside of ASCII as UTF-8 rather than \u escapes
ensure_ascii=False,
# Remove unnecessary white space.
# Remove unecessary white space.
separators=(",", ":"),
# Sort the keys of dictionaries.
sort_keys=True,
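
Those three `json.dumps` options are exactly what make the output canonical; a runnable illustration:

    import json

    # Canonical JSON as used for Matrix signing: sorted keys, no
    # insignificant whitespace, non-ASCII emitted as UTF-8 bytes.
    value = {"b": 1, "a": "é"}
    canonical = json.dumps(
        value,
        ensure_ascii=False,     # "é" stays UTF-8 rather than "\u00e9"
        separators=(",", ":"),  # no spaces after "," or ":"
        sort_keys=True,         # deterministic key ordering
    ).encode("utf-8")
    print(canonical)  # b'{"a":"\xc3\xa9","b":1}'
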
@@ -298,23 +298,12 @@ class MatrixConnectionAdapter(HTTPAdapter):
return super().send(request, *args, **kwargs)
def get_connection_with_tls_context(
self,
request: PreparedRequest,
verify: Optional[Union[bool, str]],
proxies: Optional[Mapping[str, str]] = None,
cert: Optional[Union[Tuple[str, str], str]] = None,
def get_connection(
self, url: str, proxies: Optional[Dict[str, str]] = None
) -> HTTPConnectionPool:
# overrides the get_connection_with_tls_context() method in the base class
parsed = urlparse.urlsplit(request.url)
# Extract the server name from the request URL, and ensure it's a str.
hostname = parsed.netloc
if isinstance(hostname, bytes):
hostname = hostname.decode("utf-8")
assert isinstance(hostname, str)
(host, port, ssl_server_name) = self._lookup(hostname)
# overrides the get_connection() method in the base class
parsed = urlparse.urlsplit(url)
(host, port, ssl_server_name) = self._lookup(parsed.netloc)
print(
f"Connecting to {host}:{port} with SNI {ssl_server_name}", file=sys.stderr
)
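
The renamed hook matches requests 2.32, which replaced `get_connection()` with `get_connection_with_tls_context()`. A hedged, self-contained sketch of the same override pattern outside Synapse:

    # Sketch assuming requests >= 2.32; the adapter just logs the host it
    # resolves before delegating to the stock connection logic.
    from typing import Mapping, Optional, Tuple, Union
    from urllib import parse as urlparse

    from requests import PreparedRequest, Session
    from requests.adapters import HTTPAdapter

    class LoggingAdapter(HTTPAdapter):
        def get_connection_with_tls_context(
            self,
            request: PreparedRequest,
            verify: Optional[Union[bool, str]],
            proxies: Optional[Mapping[str, str]] = None,
            cert: Optional[Union[Tuple[str, str], str]] = None,
        ):
            host = urlparse.urlsplit(request.url).netloc
            print(f"connecting to {host}")
            return super().get_connection_with_tls_context(
                request, verify, proxies=proxies, cert=cert
            )

    session = Session()
    session.mount("https://", LoggingAdapter())
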

View File

@@ -1,9 +1,8 @@
#!/usr/bin/env bash
#
# Runs linting scripts over the local Synapse checkout
# black - opinionated code formatter
# ruff - lints and finds mistakes
# mypy - typechecks python code
# cargo clippy - lints rust code
set -e
@@ -102,15 +101,18 @@ echo
# Print out the commands being run
set -x
# Ensure the sort order of imports.
isort "${files[@]}"
# Ensure Python code conforms to an opinionated style.
python3 -m black "${files[@]}"
# Ensure the sample configuration file conforms to style checks.
./scripts-dev/config-lint.sh
# Catch any common programming mistakes in Python code.
# --quiet suppresses the update check.
ruff check --quiet --fix "${files[@]}"
# Reformat Python code.
ruff format --quiet "${files[@]}"
ruff --quiet --fix "${files[@]}"
# Catch any common programming mistakes in Rust code.
#

View File

@@ -38,7 +38,6 @@ from mypy.types import (
NoneType,
TupleType,
TypeAliasType,
TypeVarType,
UninhabitedType,
UnionType,
)
@@ -234,7 +233,6 @@ IMMUTABLE_CUSTOM_TYPES = {
"synapse.synapse_rust.push.FilteredPushRules",
# This is technically not immutable, but close enough.
"signedjson.types.VerifyKey",
"synapse.types.StrCollection",
}
# Immutable containers only if the values are also immutable.
@@ -300,7 +298,7 @@ def is_cacheable(
elif rt.type.fullname in MUTABLE_CONTAINER_TYPES:
# Mutable containers are mutable regardless of their underlying type.
return False, f"container {rt.type.fullname} is mutable"
return False, None
elif "attrs" in rt.type.metadata:
# attrs classes are only cacheable iff they are frozen (immutable themselves)
@@ -320,9 +318,6 @@ def is_cacheable(
else:
return False, "non-frozen attrs class"
elif rt.type.is_enum:
# We assume Enum values are immutable
return True, None
else:
# Ensure we fail for unknown types, these generally means that the
# above code is not complete.
@@ -331,18 +326,6 @@ def is_cacheable(
f"Don't know how to handle {rt.type.fullname} return type instance",
)
elif isinstance(rt, TypeVarType):
# We consider TypeVars immutable if they are bound to a set of immutable
# types.
if rt.values:
for value in rt.values:
ok, note = is_cacheable(value, signature, verbose)
if not ok:
return False, f"TypeVar bound not cacheable {value}"
return True, None
return False, "TypeVar is unbound"
elif isinstance(rt, NoneType):
# None is cacheable.
return True, None
@@ -360,7 +343,7 @@ def is_cacheable(
# For a type alias, check if the underlying real type is cacheable.
return is_cacheable(mypy.types.get_proper_type(rt), signature, verbose)
elif isinstance(rt, UninhabitedType):
elif isinstance(rt, UninhabitedType) and rt.is_noreturn:
# There is no return value, just consider it cacheable. This is only used
# in tests.
return True, None
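
The removed branch distinguishes values-constrained TypeVars from unbound ones; in plain typing terms:

    # Minimal sketch of the two TypeVar shapes the plugin told apart.
    from typing import List, TypeVar

    # Values-constrained: T can only ever be str or int, both immutable,
    # so caching a function returning T is safe.
    T = TypeVar("T", str, int)

    # Unbound: U may be instantiated with a mutable type such as list, so
    # a cached return value could be mutated by one caller and observed by
    # the next.
    U = TypeVar("U")

    def first_constrained(xs: List[T]) -> T:
        return xs[0]

    def first_unbound(xs: List[U]) -> U:
        return xs[0]
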

View File

@@ -20,7 +20,8 @@
#
#
"""An interactive script for doing a release. See `cli()` below."""
"""An interactive script for doing a release. See `cli()` below.
"""
import glob
import json
@@ -40,7 +41,7 @@ import commonmark
import git
from click.exceptions import ClickException
from git import GitCommandError, Repo
from github import BadCredentialsException, Github
from github import Github
from packaging import version
@@ -69,7 +70,6 @@ def cli() -> None:
pip install -e .[dev]
- A checkout of the sytest repository at ../sytest
- A checkout of the complement repository at ../complement
Then to use:
@@ -112,12 +112,10 @@ def _prepare() -> None:
# Make sure we're in a git repo.
synapse_repo = get_repo_and_check_clean_checkout()
sytest_repo = get_repo_and_check_clean_checkout("../sytest", "sytest")
complement_repo = get_repo_and_check_clean_checkout("../complement", "complement")
click.secho("Updating Synapse and Sytest git repos...")
synapse_repo.remote().fetch()
sytest_repo.remote().fetch()
complement_repo.remote().fetch()
# Get the current version and AST from root Synapse module.
current_version = get_package_version()
@@ -210,15 +208,7 @@ def _prepare() -> None:
"Which branch should the release be based on?", default=default
)
for repo_name, repo in {
"synapse": synapse_repo,
"sytest": sytest_repo,
"complement": complement_repo,
}.items():
# Special case for Complement: `develop` maps to `main`
if repo_name == "complement" and branch_name == "develop":
branch_name = "main"
for repo_name, repo in {"synapse": synapse_repo, "sytest": sytest_repo}.items():
base_branch = find_ref(repo, branch_name)
if not base_branch:
print(f"Could not find base branch {branch_name} for {repo_name}!")
@@ -241,12 +231,6 @@ def _prepare() -> None:
if click.confirm("Push new SyTest branch?", default=True):
sytest_repo.git.push("-u", sytest_repo.remote().name, release_branch_name)
# Same for Complement
if click.confirm("Push new Complement branch?", default=True):
complement_repo.git.push(
"-u", complement_repo.remote().name, release_branch_name
)
# Switch to the release branch and ensure it's up to date.
synapse_repo.git.checkout(release_branch_name)
update_branch(synapse_repo)
@@ -323,9 +307,6 @@ def tag(gh_token: Optional[str]) -> None:
def _tag(gh_token: Optional[str]) -> None:
"""Tags the release and generates a draft GitHub release"""
# Test that the GH Token is valid before continuing.
check_valid_gh_token(gh_token)
# Make sure we're in a git repo.
repo = get_repo_and_check_clean_checkout()
@@ -420,11 +401,6 @@ def publish(gh_token: str) -> None:
def _publish(gh_token: str) -> None:
"""Publish release on GitHub."""
if gh_token:
# Test that the GH Token is valid before continuing.
gh = Github(gh_token)
gh.get_user()
# Make sure we're in a git repo.
get_repo_and_check_clean_checkout()
@@ -467,9 +443,6 @@ def upload(gh_token: Optional[str]) -> None:
def _upload(gh_token: Optional[str]) -> None:
"""Upload release to pypi."""
# Test that the GH Token is valid before continuing.
check_valid_gh_token(gh_token)
current_version = get_package_version()
tag_name = f"v{current_version}"
@@ -565,9 +538,6 @@ def wait_for_actions(gh_token: Optional[str]) -> None:
def _wait_for_actions(gh_token: Optional[str]) -> None:
# Test that the GH Token is valid before continuing.
check_valid_gh_token(gh_token)
# Find out the version and tag name.
current_version = get_package_version()
tag_name = f"v{current_version}"
@@ -660,9 +630,6 @@ def _merge_back() -> None:
else:
# Full release
sytest_repo = get_repo_and_check_clean_checkout("../sytest", "sytest")
complement_repo = get_repo_and_check_clean_checkout(
"../complement", "complement"
)
if click.confirm(f"Merge {branch_name} → master?", default=True):
_merge_into(synapse_repo, branch_name, "master")
@@ -676,9 +643,6 @@ def _merge_back() -> None:
if click.confirm("On SyTest, merge master → develop?", default=True):
_merge_into(sytest_repo, "master", "develop")
if click.confirm(f"On Complement, merge {branch_name} → main?", default=True):
_merge_into(complement_repo, branch_name, "main")
@cli.command()
def announce() -> None:
@@ -724,11 +688,6 @@ Ask the designated people to do the blog and tweets."""
@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True)
def full(gh_token: str) -> None:
if gh_token:
# Test that the GH Token is valid before continuing.
gh = Github(gh_token)
gh.get_user()
click.echo("1. If this is a security release, read the security wiki page.")
click.echo("2. Check for any release blockers before proceeding.")
click.echo(" https://github.com/element-hq/synapse/labels/X-Release-Blocker")
@@ -800,22 +759,6 @@ def get_repo_and_check_clean_checkout(
return repo
def check_valid_gh_token(gh_token: Optional[str]) -> None:
"""Check that a github token is valid, if supplied"""
if not gh_token:
# No github token supplied, so nothing to do.
return
try:
gh = Github(gh_token)
# We need to lookup name to trigger a request.
_name = gh.get_user().name
except BadCredentialsException as e:
raise click.ClickException(f"Github credentials are bad: {e}")
def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
"""Find the branch/ref, looking first locally then in the remote."""
if ref_name in repo.references:

View File

@@ -13,8 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains *incomplete* type hints for txredisapi."""
"""Contains *incomplete* type hints for txredisapi.
"""
from typing import Any, List, Optional, Type, Union
from twisted.internet import protocol

View File

@@ -20,7 +20,8 @@
#
#
"""This is an implementation of a Matrix homeserver."""
""" This is an implementation of a Matrix homeserver.
"""
import os
import sys
@@ -39,8 +40,8 @@ ImageFile.LOAD_TRUNCATED_IMAGES = True
# Note that we use an (unneeded) variable here so that pyupgrade doesn't nuke the
# if-statement completely.
py_version = sys.version_info
if py_version < (3, 9):
print("Synapse requires Python 3.9 or above.")
if py_version < (3, 8):
print("Synapse requires Python 3.8 or above.")
sys.exit(1)
# Allow using the asyncio reactor via env var.

View File

@@ -19,8 +19,6 @@
#
#
from typing import TYPE_CHECKING
from packaging.version import Version
try:
@@ -32,64 +30,4 @@ except ImportError:
HAS_PYDANTIC_V2: bool = Version(pydantic_version).major == 2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import (
BaseModel,
Extra,
Field,
MissingError,
PydanticValueError,
StrictBool,
StrictInt,
StrictStr,
ValidationError,
conbytes,
confloat,
conint,
constr,
parse_obj_as,
validator,
)
from pydantic.v1.error_wrappers import ErrorWrapper
from pydantic.v1.typing import get_args
else:
from pydantic import (
BaseModel,
Extra,
Field,
MissingError,
PydanticValueError,
StrictBool,
StrictInt,
StrictStr,
ValidationError,
conbytes,
confloat,
conint,
constr,
parse_obj_as,
validator,
)
from pydantic.error_wrappers import ErrorWrapper
from pydantic.typing import get_args
__all__ = (
"HAS_PYDANTIC_V2",
"BaseModel",
"constr",
"conbytes",
"conint",
"confloat",
"ErrorWrapper",
"Extra",
"Field",
"get_args",
"MissingError",
"parse_obj_as",
"PydanticValueError",
"StrictBool",
"StrictInt",
"StrictStr",
"ValidationError",
"validator",
)
__all__ = ("HAS_PYDANTIC_V2",)
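
The value of the removed shim is that call sites import from one place and stay agnostic of the installed pydantic major. A minimal sketch of the pattern (version detection condensed; treat the exact imports as assumptions):

    from packaging.version import Version
    from pydantic import VERSION as pydantic_version

    if Version(pydantic_version).major == 2:
        # pydantic 2 ships the v1 API under pydantic.v1
        from pydantic.v1 import BaseModel, StrictStr
    else:
        from pydantic import BaseModel, StrictStr

    class LoginRequest(BaseModel):  # callers never mention pydantic.v1
        user: StrictStr
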

View File

@@ -44,7 +44,7 @@ logger = logging.getLogger("generate_workers_map")
class MockHomeserver(HomeServer):
DATASTORE_CLASS = DataStore
DATASTORE_CLASS = DataStore # type: ignore
def __init__(self, config: HomeServerConfig, worker_app: Optional[str]) -> None:
super().__init__(config.server.server_name, config=config)
@@ -171,7 +171,7 @@ def elide_http_methods_if_unconflicting(
"""
def paths_to_methods_dict(
methods_and_paths: Iterable[Tuple[str, str]],
methods_and_paths: Iterable[Tuple[str, str]]
) -> Dict[str, Set[str]]:
"""
Given (method, path) pairs, produces a dict from path to set of methods
@@ -201,7 +201,7 @@ def elide_http_methods_if_unconflicting(
def simplify_path_regexes(
registrations: Dict[Tuple[str, str], EndpointDescription],
registrations: Dict[Tuple[str, str], EndpointDescription]
) -> Dict[Tuple[str, str], EndpointDescription]:
"""
Simplify all the path regexes for the dict of endpoint descriptions,

View File

@@ -56,9 +56,7 @@ def main() -> None:
password_pepper = password_config.get("pepper", password_pepper)
password = args.password
if not password and not sys.stdin.isatty():
password = sys.stdin.readline().strip()
elif not password:
if not password:
password = prompt_for_pass()
# On Python 2, make sure we decode it to Unicode before we normalise it
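
The removed stdin branch makes the script usable non-interactively. A hedged usage sketch (the command name and `-c` flag are taken from the surrounding file, not re-verified here):

    # Pipe a password in rather than answering the interactive prompt.
    import subprocess

    result = subprocess.run(
        ["hash_password", "-c", "homeserver.yaml"],
        input=b"s3cret\n",
        capture_output=True,
    )
    print(result.stdout.decode().strip())  # the bcrypt hash
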

View File

@@ -52,7 +52,6 @@ def request_registration(
user_type: Optional[str] = None,
_print: Callable[[str], None] = print,
exit: Callable[[int], None] = sys.exit,
exists_ok: bool = False,
) -> None:
url = "%s/_synapse/admin/v1/register" % (server_location.rstrip("/"),)
@@ -98,10 +97,6 @@ def request_registration(
r = requests.post(url, json=data)
if r.status_code != 200:
response = r.json()
if exists_ok and response["errcode"] == "M_USER_IN_USE":
_print("User already exists. Skipping.")
return
_print("ERROR! Received %d %s" % (r.status_code, r.reason))
if 400 <= r.status_code < 500:
try:
@@ -120,7 +115,6 @@ def register_new_user(
shared_secret: str,
admin: Optional[bool],
user_type: Optional[str],
exists_ok: bool = False,
) -> None:
if not user:
try:
@@ -160,13 +154,7 @@ def register_new_user(
admin = False
request_registration(
user,
password,
server_location,
shared_secret,
bool(admin),
user_type,
exists_ok=exists_ok,
user, password, server_location, shared_secret, bool(admin), user_type
)
@@ -186,22 +174,10 @@ def main() -> None:
help="Local part of the new user. Will prompt if omitted.",
)
parser.add_argument(
"--exists-ok",
action="store_true",
help="Do not fail if user already exists.",
)
password_group = parser.add_mutually_exclusive_group()
password_group.add_argument(
"-p",
"--password",
default=None,
help="New password for user. Will prompt for a password if "
"this flag and `--password-file` are both omitted.",
)
password_group.add_argument(
"--password-file",
default=None,
help="File containing the new password for user. If set, will override `--password`.",
help="New password for user. Will prompt if omitted.",
)
parser.add_argument(
"-t",
@@ -209,7 +185,6 @@ def main() -> None:
default=None,
help="User type as specified in synapse.api.constants.UserTypes",
)
admin_group = parser.add_mutually_exclusive_group()
admin_group.add_argument(
"-a",
@@ -272,11 +247,6 @@ def main() -> None:
print(_NO_SHARED_SECRET_OPTS_ERROR, file=sys.stderr)
sys.exit(1)
if args.password_file:
password = _read_file(args.password_file, "password-file").strip()
else:
password = args.password
if args.server_url:
server_url = args.server_url
elif config is not None:
@@ -300,13 +270,7 @@ def main() -> None:
admin = args.admin
register_new_user(
args.user,
password,
server_url,
secret,
admin,
args.user_type,
exists_ok=args.exists_ok,
args.user, args.password, server_url, secret, admin, args.user_type
)
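
The `--password`/`--password-file` pair above relies on argparse's mutually exclusive groups, which reject the two flags appearing together. A self-contained sketch:

    import argparse

    parser = argparse.ArgumentParser()
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-p", "--password", default=None)
    group.add_argument("--password-file", default=None)

    args = parser.parse_args(["--password-file", "pw.txt"])
    print(args.password_file)  # -> pw.txt
    # parser.parse_args(["-p", "x", "--password-file", "pw.txt"]) exits with
    # "argument --password-file: not allowed with argument -p/--password".
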

View File

@@ -40,7 +40,6 @@ from synapse.storage.engines import create_engine
class ReviewConfig(RootConfig):
"A config class that just pulls out the database config"
config_classes = [DatabaseConfig]
@@ -161,11 +160,7 @@ def main() -> None:
with make_conn(database_config, engine, "review_recent_signups") as db_conn:
# This generates a type of Cursor, not LoggingTransaction.
user_infos = get_recent_users(
db_conn.cursor(),
since_ms, # type: ignore[arg-type]
exclude_users_with_appservice,
)
user_infos = get_recent_users(db_conn.cursor(), since_ms, exclude_users_with_appservice) # type: ignore[arg-type]
for user_info in user_infos:
if exclude_users_with_email and user_info.emails:
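
The reflowed call also shows where a narrow `# type: ignore[arg-type]` must live once a call spans several lines: mypy reports argument errors on the argument's own line, so the comment moves with it. A runnable sketch:

    def takes_int(x: int) -> str:
        return str(x)  # tolerant at runtime so this sketch actually runs

    stringy: str = "5"

    print(takes_int(stringy))  # type: ignore[arg-type]  # call on one line

    print(takes_int(
        stringy,  # type: ignore[arg-type]  # scoped to the offending argument
    ))
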

Some files were not shown because too many files have changed in this diff.