Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-07 01:20:16 +00:00)

Compare commits: anoa/modul...anoa/print (117 commits)
| SHA1 |
|---|
| ab2122b364 |
| 23c0c475da |
| 2fd8d88b42 |
| 0cbb2a15e0 |
| 5d71034f81 |
| 4bbde142dc |
| d888126372 |
| b2237ff4f1 |
| 2760d15348 |
| 5408101d21 |
| 08f570f5f5 |
| db00925ae7 |
| 891acfd502 |
| e02a6f5e5d |
| 4f9dc3b613 |
| 2c5deb800e |
| a7107458c6 |
| f02ac5a4d5 |
| e00a411837 |
| bc926bd99e |
| 69bab78b44 |
| 41a2762e58 |
| 3ccc5184e0 |
| 07e7980572 |
| 3595ff921f |
| 300c5558ab |
| c0b9437ab6 |
| f0aae62f85 |
| 349599143e |
| 2c4057bf93 |
| f54ddbcace |
| 728512918e |
| e0838c2567 |
| 32998d07d2 |
| 6facf98a3a |
| 0417296b9f |
| 18232871d0 |
| 7897c8f6af |
| facb81d97b |
| 1090c3ec81 |
| 5f9f3d72b8 |
| dc33ef90d3 |
| a07dd43ac4 |
| 086b887f29 |
| e23e7ae48f |
| 1a78fc8a65 |
| 66a42d4e54 |
| cb0ed5ec76 |
| 0d20f762cb |
| 5cfe873146 |
| 77c6905805 |
| 7106f67470 |
| d8e6fcced7 |
| db9a61c30f |
| 634f7cf18b |
| f6ef9c129a |
| f1695ac20e |
| 9d81bb703c |
| 40893be93c |
| 1419b35a40 |
| a2fa61d1b5 |
| 123eff1bc0 |
| a092d2053a |
| 45a042ae88 |
| 72d0de9f30 |
| 5556b491c1 |
| b835eb253c |
| fc244bb592 |
| cba3a814c6 |
| 3b59ac3b69 |
| ff242faad0 |
| 6c16734cf3 |
| 4427908340 |
| 2f65b9e001 |
| 1271e896b5 |
| 418c9f3fe5 |
| eac862629f |
| 67f22a200d |
| da6c0cae96 |
| b8f6ad2736 |
| ecc90593cb |
| a4f9274107 |
| ec7554b768 |
| d2c582ef3c |
| 2d07bd7fd2 |
| a7303c5311 |
| 690b3a4fcc |
| d399d7649a |
| 9d9275da5a |
| ef80338c2d |
| be75de2cfc |
| 07cfb69778 |
| c0d6998dea |
| 8390138fa4 |
| 627be7e0a7 |
| 47fb4b43ca |
| 715cc5ee37 |
| d440cfc9e2 |
| 18f07fdc4c |
| e3344dc0c3 |
| bcbbccca23 |
| 8f01eb8ee0 |
| 21d125e29a |
| 638fa0f33d |
| 38afd10823 |
| 87cfe56d14 |
| 631eed91f1 |
| 7b8831310f |
| fb12d516cd |
| dde4e0e83d |
| 8696551e7f |
| 28bc486bff |
| ca27938257 |
| 036fb87584 |
| abe974cd2b |
| 5e3839e2af |
| 0ae1f105b2 |
```diff
@@ -36,11 +36,11 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
 # First calculate the various trial jobs.
 #
 # For PRs, we only run each type of test with the oldest Python version supported (which
-# is Python 3.9 right now)
+# is Python 3.10 right now)
 
 trial_sqlite_tests = [
     {
-        "python-version": "3.9",
+        "python-version": "3.10",
         "database": "sqlite",
         "extras": "all",
     }
@@ -53,12 +53,12 @@ if not IS_PR:
         "database": "sqlite",
         "extras": "all",
     }
-    for version in ("3.10", "3.11", "3.12", "3.13")
+    for version in ("3.11", "3.12", "3.13", "3.14")
 )
 
 trial_postgres_tests = [
     {
-        "python-version": "3.9",
+        "python-version": "3.10",
         "database": "postgres",
         "postgres-version": "13",
         "extras": "all",
@@ -68,7 +68,7 @@ trial_postgres_tests = [
 if not IS_PR:
     trial_postgres_tests.append(
         {
-            "python-version": "3.13",
+            "python-version": "3.14",
             "database": "postgres",
             "postgres-version": "17",
             "extras": "all",
@@ -77,7 +77,7 @@ if not IS_PR:
 
 trial_no_extra_tests = [
     {
-        "python-version": "3.9",
+        "python-version": "3.10",
         "database": "sqlite",
         "extras": "",
     }
@@ -99,24 +99,24 @@ set_output("trial_test_matrix", test_matrix)
 
 # First calculate the various sytest jobs.
 #
-# For each type of test we only run on bullseye on PRs
+# For each type of test we only run on bookworm on PRs
 
 
 sytest_tests = [
     {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
     },
     {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
         "postgres": "postgres",
     },
     {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
         "postgres": "multi-postgres",
         "workers": "workers",
    },
    {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
        "postgres": "multi-postgres",
        "workers": "workers",
        "reactor": "asyncio",
@@ -127,11 +127,11 @@ if not IS_PR:
    sytest_tests.extend(
        [
            {
-                "sytest-tag": "bullseye",
+                "sytest-tag": "bookworm",
                "reactor": "asyncio",
            },
            {
-                "sytest-tag": "bullseye",
+                "sytest-tag": "bookworm",
                "postgres": "postgres",
                "reactor": "asyncio",
            },
```
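The hunks above only show the matrix entries themselves. As a minimal, hypothetical sketch of how a script like this can publish such a matrix to GitHub Actions (the `set_output` helper and file layout here are assumptions, not the repository's exact code):

```python
import json
import os

# One SQLite job on the oldest supported Python, mirroring the entries above.
trial_sqlite_tests = [
    {"python-version": "3.10", "database": "sqlite", "extras": "all"}
]

def set_output(key: str, value: str) -> None:
    # GitHub Actions picks up step outputs appended to the $GITHUB_OUTPUT file.
    with open(os.environ["GITHUB_OUTPUT"], "a") as f:
        f.write(f"{key}={value}\n")

# The workflow then consumes this JSON via fromJson() in a strategy.matrix block.
set_output("trial_test_matrix", json.dumps(trial_sqlite_tests))
```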
`.github/workflows/docker.yml` (6 lines changed; vendored)
```diff
@@ -75,7 +75,7 @@ jobs:
           touch "${{ runner.temp }}/digests/${digest#sha256:}"
 
       - name: Upload digest
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
         with:
           name: digests-${{ matrix.suffix }}
           path: ${{ runner.temp }}/digests/*
@@ -95,7 +95,7 @@ jobs:
       - build
     steps:
       - name: Download digests
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
         with:
           path: ${{ runner.temp }}/digests
           pattern: digests-*
@@ -120,7 +120,7 @@ jobs:
         uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
 
       - name: Install Cosign
-        uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
+        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
 
       - name: Calculate docker image tag
         uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
```
`.github/workflows/docs-pr.yaml` (2 lines changed; vendored)
```diff
@@ -39,7 +39,7 @@ jobs:
           cp book/welcome_and_overview.html book/index.html
 
       - name: Upload Artifact
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
         with:
           name: book
           path: book
```
`.github/workflows/fix_lint.yaml` (2 lines changed; vendored)
```diff
@@ -47,6 +47,6 @@ jobs:
       - run: cargo fmt
         continue-on-error: true
 
-      - uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 # v6.0.1
+      - uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
         with:
           commit_message: "Attempt to fix linting"
```
`.github/workflows/latest_deps.yml` (6 lines changed; vendored)
```diff
@@ -139,9 +139,9 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - sytest-tag: bullseye
+          - sytest-tag: bookworm
 
-          - sytest-tag: bullseye
+          - sytest-tag: bookworm
             postgres: postgres
             workers: workers
             redis: redis
@@ -173,7 +173,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
         if: ${{ always() }}
         with:
           name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
```
`.github/workflows/release-artifacts.yml` (38 lines changed; vendored)
```diff
@@ -101,7 +101,7 @@ jobs:
           echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT"
 
       - name: Upload debs as artifacts
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
         with:
           name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
           path: debs/*
@@ -114,8 +114,8 @@ jobs:
         os:
           - ubuntu-24.04
           - ubuntu-24.04-arm
-          - macos-13 # This uses x86-64
           - macos-14 # This uses arm64
+          - macos-15-intel # This uses x86-64
         # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
         # It is not read by the rest of the workflow.
         is_pr:
@@ -124,7 +124,7 @@ jobs:
       exclude:
         # Don't build macos wheels on PR CI.
         - is_pr: true
-          os: "macos-13"
+          os: "macos-15-intel"
         - is_pr: true
           os: "macos-14"
         # Don't build aarch64 wheels on PR CI.
@@ -141,20 +141,23 @@ jobs:
           python-version: "3.x"
 
       - name: Install cibuildwheel
-        run: python -m pip install cibuildwheel==3.0.0
+        run: python -m pip install cibuildwheel==3.2.1
 
       - name: Only build a single wheel on PR
         if: startsWith(github.ref, 'refs/pull/')
-        run: echo "CIBW_BUILD="cp39-manylinux_*"" >> $GITHUB_ENV
+        run: echo "CIBW_BUILD="cp310-manylinux_*"" >> $GITHUB_ENV
 
       - name: Build wheels
         run: python -m cibuildwheel --output-dir wheelhouse
         env:
           # Skip testing for platforms which various libraries don't have wheels
           # for, and so need extra build deps.
-          CIBW_TEST_SKIP: pp3*-* *i686* *musl*
+          #
+          # cp39-*: Python 3.9 is EOL.
+          # cp3??t-*: Free-threaded builds are not currently supported.
+          CIBW_TEST_SKIP: pp3*-* cp39-* cp3??t-* *i686* *musl*
 
-      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
         with:
           name: Wheel-${{ matrix.os }}
           path: ./wheelhouse/*.whl
@@ -175,7 +178,7 @@ jobs:
       - name: Build sdist
         run: python -m build --sdist
 
-      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
         with:
           name: Sdist
           path: dist/*.tar.gz
@@ -191,7 +194,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Download all workflow run artifacts
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
       - name: Build a tarball for the debs
         # We need to merge all the debs uploads into one folder, then compress
         # that.
@@ -200,16 +203,11 @@ jobs:
           mv debs*/* debs/
           tar -cvJf debs.tar.xz debs
       - name: Attach to release
-        # Pinned to work around https://github.com/softprops/action-gh-release/issues/445
-        uses: softprops/action-gh-release@c95fe1489396fe8a9eb87c0abf8aa5b2ef267fda # v0.1.15
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          files: |
-            Sdist/*
-            Wheel*/*
-            debs.tar.xz
-          # if it's not already published, keep the release as a draft.
-          draft: true
-          # mark it as a prerelease if the tag contains 'rc'.
-          prerelease: ${{ contains(github.ref, 'rc') }}
+        run: |
+          gh release upload "${{ github.ref_name }}" \
+            Sdist/* \
+            Wheel*/* \
+            debs.tar.xz \
+            --repo ${{ github.repository }}
```
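For readers unfamiliar with cibuildwheel's selectors: `CIBW_BUILD` and `CIBW_TEST_SKIP` are space-separated shell-style glob patterns matched against build identifiers such as `cp310-manylinux_x86_64`. A rough sketch of that matching (illustrative only; cibuildwheel's own logic is more involved):

```python
from fnmatch import fnmatch

# Patterns from the workflow hunk above.
skip_patterns = "pp3*-* cp39-* cp3??t-* *i686* *musl*".split()

identifiers = [
    "cp310-manylinux_x86_64",   # built and tested
    "cp39-manylinux_x86_64",    # skipped: Python 3.9 is EOL
    "cp313t-manylinux_x86_64",  # skipped: free-threaded build
    "pp310-manylinux_x86_64",   # skipped: PyPy
]

for ident in identifiers:
    skipped = any(fnmatch(ident, pat) for pat in skip_patterns)
    print(ident, "-> test skipped" if skipped else "-> tested")
```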
`.github/workflows/tests.yml` (34 lines changed; vendored)
```diff
@@ -207,26 +207,6 @@ jobs:
     env:
       PULL_REQUEST_NUMBER: ${{ github.event.number }}
 
-  lint-pydantic:
-    runs-on: ubuntu-latest
-    needs: changes
-    if: ${{ needs.changes.outputs.linting == 'true' }}
-
-    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-        with:
-          ref: ${{ github.event.pull_request.head.sha }}
-      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
-        with:
-          poetry-version: "2.1.1"
-          extras: "all"
-      - run: poetry run scripts-dev/check_pydantic_models.py
-
   lint-clippy:
     runs-on: ubuntu-latest
     needs: changes
@@ -341,7 +321,6 @@ jobs:
       - lint-mypy
       - lint-crlf
       - lint-newsfile
-      - lint-pydantic
       - check-sampleconfig
       - check-schema-delta
       - check-lockfile
@@ -363,7 +342,6 @@ jobs:
           lint
           lint-mypy
           lint-newsfile
-          lint-pydantic
           lint-clippy
           lint-clippy-nightly
           lint-rust
@@ -470,7 +448,7 @@ jobs:
 
       - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
-          python-version: '3.9'
+          python-version: '3.10'
 
       - name: Prepare old deps
         if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
@@ -514,7 +492,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["pypy-3.9"]
+        python-version: ["pypy-3.10"]
         extras: ["all"]
 
     steps:
@@ -585,7 +563,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
         if: ${{ always() }}
         with:
           name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
@@ -638,10 +616,10 @@ jobs:
     strategy:
       matrix:
         include:
-          - python-version: "3.9"
+          - python-version: "3.10"
             postgres-version: "13"
 
-          - python-version: "3.13"
+          - python-version: "3.14"
             postgres-version: "17"
 
     services:
@@ -683,7 +661,7 @@ jobs:
           PGPASSWORD: postgres
           PGDATABASE: postgres
       - name: "Upload schema differences"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
         if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
         with:
           name: Schema dumps
```
`.github/workflows/twisted_trunk.yml` (6 lines changed; vendored)
```diff
@@ -108,11 +108,11 @@ jobs:
     if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     container:
-      # We're using debian:bullseye because it uses Python 3.9 which is our minimum supported Python version.
+      # We're using bookworm because that's what Debian oldstable is at the time of writing.
       # This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
       # they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
       # version, assuming that any incompatibilities on newer versions would also be present on the oldest.
-      image: matrixdotorg/sytest-synapse:bullseye
+      image: matrixdotorg/sytest-synapse:bookworm
       volumes:
         - ${{ github.workspace }}:/src
 
@@ -147,7 +147,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
         if: ${{ always() }}
         with:
           name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
```
`CHANGES.md` (301 lines changed)
@@ -1,3 +1,293 @@

# Synapse 1.142.0rc3 (2025-11-04)

## Dropped support for Python 3.9

This release drops support for Python 3.9, in line with our [dependency deprecation policy](https://element-hq.github.io/synapse/latest/deprecation_policy.html#platform-dependencies), as it is now [end of life](https://endoflife.date/python).

## SQLite 3.40.0+ is now required

The minimum supported SQLite version has been increased from 3.27.0 to 3.40.0.

If you use current versions of the [matrixorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks) Docker images, no action is required.
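As a quick way to confirm what your deployment is running (a minimal sketch; Synapse performs its own check at startup), the standard `sqlite3` module reports the version of the SQLite library Python was built against:

```python
import sqlite3

# The SQLite library version, as a comparable tuple, e.g. (3, 40, 1).
print(sqlite3.sqlite_version)
assert sqlite3.sqlite_version_info >= (3, 40, 0), (
    f"SQLite {sqlite3.sqlite_version} is older than the required 3.40.0"
)
```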

## Deprecation of MacOS Python wheels

The team has decided to deprecate and eventually stop publishing Python wheels for MacOS. This is a burden on the team, and we're not aware of any parties that use them. Synapse Docker images will continue to work on MacOS, as will building Synapse from source (though note this requires a Rust compiler).

At present, publishing MacOS Python wheels will continue for the next release (1.143.0), but the wheels will not be available after that (1.144.0+). If you do make use of these wheels downstream, please reach out to us in [#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org). We'd love to hear from you!

## Internal Changes

- Update release scripts to prevent building wheels for free-threaded Python, as Synapse does not currently support it. ([\#19140](https://github.com/element-hq/synapse/issues/19140))

# Synapse 1.142.0rc2 (2025-11-04)

## Internal Changes

- Manually skip building Python 3.9 wheels, to prevent errors in the release workflow. ([\#19119](https://github.com/element-hq/synapse/issues/19119))

# Synapse 1.142.0rc1 (2025-11-04)

## Features

- Add support for Python 3.14. ([\#19055](https://github.com/element-hq/synapse/issues/19055), [\#19134](https://github.com/element-hq/synapse/issues/19134))
- Add an [Admin API](https://element-hq.github.io/synapse/latest/usage/administration/admin_api/index.html) to allow an admin to fetch the space/room hierarchy for a given space. ([\#19021](https://github.com/element-hq/synapse/issues/19021))

## Bugfixes

- Fix a bug introduced in 1.111.0 where failed attempts to download authenticated remote media would not be handled correctly. ([\#19062](https://github.com/element-hq/synapse/issues/19062))
- Update the `oidc_session_no_samesite` cookie to have the `Secure` attribute, so that the only difference between it and the paired `oidc_session` cookie is the configuration of the `SameSite` attribute, as described in the comments / cookie names. Contributed by @kieranlane. ([\#19079](https://github.com/element-hq/synapse/issues/19079))
- Fix a bug introduced in 1.140.0 where lost logcontext warnings would be emitted from timeouts in sync and requests made by Synapse itself. ([\#19090](https://github.com/element-hq/synapse/issues/19090))
- Fix a bug introduced in 1.140.0 where lost logcontext warnings were emitted when using `HomeServer.shutdown()`. ([\#19108](https://github.com/element-hq/synapse/issues/19108))

## Improved Documentation

- Update the link to the Debian oldstable package for SQLite. ([\#19047](https://github.com/element-hq/synapse/issues/19047))
- Point out additional Redis configuration options available in the worker docs. Contributed by @servisbryce. ([\#19073](https://github.com/element-hq/synapse/issues/19073))
- Update the list of Debian releases that the downstream Debian package is maintained for. ([\#19100](https://github.com/element-hq/synapse/issues/19100))
- Add [a page](https://element-hq.github.io/synapse/latest/development/internal_documentation/release_notes_review_checklist.html) to the documentation describing the steps the Synapse team takes to review the release notes before publishing them. ([\#19109](https://github.com/element-hq/synapse/issues/19109))

## Deprecations and Removals

- Drop support for Python 3.9. ([\#19099](https://github.com/element-hq/synapse/issues/19099))
- Remove support for SQLite < 3.37.2. ([\#19047](https://github.com/element-hq/synapse/issues/19047))

## Internal Changes

- Fix the CI linter for schema delta files to correctly handle all types of `CREATE TABLE` syntax. ([\#19020](https://github.com/element-hq/synapse/issues/19020))
- Use type hinting generics in standard collections, as per [PEP 585](https://peps.python.org/pep-0585/), added in Python 3.9. ([\#19046](https://github.com/element-hq/synapse/issues/19046))
- Always treat `RETURNING` as supported by SQL engines, now that the minimum-supported versions of both SQLite and PostgreSQL support it. ([\#19047](https://github.com/element-hq/synapse/issues/19047))
- Move `oidc.load_metadata()` startup into `_base.start()`. ([\#19056](https://github.com/element-hq/synapse/issues/19056))
- Remove logcontext problems caused by awaiting raw `deferLater(...)`. ([\#19058](https://github.com/element-hq/synapse/issues/19058))
- Prevent duplicate logging setup when running multiple Synapse instances. ([\#19067](https://github.com/element-hq/synapse/issues/19067))
- Be mindful of other logging context filters in 3rd-party code and avoid overwriting log record fields unless we know the log record is relevant to Synapse. ([\#19068](https://github.com/element-hq/synapse/issues/19068))
- Update pydantic to v2. ([\#19071](https://github.com/element-hq/synapse/issues/19071))
- Update deprecated code in the release script to prevent a warning message from being printed. ([\#19080](https://github.com/element-hq/synapse/issues/19080))
- Update the deprecated poetry development dependencies group name in `pyproject.toml`. ([\#19081](https://github.com/element-hq/synapse/issues/19081))
- Remove the `pp38*` skip selector from cibuildwheel to silence a warning. ([\#19085](https://github.com/element-hq/synapse/issues/19085))
- Don't immediately exit the release script if the checkout is dirty. Instead, allow the user to clear the dirty changes and retry. ([\#19088](https://github.com/element-hq/synapse/issues/19088))
- Update the release script's generated announcement text to include a title and extra text for RCs. ([\#19089](https://github.com/element-hq/synapse/issues/19089))
- Fix lints on the main branch. ([\#19092](https://github.com/element-hq/synapse/issues/19092))
- Use a cheaper random string function in logcontext utilities. ([\#19094](https://github.com/element-hq/synapse/issues/19094))
- Avoid clobbering other `SIGHUP` handlers in 3rd-party code. ([\#19095](https://github.com/element-hq/synapse/issues/19095))
- Prevent duplicate GitHub draft releases being created during the Synapse release process. ([\#19096](https://github.com/element-hq/synapse/issues/19096))
- Use Pillow's `Image.getexif` method instead of the experimental `Image._getexif`. ([\#19098](https://github.com/element-hq/synapse/issues/19098))
- Prevent the uv `/usr/local/.lock` file from appearing in built Synapse docker images. ([\#19107](https://github.com/element-hq/synapse/issues/19107))
- Allow Synapse's runtime dependency checking code to take packaging markers (i.e. `python <= 3.14`) into account when checking dependencies (see the sketch after this list). ([\#19110](https://github.com/element-hq/synapse/issues/19110))
- Move exception handling up the stack (avoid `exit(1)` in our composable functions). ([\#19116](https://github.com/element-hq/synapse/issues/19116))
- Fix a lint error related to lifetimes in Rust 1.90. ([\#19118](https://github.com/element-hq/synapse/issues/19118))
- Refactor and align app entrypoints (avoid `exit(1)` in our composable functions). ([\#19121](https://github.com/element-hq/synapse/issues/19121), [\#19131](https://github.com/element-hq/synapse/issues/19131))
- Speed up pruning of ratelimiters. ([\#19129](https://github.com/element-hq/synapse/issues/19129))
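A hedged sketch of what marker-aware dependency checking means in practice, using the `packaging` library (the exact helper names in Synapse's checker may differ, and the requirement below is hypothetical): a requirement's environment marker decides whether it applies to the running interpreter at all.

```python
from packaging.requirements import Requirement

# A hypothetical requirement that only applies on older interpreters.
req = Requirement('foo>=1.0; python_version < "3.14"')

if req.marker is None or req.marker.evaluate():
    print(f"{req.name}{req.specifier} must be satisfied on this Python")
else:
    print(f"{req.name} does not apply to this Python version")
```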

### Updates to locked dependencies

* Bump actions/download-artifact from 5.0.0 to 6.0.0. ([\#19102](https://github.com/element-hq/synapse/issues/19102))
* Bump actions/upload-artifact from 4 to 5. ([\#19106](https://github.com/element-hq/synapse/issues/19106))
* Bump hiredis from 3.2.1 to 3.3.0. ([\#19103](https://github.com/element-hq/synapse/issues/19103))
* Bump icu_segmenter from 2.0.0 to 2.0.1. ([\#19126](https://github.com/element-hq/synapse/issues/19126))
* Bump idna from 3.10 to 3.11. ([\#19053](https://github.com/element-hq/synapse/issues/19053))
* Bump ijson from 3.4.0 to 3.4.0.post0. ([\#19051](https://github.com/element-hq/synapse/issues/19051))
* Bump markdown-it-py from 3.0.0 to 4.0.0. ([\#19123](https://github.com/element-hq/synapse/issues/19123))
* Bump msgpack from 1.1.1 to 1.1.2. ([\#19050](https://github.com/element-hq/synapse/issues/19050))
* Bump psycopg2 from 2.9.10 to 2.9.11. ([\#19125](https://github.com/element-hq/synapse/issues/19125))
* Bump pyyaml from 6.0.2 to 6.0.3. ([\#19105](https://github.com/element-hq/synapse/issues/19105))
* Bump regex from 1.11.3 to 1.12.2. ([\#19074](https://github.com/element-hq/synapse/issues/19074))
* Bump reqwest from 0.12.23 to 0.12.24. ([\#19077](https://github.com/element-hq/synapse/issues/19077))
* Bump ruff from 0.12.10 to 0.14.3. ([\#19124](https://github.com/element-hq/synapse/issues/19124))
* Bump sigstore/cosign-installer from 3.10.0 to 4.0.0. ([\#19075](https://github.com/element-hq/synapse/issues/19075))
* Bump stefanzweifel/git-auto-commit-action from 6.0.1 to 7.0.0. ([\#19052](https://github.com/element-hq/synapse/issues/19052))
* Bump tokio from 1.47.1 to 1.48.0. ([\#19076](https://github.com/element-hq/synapse/issues/19076))
* Bump types-psycopg2 from 2.9.21.20250915 to 2.9.21.20251012. ([\#19054](https://github.com/element-hq/synapse/issues/19054))

# Synapse 1.141.0 (2025-10-29)

## Deprecation of MacOS Python wheels

The team has decided to deprecate and eventually stop publishing Python wheels for MacOS. This is a burden on the team, and we're not aware of any parties that use them. Synapse Docker images will continue to work on MacOS, as will building Synapse from source (though note this requires a Rust compiler).

Publishing MacOS Python wheels will continue for the next few releases. If you do make use of these wheels downstream, please reach out to us in [#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org). We'd love to hear from you!

## Docker images now based on Debian `trixie` with Python 3.13

The Docker images are now based on Debian `trixie` and use Python 3.13. If you are using the Docker images as a base image, you may need to adjust, for example, the paths at which you mount any additional Python packages.

No significant changes since 1.141.0rc2.

# Synapse 1.141.0rc2 (2025-10-28)

## Bugfixes

- Fix users being unable to log in if their password, or the server's configured pepper, was too long. ([\#19101](https://github.com/element-hq/synapse/issues/19101))

# Synapse 1.141.0rc1 (2025-10-21)

## Features

- Allow using [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) behavior without the opt-in registration flag. Contributed by @tulir @ Beeper. ([\#19031](https://github.com/element-hq/synapse/issues/19031))
- Stabilized support for [MSC4326](https://github.com/matrix-org/matrix-spec-proposals/pull/4326): device masquerading for appservices. Contributed by @tulir @ Beeper. ([\#19033](https://github.com/element-hq/synapse/issues/19033))

## Bugfixes

- Fix a bug introduced in 1.136.0 that would prevent Synapse from being able to be `reload`-ed more than once when running under systemd. ([\#19060](https://github.com/element-hq/synapse/issues/19060))
- Fix a bug introduced in 1.140.0 where an internal server error could be raised when hashing user passwords that are too long. ([\#19078](https://github.com/element-hq/synapse/issues/19078))

## Updates to the Docker image

- Update the docker image to use Debian trixie as the base, and thus Python 3.13. ([\#19064](https://github.com/element-hq/synapse/issues/19064))

## Internal Changes

- Move unique snowflake homeserver background tasks to `start_background_tasks` (the standard pattern for this kind of thing). ([\#19037](https://github.com/element-hq/synapse/issues/19037))
- Drop a deprecated field of the `PyGitHub` dependency in the release script and raise the dependency's minimum version to `1.59.0`. ([\#19039](https://github.com/element-hq/synapse/issues/19039))
- Update the TODO list of conflicting areas where we encounter metrics being clobbered (`ApplicationService`). ([\#19040](https://github.com/element-hq/synapse/issues/19040))

# Synapse 1.140.0 (2025-10-14)

## Compatibility notice for users of `synapse-s3-storage-provider`

Deployments that make use of the [synapse-s3-storage-provider](https://github.com/matrix-org/synapse-s3-storage-provider) module must upgrade to [v1.6.0](https://github.com/matrix-org/synapse-s3-storage-provider/releases/tag/v1.6.0). Using older versions of the module with this release of Synapse will prevent users from being able to upload or download media.

No significant changes since 1.140.0rc1.

# Synapse 1.140.0rc1 (2025-10-10)

## Features

- Add [a new Media Query by ID Admin API](https://element-hq.github.io/synapse/v1.140/admin_api/media_admin_api.html#query-a-piece-of-media-by-id) that allows server admins to query and investigate the metadata of local or cached remote media via the `origin/media_id` identifier found in a [Matrix Content URI](https://spec.matrix.org/v1.14/client-server-api/#matrix-content-mxc-uris). ([\#18911](https://github.com/element-hq/synapse/issues/18911))
- Add [a new Fetch Event Admin API](https://element-hq.github.io/synapse/v1.140/admin_api/fetch_event.html) to fetch an event by ID. ([\#18963](https://github.com/element-hq/synapse/issues/18963))
- Update the [MSC4284: Policy Servers](https://github.com/matrix-org/matrix-spec-proposals/pull/4284) implementation to support signatures when available. ([\#18934](https://github.com/element-hq/synapse/issues/18934))
- Add an experimental implementation of the `GET /_matrix/client/v1/rtc/transports` endpoint for the latest draft of [MSC4143: MatrixRTC](https://github.com/matrix-org/matrix-spec-proposals/pull/4143). ([\#18967](https://github.com/element-hq/synapse/issues/18967))
- Expose a `defer_to_threadpool` function in the Synapse Module API that allows modules to run a function on a separate thread in a custom threadpool (see the sketch after this list). ([\#19032](https://github.com/element-hq/synapse/issues/19032))
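A rough sketch of how a module might use this new API. The exact signature of `defer_to_threadpool` is not spelled out in this changelog, so the argument order below is an assumption:

```python
from twisted.python.threadpool import ThreadPool

def blocking_work(n: int) -> int:
    # Some CPU-bound or blocking call that must stay off the reactor thread.
    return n * 2

async def run_in_custom_pool(module_api) -> int:
    # A threadpool owned by the module, rather than Synapse's default one.
    pool = ThreadPool(minthreads=1, maxthreads=4, name="my-module-pool")
    pool.start()
    try:
        # Assumed call shape: (threadpool, function, *args).
        return await module_api.defer_to_threadpool(pool, blocking_work, 21)
    finally:
        pool.stop()
```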

## Bugfixes

- Fix room upgrade `room_config` argument and documentation for the `user_may_create_room` spam-checker callback. ([\#18721](https://github.com/element-hq/synapse/issues/18721))
- Compute a user's last seen timestamp from their devices' last seen timestamps instead of IPs, because the latter are automatically cleared according to `user_ips_max_age`. ([\#18948](https://github.com/element-hq/synapse/issues/18948))
- Fix a bug where ephemeral events were not filtered by room ID. Contributed by @frastefanini. ([\#19002](https://github.com/element-hq/synapse/issues/19002))
- Update the Synapse main process version string to include git info. ([\#19011](https://github.com/element-hq/synapse/issues/19011))

## Improved Documentation

- Explain how `Deferred` callbacks interact with logcontexts. ([\#18914](https://github.com/element-hq/synapse/issues/18914))
- Fix documentation for `rc_room_creation` and `rc_reports` to clarify that a `per_user` rate limit is not supported. ([\#18998](https://github.com/element-hq/synapse/issues/18998))

## Deprecations and Removals

- Remove the deprecated `LoggingContext.set_current_context`/`LoggingContext.current_context` methods, which already have equivalent bare functions in `synapse.logging.context`. ([\#18989](https://github.com/element-hq/synapse/issues/18989))
- Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal. ([\#18996](https://github.com/element-hq/synapse/issues/18996))

## Internal Changes

- Cleanly shut down the `SynapseHomeServer` object, allowing artifacts of embedded small hosts to be properly garbage collected. ([\#18828](https://github.com/element-hq/synapse/issues/18828))
- Update OEmbed providers to use 'X' instead of 'Twitter' in URL previews, following a rebrand. Contributed by @HammyHavoc. ([\#18767](https://github.com/element-hq/synapse/issues/18767))
- Fix `server_name` in the logging context for multiple Synapse instances in one process. ([\#18868](https://github.com/element-hq/synapse/issues/18868))
- Wrap the Rust HTTP client with `make_deferred_yieldable` so that it follows Synapse's logcontext rules (see the sketch after this list). ([\#18903](https://github.com/element-hq/synapse/issues/18903))
- Fix the GitHub Actions workflow that moves issues labeled "X-Needs-Info" to the "Needs info" column on the team's internal triage board. ([\#18913](https://github.com/element-hq/synapse/issues/18913))
- Disconnect background process work from the request trace. ([\#18932](https://github.com/element-hq/synapse/issues/18932))
- Reduce the overall number of calls to `_get_e2e_cross_signing_signatures_for_devices` by increasing the batch size of devices the query is called with, reducing DB load. ([\#18939](https://github.com/element-hq/synapse/issues/18939))
- Update the error code used when an appservice tries to masquerade as an unknown device using [MSC4326](https://github.com/matrix-org/matrix-spec-proposals/pull/4326). Contributed by @tulir @ Beeper. ([\#18947](https://github.com/element-hq/synapse/issues/18947))
- Fix a `no active span when trying to log` tracing error on startup (when OpenTracing is enabled). ([\#18959](https://github.com/element-hq/synapse/issues/18959))
- Fix `run_coroutine_in_background(...)` incorrectly handling the logcontext. ([\#18964](https://github.com/element-hq/synapse/issues/18964))
- Add debug logs wherever we change the current logcontext. ([\#18966](https://github.com/element-hq/synapse/issues/18966))
- Update Dockerfile metadata to fix a broken link; point to the documentation website. ([\#18971](https://github.com/element-hq/synapse/issues/18971))
- Note that the code is additionally licensed under the [Element Commercial license](https://github.com/element-hq/synapse/blob/develop/LICENSE-COMMERCIAL) in SPDX expression field configs. ([\#18973](https://github.com/element-hq/synapse/issues/18973))
- Fix logcontext handling in `timeout_deferred` tests. ([\#18974](https://github.com/element-hq/synapse/issues/18974))
- Remove the internal `ReplicationUploadKeysForUserRestServlet` as a follow-up to the work in https://github.com/element-hq/synapse/pull/18581 that moved device changes off the main process. ([\#18988](https://github.com/element-hq/synapse/issues/18988))
- Switch the task scheduler from raw logcontext manipulation to the dedicated logcontext utils. ([\#18990](https://github.com/element-hq/synapse/issues/18990))
- Remove `MockClock()` in tests. ([\#18992](https://github.com/element-hq/synapse/issues/18992))
- Switch back to our own custom `LogContextScopeManager` instead of OpenTracing's `ContextVarsScopeManager`, which was causing problems when using the experimental `SYNAPSE_ASYNC_IO_REACTOR` option with tracing enabled. ([\#19007](https://github.com/element-hq/synapse/issues/19007))
- Remove the `version_string` argument from `HomeServer`, since it's always the same. ([\#19012](https://github.com/element-hq/synapse/issues/19012))
- Remove a duplicate call to `hs.start_background_tasks()` introduced by a bad merge. ([\#19013](https://github.com/element-hq/synapse/issues/19013))
- Split homeserver creation (`create_homeserver`) and setup (`setup`). ([\#19015](https://github.com/element-hq/synapse/issues/19015))
- Swap the near-end-of-life `macos-13` GitHub Actions runner for the `macos-15-intel` variant. ([\#19025](https://github.com/element-hq/synapse/issues/19025))
- Introduce `RootConfig.validate_config()`, which can be subclassed in `HomeServerConfig` to do cross-config class validation. ([\#19027](https://github.com/element-hq/synapse/issues/19027))
- Allow any command of the `release.py` script to accept a `--gh-token` argument. ([\#19035](https://github.com/element-hq/synapse/issues/19035))
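Several of the entries above concern Synapse's logcontext rules. As a brief illustration (a minimal sketch; `make_deferred_yieldable` is Synapse's real helper, while the surrounding function is hypothetical): a raw Twisted `Deferred`, such as one returned by `deferLater`, should be wrapped before being awaited so that the calling logcontext is restored when the await completes.

```python
from synapse.logging.context import make_deferred_yieldable
from twisted.internet import reactor
from twisted.internet.task import deferLater

async def pause_briefly() -> None:
    # Awaiting the raw Deferred would "lose" the current logcontext;
    # wrapping it restores the context once the sleep completes.
    await make_deferred_yieldable(deferLater(reactor, 0.1))
```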

### Updates to locked dependencies

* Bump Swatinem/rust-cache from 2.8.0 to 2.8.1. ([\#18949](https://github.com/element-hq/synapse/issues/18949))
* Bump actions/cache from 4.2.4 to 4.3.0. ([\#18983](https://github.com/element-hq/synapse/issues/18983))
* Bump anyhow from 1.0.99 to 1.0.100. ([\#18950](https://github.com/element-hq/synapse/issues/18950))
* Bump authlib from 1.6.3 to 1.6.4. ([\#18957](https://github.com/element-hq/synapse/issues/18957))
* Bump authlib from 1.6.4 to 1.6.5. ([\#19019](https://github.com/element-hq/synapse/issues/19019))
* Bump bcrypt from 4.3.0 to 5.0.0. ([\#18984](https://github.com/element-hq/synapse/issues/18984))
* Bump docker/login-action from 3.5.0 to 3.6.0. ([\#18978](https://github.com/element-hq/synapse/issues/18978))
* Bump lxml from 6.0.0 to 6.0.2. ([\#18979](https://github.com/element-hq/synapse/issues/18979))
* Bump phonenumbers from 9.0.13 to 9.0.14. ([\#18954](https://github.com/element-hq/synapse/issues/18954))
* Bump phonenumbers from 9.0.14 to 9.0.15. ([\#18991](https://github.com/element-hq/synapse/issues/18991))
* Bump prometheus-client from 0.22.1 to 0.23.1. ([\#19016](https://github.com/element-hq/synapse/issues/19016))
* Bump pydantic from 2.11.9 to 2.11.10. ([\#19017](https://github.com/element-hq/synapse/issues/19017))
* Bump pygithub from 2.7.0 to 2.8.1. ([\#18952](https://github.com/element-hq/synapse/issues/18952))
* Bump regex from 1.11.2 to 1.11.3. ([\#18981](https://github.com/element-hq/synapse/issues/18981))
* Bump serde from 1.0.224 to 1.0.226. ([\#18953](https://github.com/element-hq/synapse/issues/18953))
* Bump serde from 1.0.226 to 1.0.228. ([\#18982](https://github.com/element-hq/synapse/issues/18982))
* Bump setuptools-rust from 1.11.1 to 1.12.0. ([\#18980](https://github.com/element-hq/synapse/issues/18980))
* Bump twine from 6.1.0 to 6.2.0. ([\#18985](https://github.com/element-hq/synapse/issues/18985))
* Bump types-pyyaml from 6.0.12.20250809 to 6.0.12.20250915. ([\#19018](https://github.com/element-hq/synapse/issues/19018))
* Bump types-requests from 2.32.4.20250809 to 2.32.4.20250913. ([\#18951](https://github.com/element-hq/synapse/issues/18951))
* Bump typing-extensions from 4.14.1 to 4.15.0. ([\#18956](https://github.com/element-hq/synapse/issues/18956))

# Synapse 1.139.2 (2025-10-07)

## Bugfixes

- Fix a bug introduced in 1.139.1 where a client could receive an Internal Server Error if they set `device_keys: null` in the request to [`POST /_matrix/client/v3/keys/upload`](https://spec.matrix.org/v1.16/client-server-api/#post_matrixclientv3keysupload). ([\#19023](https://github.com/element-hq/synapse/issues/19023))

# Synapse 1.139.1 (2025-10-07)

## Security Fixes

@@ -8,6 +298,17 @@

- Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal. This change allows unit tests to pass following the security patch above. ([\#18996](https://github.com/element-hq/synapse/issues/18996))

# Synapse 1.138.4 (2025-10-07)

## Bugfixes

- Fix a bug introduced in 1.138.3 where a client could receive an Internal Server Error if they set `device_keys: null` in the request to [`POST /_matrix/client/v3/keys/upload`](https://spec.matrix.org/v1.16/client-server-api/#post_matrixclientv3keysupload). ([\#19023](https://github.com/element-hq/synapse/issues/19023))

# Synapse 1.138.3 (2025-10-07)

## Security Fixes
`Cargo.lock` (140 lines changed; generated)
```diff
@@ -2,21 +2,6 @@
 # It is not intended for manual editing.
 version = 3
 
-[[package]]
-name = "addr2line"
-version = "0.24.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
-dependencies = [
- "gimli",
-]
-
-[[package]]
-name = "adler2"
-version = "2.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
-
 [[package]]
 name = "aho-corasick"
 version = "1.1.3"
@@ -50,21 +35,6 @@ version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
 
-[[package]]
-name = "backtrace"
-version = "0.3.75"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002"
-dependencies = [
- "addr2line",
- "cfg-if",
- "libc",
- "miniz_oxide",
- "object",
- "rustc-demangle",
- "windows-targets",
-]
-
 [[package]]
 name = "base64"
 version = "0.22.1"
@@ -341,12 +311,6 @@ dependencies = [
  "wasm-bindgen",
 ]
 
-[[package]]
-name = "gimli"
-version = "0.31.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
-
 [[package]]
 name = "h2"
 version = "0.4.11"
@@ -625,9 +589,9 @@ dependencies = [
 
 [[package]]
 name = "icu_segmenter"
-version = "2.0.0"
+version = "2.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e185fc13b6401c138cf40db12b863b35f5edf31b88192a545857b41aeaf7d3d3"
+checksum = "38e30e593cf9c3ca2f51aa312eb347cd1ba95715e91a842ec3fc9058eab2af4b"
 dependencies = [
  "core_maths",
  "displaydoc",
@@ -684,17 +648,6 @@ version = "2.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd"
 
-[[package]]
-name = "io-uring"
-version = "0.7.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4"
-dependencies = [
- "bitflags",
- "cfg-if",
- "libc",
-]
-
 [[package]]
 name = "ipnet"
 version = "2.11.0"
@@ -784,15 +737,6 @@ version = "0.3.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
 
-[[package]]
-name = "miniz_oxide"
-version = "0.8.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
-dependencies = [
- "adler2",
-]
-
 [[package]]
 name = "mio"
 version = "1.0.4"
@@ -804,15 +748,6 @@ dependencies = [
  "windows-sys 0.59.0",
 ]
 
-[[package]]
-name = "object"
-version = "0.36.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
-dependencies = [
- "memchr",
-]
-
 [[package]]
 name = "once_cell"
 version = "1.21.3"
@@ -879,9 +814,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3"
-version = "0.25.1"
+version = "0.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8970a78afe0628a3e3430376fc5fd76b6b45c4d43360ffd6cdd40bdde72b682a"
+checksum = "7ba0117f4212101ee6544044dae45abe1083d30ce7b29c4b5cbdfa2354e07383"
 dependencies = [
  "anyhow",
  "indoc",
@@ -897,19 +832,18 @@ dependencies = [
 
 [[package]]
 name = "pyo3-build-config"
-version = "0.25.1"
+version = "0.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "458eb0c55e7ece017adeba38f2248ff3ac615e53660d7c71a238d7d2a01c7598"
+checksum = "4fc6ddaf24947d12a9aa31ac65431fb1b851b8f4365426e182901eabfb87df5f"
 dependencies = [
- "once_cell",
  "target-lexicon",
 ]
 
 [[package]]
 name = "pyo3-ffi"
-version = "0.25.1"
+version = "0.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7114fe5457c61b276ab77c5055f206295b812608083644a5c5b2640c3102565c"
+checksum = "025474d3928738efb38ac36d4744a74a400c901c7596199e20e45d98eb194105"
 dependencies = [
  "libc",
  "pyo3-build-config",
@@ -917,9 +851,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-log"
-version = "0.12.4"
+version = "0.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45192e5e4a4d2505587e27806c7b710c231c40c56f3bfc19535d0bb25df52264"
+checksum = "d359e20231345f21a3b5b6aea7e73f4dc97e1712ef3bfe2d88997ac6a308d784"
 dependencies = [
  "arc-swap",
  "log",
@@ -928,9 +862,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-macros"
-version = "0.25.1"
+version = "0.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a8725c0a622b374d6cb051d11a0983786448f7785336139c3c94f5aa6bef7e50"
+checksum = "2e64eb489f22fe1c95911b77c44cc41e7c19f3082fc81cce90f657cdc42ffded"
 dependencies = [
  "proc-macro2",
  "pyo3-macros-backend",
@@ -940,9 +874,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-macros-backend"
-version = "0.25.1"
+version = "0.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4109984c22491085343c05b0dbc54ddc405c3cf7b4374fc533f5c3313a572ccc"
+checksum = "100246c0ecf400b475341b8455a9213344569af29a3c841d29270e53102e0fcf"
 dependencies = [
  "heck",
  "proc-macro2",
@@ -953,9 +887,9 @@ dependencies = [
 
 [[package]]
 name = "pythonize"
-version = "0.25.0"
+version = "0.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "597907139a488b22573158793aa7539df36ae863eba300c75f3a0d65fc475e27"
+checksum = "11e06e4cff9be2bbf2bddf28a486ae619172ea57e79787f856572878c62dcfe2"
 dependencies = [
  "pyo3",
  "serde",
@@ -1062,9 +996,9 @@ dependencies = [
 
 [[package]]
 name = "regex"
-version = "1.11.3"
+version = "1.12.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b5288124840bee7b386bc413c487869b360b2b4ec421ea56425128692f2a82c"
+checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -1074,9 +1008,9 @@ dependencies = [
 
 [[package]]
 name = "regex-automata"
-version = "0.4.11"
+version = "0.4.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "833eb9ce86d40ef33cb1306d8accf7bc8ec2bfea4355cbdebb3df68b40925cad"
+checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -1091,9 +1025,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
 
 [[package]]
 name = "reqwest"
-version = "0.12.23"
+version = "0.12.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb"
+checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
 dependencies = [
  "base64",
  "bytes",
@@ -1145,12 +1079,6 @@ dependencies = [
  "windows-sys 0.52.0",
 ]
 
-[[package]]
-name = "rustc-demangle"
-version = "0.1.26"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace"
-
 [[package]]
 name = "rustc-hash"
 version = "2.1.1"
@@ -1489,19 +1417,16 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 
 [[package]]
 name = "tokio"
-version = "1.47.1"
+version = "1.48.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038"
+checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408"
 dependencies = [
- "backtrace",
  "bytes",
- "io-uring",
 "libc",
 "mio",
 "pin-project-lite",
- "slab",
 "socket2 0.6.0",
- "windows-sys 0.59.0",
+ "windows-sys 0.61.2",
]
 
@@ -1782,6 +1707,12 @@ dependencies = [
  "wasm-bindgen",
 ]
 
+[[package]]
+name = "windows-link"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
+
 [[package]]
 name = "windows-sys"
 version = "0.52.0"
@@ -1800,6 +1731,15 @@ dependencies = [
  "windows-targets",
 ]
 
+[[package]]
+name = "windows-sys"
+version = "0.61.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
+dependencies = [
+ "windows-link",
+]
+
 [[package]]
 name = "windows-targets"
 version = "0.52.6"
```
```diff
@@ -2,13 +2,13 @@
 
 import itertools
 import os
-from typing import Any, Dict
+from typing import Any
 
 from packaging.specifiers import SpecifierSet
 from setuptools_rust import Binding, RustExtension
 
 
-def build(setup_kwargs: Dict[str, Any]) -> None:
+def build(setup_kwargs: dict[str, Any]) -> None:
     original_project_dir = os.path.dirname(os.path.realpath(__file__))
     cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")
 
@@ -27,12 +27,12 @@ def build(setup_kwargs: Dict[str, Any]) -> None:
     setup_kwargs["zip_safe"] = False
 
     # We look up the minimum supported Python version with
-    # `python_requires` (e.g. ">=3.9.0,<4.0.0") and finding the first Python
+    # `python_requires` (e.g. ">=3.10.0,<4.0.0") and finding the first Python
     # version that matches. We then convert that into the `py_limited_api` form,
-    # e.g. cp39 for Python 3.9.
+    # e.g. cp310 for Python 3.10.
     py_limited_api: str
     python_bounds = SpecifierSet(setup_kwargs["python_requires"])
-    for minor_version in itertools.count(start=8):
+    for minor_version in itertools.count(start=10):
         if f"3.{minor_version}.0" in python_bounds:
             py_limited_api = f"cp3{minor_version}"
             break
```
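Standalone, the version probe above behaves like this (a sketch with an assumed `python_requires` bound):

```python
import itertools

from packaging.specifiers import SpecifierSet

python_bounds = SpecifierSet(">=3.10.0,<4.0.0")  # assumed python_requires value

for minor_version in itertools.count(start=10):
    # The first 3.x.0 release inside the bounds is the minimum supported version.
    if f"3.{minor_version}.0" in python_bounds:
        py_limited_api = f"cp3{minor_version}"
        break

print(py_limited_api)  # -> cp310
```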
Deleted changelog.d newsfragments (one file per hunk):

```diff
@@ -1 +0,0 @@
-Extend validation of uploaded device keys.
@@ -1 +0,0 @@
-Fix room upgrade `room_config` argument and documentation for `user_may_create_room` spam-checker callback.
@@ -1 +0,0 @@
-Update OEmbed providers to use 'X' instead of 'Twitter' in URL previews, following a rebrand. Contributed by @HammyHavoc.
@@ -1 +0,0 @@
-Cleanly shutdown `SynapseHomeServer` object.
@@ -1 +0,0 @@
-Fix `server_name` in logging context for multiple Synapse instances in one process.
@@ -1 +0,0 @@
-Wrap the Rust HTTP client with `make_deferred_yieldable` so it follows Synapse logcontext rules.
@@ -1,2 +0,0 @@
-Add an Admin API that allows server admins to query and investigate the metadata of local or cached remote media via
-the `origin/media_id` identifier found in a [Matrix Content URI](https://spec.matrix.org/v1.14/client-server-api/#matrix-content-mxc-uris).
@@ -1 +0,0 @@
-Fix the GitHub Actions workflow that moves issues labeled "X-Needs-Info" to the "Needs info" column on the team's internal triage board.
@@ -1 +0,0 @@
-Explain how Deferred callbacks interact with logcontexts.
@@ -1 +0,0 @@
-Disconnect background process work from request trace.
@@ -1 +0,0 @@
-Update [MSC4284: Policy Servers](https://github.com/matrix-org/matrix-spec-proposals/pull/4284) implementation to support signatures when available.
@@ -1 +0,0 @@
-Reduce overall number of calls to `_get_e2e_cross_signing_signatures_for_devices` by increasing the batch size of devices the query is called with, reducing DB load.
@@ -1 +0,0 @@
-Update error code used when an appservice tries to masquerade as an unknown device using [MSC4326](https://github.com/matrix-org/matrix-spec-proposals/pull/4326). Contributed by @tulir @ Beeper.
@@ -1 +0,0 @@
-Compute a user's last seen timestamp from their devices' last seen timestamps instead of IPs, because the latter are automatically cleared according to `user_ips_max_age`.
@@ -1 +0,0 @@
-Fix `no active span when trying to log` tracing error on startup (when OpenTracing is enabled).
@@ -1 +0,0 @@
-Fix `run_coroutine_in_background(...)` incorrectly handling logcontext.
@@ -1 +0,0 @@
-Add debug logs wherever we change current logcontext.
@@ -1 +0,0 @@
-Update dockerfile metadata to fix broken link; point to documentation website.
@@ -1 +0,0 @@
-Note that the code is additionally licensed under the [Element Commercial license](https://github.com/element-hq/synapse/blob/develop/LICENSE-COMMERCIAL) in SPDX expression field configs.
@@ -1 +0,0 @@
-Fix logcontext handling in `timeout_deferred` tests.
@@ -1 +0,0 @@
-Remove internal `ReplicationUploadKeysForUserRestServlet` as a follow-up to the work in https://github.com/element-hq/synapse/pull/18581 that moved device changes off the main process.
@@ -1 +0,0 @@
-Remove deprecated `LoggingContext.set_current_context`/`LoggingContext.current_context` methods which already have equivalent bare methods in `synapse.logging.context`.
@@ -1 +0,0 @@
-Switch task scheduler from raw logcontext manipulation to using the dedicated logcontext utils.
@@ -1 +0,0 @@
-Remove `MockClock()` in tests.
@@ -1 +0,0 @@
-Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal.
@@ -1 +0,0 @@
-Fix documentation for `rc_room_creation` and `rc_reports` to clarify that a `per_user` rate limit is not supported.
@@ -1 +0,0 @@
-Fix bug where ephemeral events were not filtered by room ID. Contributed by @frastefanini.
@@ -1 +0,0 @@
-Switch back to our own custom `LogContextScopeManager` instead of OpenTracing's `ContextVarsScopeManager` which was causing problems when using the experimental `SYNAPSE_ASYNC_IO_REACTOR` option with tracing enabled.
@@ -1 +0,0 @@
-Fix a bug introduced in 1.139.1 where a client could receive an Internal Server Error if they set `device_keys: null` in the request to [`POST /_matrix/client/v3/keys/upload`](https://spec.matrix.org/v1.16/client-server-api/#post_matrixclientv3keysupload).
@@ -1 +0,0 @@
-Allow Synapse modules to specify a custom threadpool when calling `defer_to_thread`.
```
|
||||
@@ -24,7 +24,6 @@ import datetime
import html
import json
import urllib.request
from typing import List

import pydot

@@ -33,7 +32,7 @@ def make_name(pdu_id: str, origin: str) -> str:
    return f"{pdu_id}@{origin}"


def make_graph(pdus: List[dict], filename_prefix: str) -> None:
def make_graph(pdus: list[dict], filename_prefix: str) -> None:
    """
    Generate a dot and SVG file for a graph of events in the room based on the
    topological ordering by querying a homeserver.
@@ -127,7 +126,7 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
    graph.write_svg("%s.svg" % filename_prefix, prog="dot")


def get_pdus(host: str, room: str) -> List[dict]:
def get_pdus(host: str, room: str) -> list[dict]:
    transaction = json.loads(
        urllib.request.urlopen(
            f"http://{host}/_matrix/federation/v1/context/{room}/"
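Many hunks in this comparison swap `typing.List`/`Dict`/`Set` for the built-in generics (`list[...]`, `dict[...]`, `set[...]`): with the minimum Python now at 3.10, PEP 585 syntax is available everywhere, so the `typing` aliases (deprecated since Python 3.9) can be dropped. A small before/after sketch:

```python
# Before: required an import from typing (and still does on Python < 3.9).
from typing import Dict, List

def get_names_old(records: List[Dict[str, str]]) -> List[str]:
    return [record["name"] for record in records]

# After: built-in generics, no typing import needed on Python 3.9+.
def get_names(records: list[dict[str, str]]) -> list[str]:
    return [record["name"] for record in records]
```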
debian/changelog (vendored, 60 lines changed)
@@ -1,9 +1,69 @@
matrix-synapse-py3 (1.142.0~rc3) stable; urgency=medium

  * New Synapse release 1.142.0rc3.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Nov 2025 17:39:11 +0000

matrix-synapse-py3 (1.142.0~rc2) stable; urgency=medium

  * New Synapse release 1.142.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Nov 2025 16:21:30 +0000

matrix-synapse-py3 (1.142.0~rc1) stable; urgency=medium

  * New Synapse release 1.142.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Nov 2025 13:20:15 +0000

matrix-synapse-py3 (1.141.0) stable; urgency=medium

  * New Synapse release 1.141.0.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 29 Oct 2025 11:01:43 +0000

matrix-synapse-py3 (1.141.0~rc2) stable; urgency=medium

  * New Synapse release 1.141.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Oct 2025 10:20:26 +0000

matrix-synapse-py3 (1.141.0~rc1) stable; urgency=medium

  * New Synapse release 1.141.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Oct 2025 11:01:44 +0100

matrix-synapse-py3 (1.140.0) stable; urgency=medium

  * New Synapse release 1.140.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Oct 2025 15:22:36 +0100

matrix-synapse-py3 (1.140.0~rc1) stable; urgency=medium

  * New Synapse release 1.140.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 10 Oct 2025 10:56:51 +0100

matrix-synapse-py3 (1.139.2) stable; urgency=medium

  * New Synapse release 1.139.2.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 16:29:47 +0100

matrix-synapse-py3 (1.139.1) stable; urgency=medium

  * New Synapse release 1.139.1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 11:46:51 +0100

matrix-synapse-py3 (1.138.4) stable; urgency=medium

  * New Synapse release 1.138.4.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 16:28:38 +0100

matrix-synapse-py3 (1.138.3) stable; urgency=medium

  * New Synapse release 1.138.3.
@@ -20,8 +20,8 @@
# `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs
# in `poetry export` in the past.

ARG DEBIAN_VERSION=bookworm
ARG PYTHON_VERSION=3.12
ARG DEBIAN_VERSION=trixie
ARG PYTHON_VERSION=3.13
ARG POETRY_VERSION=2.1.1

###
@@ -142,10 +142,10 @@ RUN \
      libwebp7 \
      xmlsec1 \
      libjemalloc2 \
      libicu \
    | grep '^\w' > /tmp/pkg-list && \
    for arch in arm64 amd64; do \
      mkdir -p /tmp/debs-${arch} && \
      chown _apt:root /tmp/debs-${arch} && \
      cd /tmp/debs-${arch} && \
      apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \
    done
@@ -176,15 +176,15 @@ LABEL org.opencontainers.image.documentation='https://element-hq.github.io/synap
LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later OR LicenseRef-Element-Commercial'

# On the runtime image, /lib is a symlink to /usr/lib, so we need to copy the
# libraries to the right place, else the `COPY` won't work.
# On amd64, we'll also have a /lib64 folder with ld-linux-x86-64.so.2, which is
# already present in the runtime image.
COPY --from=runtime-deps /install-${TARGETARCH}/lib /usr/lib
COPY --from=runtime-deps /install-${TARGETARCH}/etc /etc
COPY --from=runtime-deps /install-${TARGETARCH}/usr /usr
COPY --from=runtime-deps /install-${TARGETARCH}/var /var
COPY --from=builder /install /usr/local

# Copy the installed python packages from the builder stage.
#
# uv will generate a `.lock` file when installing packages, which we don't want
# to copy to the final image.
COPY --from=builder --exclude=.lock /install /usr/local
COPY ./docker/start.py /start.py
COPY ./docker/conf /conf
@@ -1,9 +1,10 @@
# syntax=docker/dockerfile:1
# syntax=docker/dockerfile:1-labs

ARG SYNAPSE_VERSION=latest
ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
ARG DEBIAN_VERSION=bookworm
ARG PYTHON_VERSION=3.12
ARG DEBIAN_VERSION=trixie
ARG PYTHON_VERSION=3.13
ARG REDIS_VERSION=7.2

# first of all, we create a base image with dependencies which we can copy into the
# target image. For repeated rebuilds, this is much faster than apt installing
@@ -11,15 +12,27 @@ ARG PYTHON_VERSION=3.12

FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base

ARG DEBIAN_VERSION
ARG REDIS_VERSION

# Tell apt to keep downloaded package files, as we're using cache mounts.
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache

# The upstream redis-server deb has fewer dynamic libraries than Debian's package, which makes it easier to copy later on
RUN \
    curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \
    chmod 644 /usr/share/keyrings/redis-archive-keyring.gpg && \
    echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb ${DEBIAN_VERSION} main" | tee /etc/apt/sources.list.d/redis.list

RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update -qq && \
    DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
      nginx-light
      nginx-light \
      redis-server="6:${REDIS_VERSION}.*" redis-tools="6:${REDIS_VERSION}.*" \
      # libicu is required by postgres, see `docker/complement/Dockerfile`
      libicu76

RUN \
    # remove default page
@@ -35,19 +48,12 @@ FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base

RUN mkdir -p /uv/etc/supervisor/conf.d

# Similarly, a base to copy the redis server from.
#
# The redis docker image has fewer dynamic libraries than the debian package,
# which makes it much easier to copy (but we need to make sure we use an image
# based on the same debian version as the synapse image, to make sure we get
# the expected version of libc).
FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base

# now build the final image, based on the regular Synapse docker image
FROM $FROM

# Copy over dependencies
COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin
COPY --from=deps_base --parents /usr/lib/*-linux-gnu/libicu* /
COPY --from=deps_base /usr/bin/redis-server /usr/local/bin
COPY --from=deps_base /uv /
COPY --from=deps_base /usr/sbin/nginx /usr/sbin
COPY --from=deps_base /usr/share/nginx /usr/share/nginx
@@ -9,7 +9,7 @@
ARG SYNAPSE_VERSION=latest
# This is an intermediate image, to be built locally (not pulled from a registry).
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
ARG DEBIAN_VERSION=bookworm
ARG DEBIAN_VERSION=trixie

FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base

@@ -18,10 +18,10 @@ FROM $FROM
# since for repeated rebuilds, this is much faster than apt installing
# postgres each time.

# This trick only works because (a) the Synapse image happens to have all the
# shared libraries that postgres wants, (b) we use a postgres image based on
# the same debian version as Synapse's docker image (so the versions of the
# shared libraries match).
# This trick only works because we use a postgres image based on the same
# debian version as Synapse's docker image (so the versions of the shared
# libraries match). Any missing libraries need to be added to either the
# Synapse image or docker/Dockerfile-workers.
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
@@ -65,13 +65,10 @@ from itertools import chain
from pathlib import Path
from typing import (
    Any,
    Dict,
    List,
    Mapping,
    MutableMapping,
    NoReturn,
    Optional,
    Set,
    SupportsIndex,
)

@@ -96,7 +93,7 @@ WORKER_PLACEHOLDER_NAME = "placeholder_name"
# Watching /_matrix/media and related needs a "media" listener
# Stream Writers require "client" and "replication" listeners because they
# have to attach by instance_map to the master process and have client endpoints.
WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
WORKERS_CONFIG: dict[str, dict[str, Any]] = {
    "pusher": {
        "app": "synapse.app.generic_worker",
        "listener_resources": [],
@@ -408,7 +405,7 @@ def convert(src: str, dst: str, **template_vars: object) -> None:

def add_worker_roles_to_shared_config(
    shared_config: dict,
    worker_types_set: Set[str],
    worker_types_set: set[str],
    worker_name: str,
    worker_port: int,
) -> None:
@@ -471,9 +468,9 @@ def add_worker_roles_to_shared_config(


def merge_worker_template_configs(
    existing_dict: Optional[Dict[str, Any]],
    to_be_merged_dict: Dict[str, Any],
) -> Dict[str, Any]:
    existing_dict: Optional[dict[str, Any]],
    to_be_merged_dict: dict[str, Any],
) -> dict[str, Any]:
    """When given an existing dict of worker template configuration, consisting of both
    dicts and lists, merge new template data from WORKERS_CONFIG (or create) and
    return a new dict.
@@ -484,7 +481,7 @@ def merge_worker_template_configs(
        existing_dict.
    Returns: The newly merged dict values.
    """
    new_dict: Dict[str, Any] = {}
    new_dict: dict[str, Any] = {}
    if not existing_dict:
        # It doesn't exist yet, just use the new dict (but take a copy, not a reference)
        new_dict = to_be_merged_dict.copy()
@@ -509,8 +506,8 @@


def insert_worker_name_for_worker_config(
    existing_dict: Dict[str, Any], worker_name: str
) -> Dict[str, Any]:
    existing_dict: dict[str, Any], worker_name: str
) -> dict[str, Any]:
    """Insert a given worker name into the worker's configuration dict.

    Args:
@@ -526,7 +523,7 @@ def insert_worker_name_for_worker_config(
    return dict_to_edit
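The docstring above describes `merge_worker_template_configs` as merging dicts-of-dicts-and-lists; the function body itself is not shown in this hunk. Below is only a rough sketch of that kind of recursive merge — the name and exact semantics are assumptions for illustration, not the actual implementation:

```python
from typing import Any, Optional


def merge_templates_sketch(
    existing: Optional[dict[str, Any]], incoming: dict[str, Any]
) -> dict[str, Any]:
    # Hypothetical stand-in for merge_worker_template_configs.
    if not existing:
        # Nothing to merge into yet: return a copy, not a reference.
        return incoming.copy()
    merged = existing.copy()
    for key, value in incoming.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            # Recurse into nested dicts.
            merged[key] = merge_templates_sketch(merged[key], value)
        elif isinstance(value, list) and isinstance(merged.get(key), list):
            # Concatenate lists rather than overwriting them.
            merged[key] = merged[key] + value
        else:
            merged[key] = value
    return merged
```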
def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]:
def apply_requested_multiplier_for_worker(worker_types: list[str]) -> list[str]:
    """
    Apply the multiplier (if found) by returning a new expanded list with some basic
    error checking.
@@ -587,7 +584,7 @@ def is_sharding_allowed_for_worker_type(worker_type: str) -> bool:

def split_and_strip_string(
    given_string: str, split_char: str, max_split: SupportsIndex = -1
) -> List[str]:
) -> list[str]:
    """
    Helper to split a string on split_char and strip whitespace from each end of each
    element.
@@ -616,8 +613,8 @@ def generate_base_homeserver_config() -> None:


def parse_worker_types(
    requested_worker_types: List[str],
) -> Dict[str, Set[str]]:
    requested_worker_types: list[str],
) -> dict[str, set[str]]:
    """Read the desired list of requested workers and prepare the data for use in
    generating worker config files while also checking for potential gotchas.

@@ -633,14 +630,14 @@ def parse_worker_types(
    # A counter of worker_base_name -> int. Used for determining the name for a given
    # worker when generating its config file, as each worker's name is just
    # worker_base_name followed by instance number
    worker_base_name_counter: Dict[str, int] = defaultdict(int)
    worker_base_name_counter: dict[str, int] = defaultdict(int)

    # Similar to above, but more finely grained. This is used to ensure we don't have
    # more than a single worker for cases where multiples would be bad (e.g. presence).
    worker_type_shard_counter: Dict[str, int] = defaultdict(int)
    worker_type_shard_counter: dict[str, int] = defaultdict(int)

    # The final result of all this processing
    dict_to_return: Dict[str, Set[str]] = {}
    dict_to_return: dict[str, set[str]] = {}

    # Handle any multipliers requested for given workers.
    multiple_processed_worker_types = apply_requested_multiplier_for_worker(
@@ -684,7 +681,7 @@

        # Split the worker_type_string on "+", remove whitespace from ends then make
        # the list a set so it's deduplicated.
        worker_types_set: Set[str] = set(
        worker_types_set: set[str] = set(
            split_and_strip_string(worker_type_string, "+")
        )
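Putting the two helpers above together: `split_and_strip_string` tokenises the requested worker-types string, and `apply_requested_multiplier_for_worker` expands `name:N` entries into N copies. A plausible end-to-end illustration — the `name:N` expansion format is inferred from the docstrings, so treat the details as assumptions:

```python
def split_and_strip(given: str, split_char: str) -> list[str]:
    # Same contract as split_and_strip_string above.
    return [part.strip() for part in given.split(split_char)]


def expand_multipliers(worker_types: list[str]) -> list[str]:
    # Hypothetical stand-in for apply_requested_multiplier_for_worker:
    # "event_persister:2" -> ["event_persister", "event_persister"].
    expanded: list[str] = []
    for worker_type in worker_types:
        if ":" in worker_type:
            name, _, count = worker_type.partition(":")
            expanded.extend([name.strip()] * int(count))
        else:
            expanded.append(worker_type)
    return expanded


types = split_and_strip("event_persister:2, federation_sender", ",")
print(expand_multipliers(types))
# -> ['event_persister', 'event_persister', 'federation_sender']
```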
@@ -743,7 +740,7 @@ def generate_worker_files(
    environ: Mapping[str, str],
    config_path: str,
    data_dir: str,
    requested_worker_types: Dict[str, Set[str]],
    requested_worker_types: dict[str, set[str]],
) -> None:
    """Read the desired workers (if any) that are passed in and generate shared
    homeserver, nginx and supervisord configs.
@@ -764,7 +761,7 @@
    # First read the original config file and extract the listeners block. Then we'll
    # add another listener for replication. Later we'll write out the result to the
    # shared config file.
    listeners: List[Any]
    listeners: list[Any]
    if using_unix_sockets:
        listeners = [
            {
@@ -792,12 +789,12 @@
    # base shared worker jinja2 template. This config file will be passed to all
    # workers, including Synapse's main process. It is intended mainly for disabling
    # functionality when certain workers are spun up, and adding a replication listener.
    shared_config: Dict[str, Any] = {"listeners": listeners}
    shared_config: dict[str, Any] = {"listeners": listeners}

    # List of dicts that describe workers.
    # We pass this to the Supervisor template later to generate the appropriate
    # program blocks.
    worker_descriptors: List[Dict[str, Any]] = []
    worker_descriptors: list[dict[str, Any]] = []

    # Upstreams for load-balancing purposes. This dict takes the form of the worker
    # type to the ports of each worker. For example:
@@ -805,14 +802,14 @@
    #   worker_type: {1234, 1235, ...}}
    # }
    # and will be used to construct 'upstream' nginx directives.
    nginx_upstreams: Dict[str, Set[int]] = {}
    nginx_upstreams: dict[str, set[int]] = {}

    # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what
    # will be placed after the proxy_pass directive. The main benefit to representing
    # this data as a dict over a str is that we can easily deduplicate endpoints
    # across multiple instances of the same worker. The final rendering will be combined
    # with nginx_upstreams and placed in /etc/nginx/conf.d.
    nginx_locations: Dict[str, str] = {}
    nginx_locations: dict[str, str] = {}

    # Create the worker configuration directory if it doesn't already exist
    os.makedirs("/conf/workers", exist_ok=True)
@@ -846,7 +843,7 @@
    # yaml config file
    for worker_name, worker_types_set in requested_worker_types.items():
        # The collected and processed data will live here.
        worker_config: Dict[str, Any] = {}
        worker_config: dict[str, Any] = {}

        # Merge all worker config templates for this worker into a single config
        for worker_type in worker_types_set:
@@ -1029,7 +1026,7 @@
    Returns: the path to the generated file
    """
    # Check whether we should write worker logs to disk, in addition to the console
    extra_log_template_args: Dict[str, Optional[str]] = {}
    extra_log_template_args: dict[str, Optional[str]] = {}
    if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
        extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log"

@@ -1053,7 +1050,7 @@
    return log_config_filepath


def main(args: List[str], environ: MutableMapping[str, str]) -> None:
def main(args: list[str], environ: MutableMapping[str, str]) -> None:
    parser = ArgumentParser()
    parser.add_argument(
        "--generate-only",
@@ -1087,7 +1084,7 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
    if not worker_types_env:
        # No workers, just the main process
        worker_types = []
        requested_worker_types: Dict[str, Any] = {}
        requested_worker_types: dict[str, Any] = {}
    else:
        # Split type names by comma, ignoring whitespace.
        worker_types = split_and_strip_string(worker_types_env, ",")
@@ -3,14 +3,14 @@
#
# Used by `complement.sh`. Not suitable for production use.

ARG PYTHON_VERSION=3.9
ARG PYTHON_VERSION=3.10

###
### Stage 0: generate requirements.txt
###
# We hardcode the use of Debian bookworm here because this could change upstream
# and other Dockerfiles used for testing are expecting bookworm.
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
# We hardcode the use of Debian trixie here because this could change upstream
# and other Dockerfiles used for testing are expecting trixie.
FROM docker.io/library/python:${PYTHON_VERSION}-slim-trixie

# Install Rust and other dependencies (stolen from normal Dockerfile)
# install the OS build deps
@@ -6,7 +6,7 @@ import os
import platform
import subprocess
import sys
from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional
from typing import Any, Mapping, MutableMapping, NoReturn, Optional

import jinja2

@@ -69,7 +69,7 @@ def generate_config_from_template(
    )

    # populate some params from data files (if they exist, else create new ones)
    environ: Dict[str, Any] = dict(os_environ)
    environ: dict[str, Any] = dict(os_environ)
    secrets = {
        "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
        "macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
@@ -200,7 +200,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
    subprocess.run(args, check=True)


def main(args: List[str], environ: MutableMapping[str, str]) -> None:
def main(args: list[str], environ: MutableMapping[str, str]) -> None:
    mode = args[1] if len(args) > 1 else "run"

    # if we were given an explicit user to switch to, do so
@@ -60,6 +60,7 @@
- [Admin API](usage/administration/admin_api/README.md)
  - [Account Validity](admin_api/account_validity.md)
  - [Background Updates](usage/administration/admin_api/background_updates.md)
  - [Fetch Event](admin_api/fetch_event.md)
  - [Event Reports](admin_api/event_reports.md)
  - [Experimental Features](admin_api/experimental_features.md)
  - [Media](admin_api/media_admin_api.md)
@@ -115,6 +116,8 @@
- [The Auth Chain Difference Algorithm](auth_chain_difference_algorithm.md)
- [Media Repository](media_repository.md)
- [Room and User Statistics](room_and_user_statistics.md)
- [Releasing]()
  - [Release Notes Review Checklist](development/internal_documentation/release_notes_review_checklist.md)
- [Scripts]()

# Other
docs/admin_api/fetch_event.md (new file, 53 lines)
@@ -0,0 +1,53 @@
# Fetch Event API

The fetch event API allows admins to fetch an event regardless of their membership in the room it
originated in.

To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).

Request:
```http
GET /_synapse/admin/v1/fetch_event/<event_id>
```

The API returns a JSON body like the following:

Response:
```json
{
  "event": {
    "auth_events": [
      "$WhLChbYg6atHuFRP7cUd95naUtc8L0f7fqeizlsUVvc",
      "$9Wj8dt02lrNEWweeq-KjRABUYKba0K9DL2liRvsAdtQ",
      "$qJxBFxBt8_ODd9b3pgOL_jXP98S_igc1_kizuPSZFi4"
    ],
    "content": {
      "body": "Hey now",
      "msgtype": "m.text"
    },
    "depth": 6,
    "event_id": "$hJ_kcXbVMcI82JDrbqfUJIHu61tJD86uIFJ_8hNHi7s",
    "hashes": {
      "sha256": "LiNw8DtrRVf55EgAH8R42Wz7WCJUqGsPt2We6qZO5Rg"
    },
    "origin_server_ts": 799,
    "prev_events": [
      "$cnSUrNMnC3Ywh9_W7EquFxYQjC_sT3BAAVzcUVxZq1g"
    ],
    "room_id": "!aIhKToCqgPTBloWMpf:test",
    "sender": "@user:test",
    "signatures": {
      "test": {
        "ed25519:a_lPym": "7mqSDwK1k7rnw34Dd8Fahu0rhPW7jPmcWPRtRDoEN9Yuv+BCM2+Rfdpv2MjxNKy3AYDEBwUwYEuaKMBaEMiKAQ"
      }
    },
    "type": "m.room.message",
    "unsigned": {
      "age_ts": 799
    }
  }
}
```
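For completeness, here is a minimal Python call against the endpoint documented above; the homeserver URL, token, and event ID are placeholders, and the stdlib `urllib` usage is just one way to issue the request:

```python
import json
import urllib.request
from urllib.parse import quote

base_url = "https://homeserver.example.com"  # placeholder
access_token = "<admin_access_token>"  # placeholder
event_id = "$hJ_kcXbVMcI82JDrbqfUJIHu61tJD86uIFJ_8hNHi7s"

# Event IDs contain characters like "$", so percent-encode them in the path.
request = urllib.request.Request(
    f"{base_url}/_synapse/admin/v1/fetch_event/{quote(event_id, safe='')}",
    headers={"Authorization": f"Bearer {access_token}"},
)
with urllib.request.urlopen(request) as response:
    body = json.load(response)

print(body["event"]["type"])  # e.g. "m.room.message"
```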
@@ -1115,3 +1115,76 @@ Example response:
  ]
}
```

# Admin Space Hierarchy Endpoint

This API allows an admin to fetch the space/room hierarchy for a given space,
returning details about that room and any children the room may have, paginating
over the space tree in a depth-first manner to locate child rooms. This is
functionally similar to the [CS Hierarchy](https://spec.matrix.org/v1.16/client-server-api/#get_matrixclientv1roomsroomidhierarchy) endpoint but does not check for
room membership when returning room summaries.

The endpoint does not query other servers over federation about remote rooms
that the server has not joined. This is a deliberate trade-off: while this
means it will leave some holes in the hierarchy that we could otherwise
sometimes fill in, it significantly improves the endpoint's response time, and
the admin endpoint is designed for managing rooms local to the homeserver
anyway.

**Parameters**

The following query parameters are available:

* `from` - An optional pagination token, provided when there are more rooms to
  return than the limit.
* `limit` - Maximum number of rooms to return. Must be a non-negative integer,
  defaults to `50`.
* `max_depth` - The maximum depth in the tree to explore, must be a non-negative
  integer. 0 would correspond to just the root room, 1 would include just the
  root room's children, etc. If not provided, the endpoint will recurse into the
  space tree without limit.

Request:

```http
GET /_synapse/admin/v1/rooms/<room_id>/hierarchy
```

Response:

```json
{
  "rooms": [
    {
      "children_state": [
        {
          "content": {
            "via": ["local_test_server"]
          },
          "origin_server_ts": 1500,
          "sender": "@user:test",
          "state_key": "!QrMkkqBSwYRIFNFCso:test",
          "type": "m.space.child"
        }
      ],
      "name": "space room",
      "guest_can_join": false,
      "join_rule": "public",
      "num_joined_members": 1,
      "room_id": "!sPOpNyMHbZAoAOsOFL:test",
      "room_type": "m.space",
      "world_readable": false
    },
    {
      "children_state": [],
      "guest_can_join": true,
      "join_rule": "invite",
      "name": "nefarious",
      "num_joined_members": 1,
      "room_id": "!QrMkkqBSwYRIFNFCso:test",
      "topic": "being bad",
      "world_readable": false
    }
  ],
  "next_batch": "KUYmRbeSpAoaAIgOKGgyaCEn"
}
```
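A sketch of paginating the hierarchy endpoint above from Python, following `next_batch` until it disappears; the homeserver URL and token are placeholders:

```python
import json
import urllib.request
from urllib.parse import quote, urlencode

base_url = "https://homeserver.example.com"  # placeholder
access_token = "<admin_access_token>"  # placeholder
room_id = "!sPOpNyMHbZAoAOsOFL:test"


def fetch_page(from_token: str | None = None) -> dict:
    params: dict[str, object] = {"limit": 50}
    if from_token is not None:
        params["from"] = from_token
    url = (
        f"{base_url}/_synapse/admin/v1/rooms/{quote(room_id, safe='')}/hierarchy"
        f"?{urlencode(params)}"
    )
    request = urllib.request.Request(
        url, headers={"Authorization": f"Bearer {access_token}"}
    )
    with urllib.request.urlopen(request) as response:
        return json.load(response)


rooms = []
token = None
while True:
    page = fetch_page(token)
    rooms.extend(page["rooms"])
    # next_batch is only present when there are more rooms to fetch.
    token = page.get("next_batch")
    if token is None:
        break
print(f"fetched {len(rooms)} rooms")
```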
@@ -21,7 +21,7 @@ people building from source should ensure they can fetch recent versions of Rust
(e.g. by using [rustup](https://rustup.rs/)).

The oldest supported version of SQLite is the version
[provided](https://packages.debian.org/bullseye/libsqlite3-0) by
[provided](https://packages.debian.org/oldstable/libsqlite3-0) by
[Debian oldstable](https://wiki.debian.org/DebianOldStable).
@@ -320,7 +320,7 @@ The following command will let you run the integration test with the most common
configuration:

```sh
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bullseye
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bookworm
```
(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)
@@ -79,17 +79,17 @@ phonenumbers = [
We can see this pinned version inside the docker image for that release:

```
$ docker pull vectorim/synapse:v1.97.0
$ docker pull matrixdotorg/synapse:latest
...
$ docker run --entrypoint pip vectorim/synapse:v1.97.0 show phonenumbers
$ docker run --entrypoint pip matrixdotorg/synapse:latest show phonenumbers
Name: phonenumbers
Version: 8.12.44
Version: 9.0.15
Summary: Python version of Google's common library for parsing, formatting, storing and validating international phone numbers.
Home-page: https://github.com/daviddrysdale/python-phonenumbers
Author: David Drysdale
Author-email: dmd@lurklurk.org
License: Apache License 2.0
Location: /usr/local/lib/python3.9/site-packages
Location: /usr/local/lib/python3.12/site-packages
Requires:
Required-by: matrix-synapse
```
@@ -0,0 +1,12 @@
# Release notes review checklist

The Synapse release process includes a step to review the changelog before
publishing it. The following is a list of common points to check for:

1. Check whether there are any similar entries that can be merged together (make sure to include all mentioned PRs at the end of the line, i.e. (#1234, #1235, ...)).
2. Link any MSCXXXX lines to the Matrix Spec Change itself: <https://github.com/matrix-org/matrix-spec-proposals/pull/xxxx>.
3. Wrap any class names, variable names, etc. in back-ticks, if needed.
4. Hoist any relevant security, deprecation, etc. announcements to the top of this version's changelog for visibility. This includes any announcements in RCs for this release.
5. Check the upgrade notes for any important announcements, and link to them from the changelog if warranted.
6. Quickly skim and check that each entry is in the appropriate section.
7. Entries under the Bugfixes section should ideally state which Synapse version the bug was introduced in. For example: "Fixed a bug introduced in v1.x.y" or, if no version can be identified, "Fixed a long-standing bug ...".
@@ -87,17 +87,13 @@ file when you upgrade the Debian package to a later version.
Andrej Shadura maintains a
[`matrix-synapse`](https://packages.debian.org/sid/matrix-synapse) package in
the Debian repositories.
For `bookworm` and `sid`, it can be installed simply with:
For `forky` (14) and `sid` (rolling release), it can be installed simply with:

```sh
sudo apt install matrix-synapse
```

Synapse is also available in `bullseye-backports`. Please
see the [Debian documentation](https://backports.debian.org/Instructions/)
for information on how to use backports.

`matrix-synapse` is no longer maintained for `buster` and older.
The downstream Debian `matrix-synapse` package is not available for `trixie` (13) and older. Consider using the Matrix.org packages (above).

##### Downstream Ubuntu packages

@@ -208,7 +204,7 @@ When following this route please make sure that the [Platform-specific prerequis
System requirements:

- POSIX-compliant system (tested on Linux & OS X)
- Python 3.9 or later, up to Python 3.13.
- Python 3.10 or later, up to Python 3.13.
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org

If building on an uncommon architecture for which pre-built wheels are
@@ -311,11 +307,16 @@ sudo dnf group install "Development Tools"

##### Red Hat Enterprise Linux / Rocky Linux / Oracle Linux

*Note: The term "RHEL" below refers to Red Hat Enterprise Linux, Oracle Linux and Rocky Linux. The distributions are 1:1 binary compatible.*
*Note: The term "RHEL" below refers to Red Hat Enterprise Linux, Oracle Linux and Rocky Linux.
The distributions are 1:1 binary compatible.*

It's recommended to use the latest Python versions.

RHEL 8 in particular ships with Python 3.6 by default which is EOL and therefore no longer supported by Synapse. RHEL 9 ships with Python 3.9 which is still supported by the Python core team as of this writing. However, newer Python versions provide significant performance improvements and they're available in official distributions' repositories. Therefore it's recommended to use them.
RHEL 8 & 9 in particular ship with Python 3.6 & 3.9 respectively by default,
which are EOL and therefore no longer supported by Synapse.
However, newer Python versions provide significant performance improvements
and they're available in official distributions' repositories.
Therefore it's recommended to use them.

Python 3.11 and 3.12 are available for both RHEL 8 and 9.
@@ -117,6 +117,44 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).

# Upgrading to v1.142.0

## Python 3.10+ is now required

The minimum supported Python version has been increased from v3.9 to v3.10.
You will need Python 3.10+ to run Synapse v1.142.0.

If you use current versions of the
[matrixdotorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks)
Docker images, no action is required.

## SQLite 3.40.0+ is now required

The minimum supported SQLite version has been increased from 3.27.0 to 3.40.0.

If you use current versions of the
[matrixdotorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks)
Docker images, no action is required.

# Upgrading to v1.141.0

## Docker images now based on Debian `trixie` with Python 3.13

The Docker images are now based on Debian `trixie` and use Python 3.13. If you
are using the Docker images as a base image you may need to e.g. adjust the
paths you mount any additional Python packages at.

# Upgrading to v1.140.0

## Users of `synapse-s3-storage-provider` must update the module to `v1.6.0`

Deployments that make use of the
[synapse-s3-storage-provider](https://github.com/matrix-org/synapse-s3-storage-provider/)
module must update it to
[v1.6.0](https://github.com/matrix-org/synapse-s3-storage-provider/releases/tag/v1.6.0),
otherwise users will be unable to upload or download media.

# Upgrading to v1.139.0

## `/register` requests from old application service implementations may break when using MAS
@@ -2573,6 +2573,28 @@ Example configuration:
turn_allow_guests: false
```
---
### `matrix_rtc`

*(object)* Options related to MatrixRTC. Defaults to `{}`.

This setting has the following sub-options:

* `transports` (array): A list of transport types and arguments to use for MatrixRTC connections. Defaults to `[]`.

  Options for each entry include:

  * `type` (string): The type of transport to use to connect to the selective forwarding unit (SFU).

  * `livekit_service_url` (string): The base URL of the LiveKit service. Should only be used with LiveKit-based transports.

Example configuration:
```yaml
matrix_rtc:
  transports:
    - type: livekit
      livekit_service_url: https://matrix-rtc.example.com/livekit/jwt
```
---
## Registration

Registration can be rate-limited using the parameters in the [Ratelimiting](#ratelimiting) section of this manual.
@@ -3793,7 +3815,7 @@ This setting has the following sub-options:

* `localdb_enabled` (boolean): Set to false to disable authentication against the local password database. This is ignored if `enabled` is false, and is only useful if you have other `password_providers`. Defaults to `true`.

* `pepper` (string|null): Set the value here to a secret random string for extra security. DO NOT CHANGE THIS AFTER INITIAL SETUP! Defaults to `null`.
* `pepper` (string|null): A secret random string that will be appended to users' passwords before they are hashed. This improves the security of short passwords. DO NOT CHANGE THIS AFTER INITIAL SETUP! Defaults to `null`.

* `policy` (object): Define and enforce a password policy, such as minimum lengths for passwords, etc. This is an implementation of MSC2000.
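To make the `pepper` option above concrete, here is a minimal sketch of how a pepper is typically folded into password hashing. The use of bcrypt and the exact concatenation order are illustrative assumptions, not necessarily Synapse's precise scheme:

```python
import bcrypt

# Placeholder; generate a long random value once and never change it afterwards.
pepper = "long-random-server-wide-secret"


def hash_password(password: str) -> bytes:
    # The pepper is appended before hashing, so stolen hashes cannot be
    # brute-forced without also knowing the server-side pepper.
    return bcrypt.hashpw((password + pepper).encode("utf8"), bcrypt.gensalt())


def check_password(password: str, stored_hash: bytes) -> bool:
    return bcrypt.checkpw((password + pepper).encode("utf8"), stored_hash)
```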
@@ -120,6 +120,9 @@ worker_replication_secret: ""

redis:
  enabled: true
  # For additional Redis configuration options (TLS, authentication, etc.),
  # see the Synapse configuration documentation:
  # https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#redis

instance_map:
  main:
mypy.ini (4 lines changed)
|
||||
|
||||
# Run mypy type checking with the minimum supported Python version to catch new usage
|
||||
# that isn't backwards-compatible (types, overloads, etc).
|
||||
python_version = 3.9
|
||||
python_version = 3.10
|
||||
|
||||
files =
|
||||
docker/,
|
||||
@@ -69,7 +69,7 @@ warn_unused_ignores = False
|
||||
;; https://github.com/python/typeshed/tree/master/stubs
|
||||
;; and for each package `foo` there's a corresponding `types-foo` package on PyPI,
|
||||
;; which we can pull in as a dev dependency by adding to `pyproject.toml`'s
|
||||
;; `[tool.poetry.dev-dependencies]` list.
|
||||
;; `[tool.poetry.group.dev.dependencies]` list.
|
||||
|
||||
# https://github.com/lepture/authlib/issues/460
|
||||
[mypy-authlib.*]
|
||||
|
||||
poetry.lock (generated, 1336 lines changed): diff suppressed because it is too large.
@@ -36,7 +36,7 @@

[tool.ruff]
line-length = 88
target-version = "py39"
target-version = "py310"

[tool.ruff.lint]
# See https://beta.ruff.rs/docs/rules/#error-e
@@ -78,6 +78,12 @@ select = [
    "LOG",
    # flake8-logging-format
    "G",
    # pyupgrade
    "UP006",
]
extend-safe-fixes = [
    # pyupgrade
    "UP006"
]

[tool.ruff.lint.isort]
@@ -101,7 +107,7 @@ module-name = "synapse.synapse_rust"

[tool.poetry]
name = "matrix-synapse"
version = "1.139.1"
version = "1.142.0rc3"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial"
@@ -159,12 +165,16 @@ synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
update_synapse_database = "synapse._scripts.update_synapse_database:main"
[tool.poetry.dependencies]
python = "^3.9.0"
python = "^3.10.0"

# Mandatory Dependencies
# ----------------------
# we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0
jsonschema = ">=3.0.0"
# 0.25.0 is the first version to support Python 3.14.
# We can remove this once https://github.com/python-jsonschema/jsonschema/issues/1426 is fixed
# and included in a release.
rpds-py = ">=0.25.0"
# We choose 2.0 as a lower bound: the most recent backwards incompatible release.
# It seems generally available, judging by https://pkgs.org/search/?q=immutabledict
immutabledict = ">=2.0"
@@ -195,7 +205,8 @@ bcrypt = ">=3.1.7"
# Packagers that already took care of libwebp can lower that down to 5.4.0.
Pillow = ">=10.0.1"
# We use SortedDict.peekitem(), which was added in sortedcontainers 1.5.2.
sortedcontainers = ">=1.5.2"
# 2.0.5 updates collections.abc imports to avoid Python 3.10 incompatibility.
sortedcontainers = ">=2.0.5"
pymacaroons = ">=0.13.0"
msgpack = ">=0.5.2"
phonenumbers = ">=8.2.0"
@@ -211,9 +222,10 @@ netaddr = ">=0.7.18"
# end up with a broken installation, with recent MarkupSafe but old Jinja, we
# add a lower bound to the Jinja2 dependency.
Jinja2 = ">=3.0"
bleach = ">=1.4.3"
# We use `assert_never`, which was added in `typing-extensions` 4.1.
typing-extensions = ">=4.1"
# 3.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility.
bleach = ">=3.2.0"
# pydantic 2.12 depends on typing-extensions>=4.14.1
typing-extensions = ">=4.14.1"
# We enforce that we have a `cryptography` version that bundles an `openssl`
# with the latest security patches.
cryptography = ">=3.4.7"
@@ -222,9 +234,10 @@ ijson = ">=3.1.4"
matrix-common = "^1.3.0"
# We need packaging.version.Version(...).major added in 20.0.
packaging = ">=20.0"
# We support pydantic v1 and pydantic v2 via the pydantic.v1 compat module.
# See https://github.com/matrix-org/synapse/issues/15858
pydantic = ">=1.7.4, <3"
pydantic = [
    { version = "~=2.8", python = "<3.14" },
    { version = "~=2.12", python = ">=3.14" },
]
# This is for building the rust components during "poetry install", which
# currently ignores the `build-system.requires` directive (c.f.
@@ -252,10 +265,12 @@ authlib = { version = ">=0.15.1", optional = true }
# `contrib/systemd/log_config.yaml`.
# Note: systemd-python 231 appears to have been yanked from pypi
systemd-python = { version = ">=231", optional = true }
lxml = { version = ">=4.5.2", optional = true }
# 4.6.3 removes usage of _PyGen_Send which is unavailable in CPython as of Python 3.10.
lxml = { version = ">=4.6.3", optional = true }
sentry-sdk = { version = ">=0.7.2", optional = true }
opentracing = { version = ">=2.2.0", optional = true }
jaeger-client = { version = ">=4.0.0", optional = true }
# 4.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility.
jaeger-client = { version = ">=4.2.0", optional = true }
txredisapi = { version = ">=1.4.7", optional = true }
hiredis = { version = "*", optional = true }
Pympler = { version = "*", optional = true }
@@ -319,14 +334,12 @@ all = [
    # - systemd: this is a system-based requirement
]

[tool.poetry.dev-dependencies]
[tool.poetry.group.dev.dependencies]
# We pin development dependencies in poetry.lock so that our tests don't start
# failing on new releases. Keeping lower bounds loose here means that dependabot
# can bump versions without having to update the content-hash in the lockfile.
# This helps prevent merge conflicts when running a batch of dependabot updates.
ruff = "0.12.10"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"
ruff = "0.14.3"

# Typechecking
lxml-stubs = ">=0.4.0"
@@ -356,7 +369,7 @@ click = ">=8.1.3"
# GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints.
GitPython = ">=3.1.20"
markdown-it-py = ">=3.0.0"
pygithub = ">=1.55"
pygithub = ">=1.59"
# The following are executed as commands by the release script.
twine = "*"
# Towncrier min version comes from https://github.com/matrix-org/synapse/pull/3425. Rationale unclear.
@@ -381,10 +394,10 @@ build-backend = "poetry.core.masonry.api"
# Skip unsupported platforms (by us or by Rust).
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
# We skip:
# - CPython and PyPy 3.8: EOLed
# - CPython 3.8: EOLed
# - musllinux i686: excluded to reduce number of wheels we build.
# c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
skip = "cp38* pp38* *-musllinux_i686"
skip = "cp38* *-musllinux_i686"
# Enable non-default builds.
# "pypy" used to be included by default up until cibuildwheel 3.
enable = "pypy"
@@ -30,14 +30,14 @@ http = "1.1.0"
lazy_static = "1.4.0"
log = "0.4.17"
mime = "0.3.17"
pyo3 = { version = "0.25.1", features = [
pyo3 = { version = "0.26.0", features = [
    "macros",
    "anyhow",
    "abi3",
    "abi3-py39",
    "abi3-py310",
] }
pyo3-log = "0.12.4"
pythonize = "0.25.0"
pyo3-log = "0.13.1"
pythonize = "0.26.0"
regex = "1.6.0"
sha2 = "0.10.8"
serde = { version = "1.0.144", features = ["derive"] }
@@ -41,7 +41,7 @@ use pyo3::{
    pybacked::PyBackedStr,
    pyclass, pymethods,
    types::{PyAnyMethods, PyDict, PyDictMethods, PyString},
    Bound, IntoPyObject, PyAny, PyObject, PyResult, Python,
    Bound, IntoPyObject, Py, PyAny, PyResult, Python,
};

use crate::UnwrapInfallible;
@@ -289,7 +289,7 @@ impl EventInternalMetadata {
    /// Get a dict holding the data stored in the `internal_metadata` column in the database.
    ///
    /// Note that `outlier` and `stream_ordering` are stored in separate columns so are not returned here.
    fn get_dict(&self, py: Python<'_>) -> PyResult<PyObject> {
    fn get_dict(&self, py: Python<'_>) -> PyResult<Py<PyAny>> {
        let dict = PyDict::new(py);

        for entry in &self.data {
@@ -134,10 +134,10 @@ fn get_runtime<'a>(reactor: &Bound<'a, PyAny>) -> PyResult<PyRef<'a, PyTokioRunt
}

/// A reference to the `twisted.internet.defer` module.
static DEFER: OnceCell<PyObject> = OnceCell::new();
static DEFER: OnceCell<Py<PyAny>> = OnceCell::new();

/// Access to the `twisted.internet.defer` module.
fn defer(py: Python<'_>) -> PyResult<&Bound<PyAny>> {
fn defer(py: Python<'_>) -> PyResult<&Bound<'_, PyAny>> {
    Ok(DEFER
        .get_or_try_init(|| py.import("twisted.internet.defer").map(Into::into))?
        .bind(py))
@@ -165,7 +165,7 @@ pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()>
#[pyclass]
struct HttpClient {
    client: reqwest::Client,
    reactor: PyObject,
    reactor: Py<PyAny>,
}

#[pymethods]
@@ -237,7 +237,7 @@ impl HttpClient {
                return Err(HttpResponseException::new(status, buffer));
            }

            let r = Python::with_gil(|py| buffer.into_pyobject(py).map(|o| o.unbind()))?;
            let r = Python::attach(|py| buffer.into_pyobject(py).map(|o| o.unbind()))?;

            Ok(r)
        })
@@ -270,7 +270,7 @@ where
    handle.spawn(async move {
        let res = task.await;

        Python::with_gil(move |py| {
        Python::attach(move |py| {
            // Flatten the panic into standard python error
            let res = match res {
                Ok(r) => r,
@@ -29,7 +29,7 @@ use pyo3::{
    exceptions::PyValueError,
    pyclass, pymethods,
    types::{PyAnyMethods, PyModule, PyModuleMethods},
    Bound, IntoPyObject, Py, PyAny, PyObject, PyResult, Python,
    Bound, IntoPyObject, Py, PyAny, PyResult, Python,
};
use ulid::Ulid;

@@ -56,7 +56,7 @@ fn prepare_headers(headers: &mut HeaderMap, session: &Session) {
#[pyclass]
struct RendezvousHandler {
    base: Uri,
    clock: PyObject,
    clock: Py<PyAny>,
    sessions: BTreeMap<Ulid, Session>,
    capacity: usize,
    max_content_length: u64,
@@ -1,5 +1,5 @@
$schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.139/synapse-config.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.142/synapse-config.schema.json
type: object
properties:
  modules:
@@ -2884,6 +2884,35 @@ properties:
      default: true
      examples:
        - false
  matrix_rtc:
    type: object
    description: >-
      Options related to MatrixRTC.
    properties:
      transports:
        type: array
        items:
          type: object
          required:
            - type
          properties:
            type:
              type: string
              description: The type of transport to use to connect to the selective forwarding unit (SFU).
              example: livekit
            livekit_service_url:
              type: string
              description: >-
                The base URL of the LiveKit service. Should only be used with LiveKit-based transports.
              example: https://matrix-rtc.example.com/livekit/jwt
        description:
          A list of transport types and arguments to use for MatrixRTC connections.
        default: []
    default: {}
    examples:
      - transports:
          - type: livekit
            livekit_service_url: https://matrix-rtc.example.com/livekit/jwt
  enable_registration:
    type: boolean
    description: >-
@@ -4666,8 +4695,9 @@ properties:
  pepper:
    type: ["string", "null"]
    description: >-
      Set the value here to a secret random string for extra security. DO
      NOT CHANGE THIS AFTER INITIAL SETUP!
      A secret random string that will be appended to users' passwords
      before they are hashed. This improves the security of short passwords.
      DO NOT CHANGE THIS AFTER INITIAL SETUP!
    default: null
  policy:
    type: object
@@ -18,16 +18,15 @@ import sys
import threading
from concurrent.futures import ThreadPoolExecutor
from types import FrameType
from typing import Collection, Optional, Sequence, Set
from typing import Collection, Optional, Sequence

# These are expanded inside the dockerfile to be a fully qualified image name.
# e.g. docker.io/library/debian:bullseye
# e.g. docker.io/library/debian:bookworm
#
# If an EOL is forced by a Python version and we're dropping support for it, make sure
# to remove references to the distribution across Synapse (search for "bullseye" for
# to remove references to the distribution across Synapse (search for "bookworm" for
# example)
DISTS = (
    "debian:bullseye",  # (EOL ~2024-07) (our EOL forced by Python 3.9 is 2025-10-05)
    "debian:bookworm",  # (EOL 2026-06) (our EOL forced by Python 3.11 is 2027-10-24)
    "debian:sid",  # (rolling distro, no EOL)
    "ubuntu:jammy",  # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04)
@@ -54,7 +53,7 @@ class Builder:
    ):
        self.redirect_stdout = redirect_stdout
        self._docker_build_args = tuple(docker_build_args or ())
        self.active_containers: Set[str] = set()
        self.active_containers: set[str] = set()
        self._lock = threading.Lock()
        self._failed = False
@@ -21,7 +21,6 @@
#
import sys
from pathlib import Path
from typing import Dict, List

import tomli

@@ -33,7 +32,7 @@ def main() -> None:

    # Poetry 1.3+ lockfile format:
    # There's a `files` inline table in each [[package]]
    packages_to_assets: Dict[str, List[Dict[str, str]]] = {
    packages_to_assets: dict[str, list[dict[str, str]]] = {
        package["name"]: package["files"] for package in lockfile_content["package"]
    }
@@ -1,478 +0,0 @@
#! /usr/bin/env python
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2022 The Matrix.org Foundation C.I.C.
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#
"""
A script which enforces that Synapse always uses strict types when defining a Pydantic
model.

Pydantic does not yet offer a strict mode, but it is planned for pydantic v2. See

    https://github.com/pydantic/pydantic/issues/1098
    https://pydantic-docs.helpmanual.io/blog/pydantic-v2/#strict-mode

until then, this script is a best effort to stop us from introducing type coercion bugs
(like the infamous stringy power levels fixed in room version 10).
"""

import argparse
import contextlib
import functools
import importlib
import logging
import os
import pkgutil
import sys
import textwrap
import traceback
import unittest.mock
from contextlib import contextmanager
from typing import (
    Any,
    Callable,
    Dict,
    Generator,
    List,
    Set,
    Type,
    TypeVar,
)

from parameterized import parameterized
from typing_extensions import ParamSpec

from synapse._pydantic_compat import (
    BaseModel as PydanticBaseModel,
    conbytes,
    confloat,
    conint,
    constr,
    get_args,
)

logger = logging.getLogger(__name__)
CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: List[Callable] = [
|
||||
constr,
|
||||
conbytes,
|
||||
conint,
|
||||
confloat,
|
||||
]
|
||||
|
||||
TYPES_THAT_PYDANTIC_WILL_COERCE_TO = [
|
||||
str,
|
||||
bytes,
|
||||
int,
|
||||
float,
|
||||
bool,
|
||||
]
|
||||
|
||||
|
||||
P = ParamSpec("P")
|
||||
R = TypeVar("R")
|
||||
|
||||
|
||||
class ModelCheckerException(Exception):
|
||||
"""Dummy exception. Allows us to detect unwanted types during a module import."""
|
||||
|
||||
|
||||
class MissingStrictInConstrainedTypeException(ModelCheckerException):
|
||||
factory_name: str
|
||||
|
||||
def __init__(self, factory_name: str):
|
||||
self.factory_name = factory_name
|
||||
|
||||
|
||||
class FieldHasUnwantedTypeException(ModelCheckerException):
|
||||
message: str
|
||||
|
||||
def __init__(self, message: str):
|
||||
self.message = message
|
||||
|
||||
|
||||
def make_wrapper(factory: Callable[P, R]) -> Callable[P, R]:
|
||||
"""We patch `constr` and friends with wrappers that enforce strict=True."""
|
||||
|
||||
@functools.wraps(factory)
|
||||
def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
|
||||
if "strict" not in kwargs:
|
||||
raise MissingStrictInConstrainedTypeException(factory.__name__)
|
||||
if not kwargs["strict"]:
|
||||
raise MissingStrictInConstrainedTypeException(factory.__name__)
|
||||
return factory(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def field_type_unwanted(type_: Any) -> bool:
    """Very rough attempt to detect if a type is unwanted as a Pydantic annotation.

    At present, we exclude types which will coerce, or any generic type involving types
    which will coerce."""
    logger.debug("Is %s unwanted?", type_)
    if type_ in TYPES_THAT_PYDANTIC_WILL_COERCE_TO:
        logger.debug("yes")
        return True
    logger.debug("Maybe. Subargs are %s", get_args(type_))
    rv = any(field_type_unwanted(t) for t in get_args(type_))
    logger.debug("Conclusion: %s %s unwanted", type_, "is" if rv else "is not")
    return rv
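# Editor's illustration (not part of the original script) of what the check above
# flags, assuming pydantic v1's strict types:
#
#     field_type_unwanted(str)                   # True: pydantic coerces to str
#     field_type_unwanted(Dict[str, StrictStr])  # True: the `str` key coerces
#     field_type_unwanted(List[StrictStr])       # False: strict all the way down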


class PatchedBaseModel(PydanticBaseModel):
    """A patched version of BaseModel that inspects fields after models are defined.

    We complain loudly if we see an unwanted type.

    Beware: ModelField.type_ is presumably private; this is likely to be very brittle.
    """

    @classmethod
    def __init_subclass__(cls: Type[PydanticBaseModel], **kwargs: object):
        for field in cls.__fields__.values():
            # Note that field.type_ and field.outer_type are computed based on the
            # annotation type, see pydantic.fields.ModelField._type_analysis
            if field_type_unwanted(field.outer_type_):
                # TODO: this only reports the first bad field. Can we find all bad ones
                # and report them all?
                raise FieldHasUnwantedTypeException(
                    f"{cls.__module__}.{cls.__qualname__} has field '{field.name}' "
                    f"with unwanted type `{field.outer_type_}`"
                )

@contextmanager
def monkeypatch_pydantic() -> Generator[None, None, None]:
    """Patch pydantic with our snooping versions of BaseModel and the con* functions.

    If the snooping functions see something they don't like, they'll raise a
    ModelCheckerException instance.
    """
    with contextlib.ExitStack() as patches:
        # Most Synapse code ought to import the patched objects directly from
        # `pydantic`. But we also patch their containing modules `pydantic.main` and
        # `pydantic.types` for completeness.
        patch_basemodel = unittest.mock.patch(
            "synapse._pydantic_compat.BaseModel", new=PatchedBaseModel
        )
        patches.enter_context(patch_basemodel)
        for factory in CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG:
            wrapper: Callable = make_wrapper(factory)
            patch = unittest.mock.patch(
                f"synapse._pydantic_compat.{factory.__name__}", new=wrapper
            )
            patches.enter_context(patch)
        yield
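# Editor's sketch (not part of the original script) of how the patch is meant to
# be used; any model defined while it is active is checked at definition time:
#
#     with monkeypatch_pydantic():
#         importlib.import_module("synapse.events")  # hypothetical target module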


def format_model_checker_exception(e: ModelCheckerException) -> str:
    """Work out which line of code caused e. Format the line in a human-friendly way."""
    # TODO. FieldHasUnwantedTypeException gives better error messages. Can we ditch the
    # patches of constr() etc, and instead inspect fields to look for ConstrainedStr
    # with strict=False? There is some difficulty with the inheritance hierarchy
    # because StrictStr < ConstrainedStr < str.
    if isinstance(e, FieldHasUnwantedTypeException):
        return e.message
    elif isinstance(e, MissingStrictInConstrainedTypeException):
        frame_summary = traceback.extract_tb(e.__traceback__)[-2]
        return (
            f"Missing `strict=True` from {e.factory_name}() call \n"
            + traceback.format_list([frame_summary])[0].lstrip()
        )
    else:
        raise ValueError(f"Unknown exception {e}") from e


def lint() -> int:
    """Try to import all of Synapse and see if we spot any Pydantic type coercions.

    Print any problems, then return a status code suitable for sys.exit."""
    failures = do_lint()
    if failures:
        print(f"Found {len(failures)} problem(s)")
    for failure in sorted(failures):
        print(failure)
    return os.EX_DATAERR if failures else os.EX_OK


def do_lint() -> Set[str]:
    """Try to import all of Synapse and see if we spot any Pydantic type coercions."""
    failures = set()

    with monkeypatch_pydantic():
        logger.debug("Importing synapse")
        try:
            # TODO: make "synapse" an argument so we can target this script at
            # a subpackage
            module = importlib.import_module("synapse")
        except ModelCheckerException as e:
            logger.warning("Bad annotation found when importing synapse")
            failures.add(format_model_checker_exception(e))
            return failures

        try:
            logger.debug("Fetching subpackages")
            module_infos = list(
                pkgutil.walk_packages(module.__path__, f"{module.__name__}.")
            )
        except ModelCheckerException as e:
            logger.warning("Bad annotation found when looking for modules to import")
            failures.add(format_model_checker_exception(e))
            return failures

        for module_info in module_infos:
            logger.debug("Importing %s", module_info.name)
            try:
                importlib.import_module(module_info.name)
            except ModelCheckerException as e:
                logger.warning(
                    "Bad annotation found when importing %s", module_info.name
                )
                failures.add(format_model_checker_exception(e))

    return failures


def run_test_snippet(source: str) -> None:
    """Exec a snippet of source code in an isolated environment."""
    # To emulate `source` being called at the top level of the module,
    # the globals and locals we provide apparently have to be the same mapping.
    #
    # > Remember that at the module level, globals and locals are the same dictionary.
    # > If exec gets two separate objects as globals and locals, the code will be
    # > executed as if it were embedded in a class definition.
    globals_: Dict[str, object]
    locals_: Dict[str, object]
    globals_ = locals_ = {}
    exec(textwrap.dedent(source), globals_, locals_)
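# Editor's illustration (not part of the original script) of the exec() quirk the
# comment above describes: with two separate mappings, comprehension scopes cannot
# see snippet-level names, just as in a class body.
#
#     exec("y = 1\nz = [y for _ in range(3)]", {}, {})  # raises NameError
#     exec("y = 1\nz = [y for _ in range(3)]", {})      # fine: one shared mapping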


class TestConstrainedTypesPatch(unittest.TestCase):
    def test_expression_without_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr()
                """
            )

    def test_called_as_module_attribute_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                import pydantic
                pydantic.constr()
                """
            )

    def test_wildcard_import_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import *
                except ImportError:
                    from pydantic import *
                constr()
                """
            )

    def test_alternative_import_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1.types import constr
                except ImportError:
                    from pydantic.types import constr
                constr()
                """
            )

    def test_alternative_import_attribute_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import types as pydantic_types
                except ImportError:
                    from pydantic import types as pydantic_types
                pydantic_types.constr()
                """
            )

    def test_kwarg_but_no_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr(min_length=10)
                """
            )

    def test_kwarg_strict_False_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr(strict=False)
                """
            )

    def test_kwarg_strict_True_doesnt_raise(self) -> None:
        with monkeypatch_pydantic():
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr(strict=True)
                """
            )

    def test_annotation_without_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                x: constr()
                """
            )

    def test_field_annotation_without_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import BaseModel, conint
                except ImportError:
                    from pydantic import BaseModel, conint
                class C:
                    x: conint()
                """
            )

class TestFieldTypeInspection(unittest.TestCase):
    @parameterized.expand(
        [
            ("str",),
            ("bytes",),
            ("int",),
            ("float",),
            ("bool",),
            ("Optional[str]",),
            ("Union[None, str]",),
            ("List[str]",),
            ("List[List[str]]",),
            ("Dict[StrictStr, str]",),
            ("Dict[str, StrictStr]",),
            ("TypedDict('D', x=int)",),
        ]
    )
    def test_field_holding_unwanted_type_raises(self, annotation: str) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                f"""
                from typing import *
                try:
                    from pydantic.v1 import *
                except ImportError:
                    from pydantic import *
                class C(BaseModel):
                    f: {annotation}
                """
            )

    @parameterized.expand(
        [
            ("StrictStr",),
            ("StrictBytes",),
            ("StrictInt",),
            ("StrictFloat",),
            ("StrictBool",),
            ("constr(strict=True, min_length=10)",),
            ("Optional[StrictStr]",),
            ("Union[None, StrictStr]",),
            ("List[StrictStr]",),
            ("List[List[StrictStr]]",),
            ("Dict[StrictStr, StrictStr]",),
            ("TypedDict('D', x=StrictInt)",),
        ]
    )
    def test_field_holding_accepted_type_doesnt_raise(self, annotation: str) -> None:
        with monkeypatch_pydantic():
            run_test_snippet(
                f"""
                from typing import *
                try:
                    from pydantic.v1 import *
                except ImportError:
                    from pydantic import *
                class C(BaseModel):
                    f: {annotation}
                """
            )

    def test_field_holding_str_raises_with_alternative_import(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1.main import BaseModel
                except ImportError:
                    from pydantic.main import BaseModel
                class C(BaseModel):
                    f: str
                """
            )

parser = argparse.ArgumentParser()
parser.add_argument("mode", choices=["lint", "test"], default="lint", nargs="?")
parser.add_argument("-v", "--verbose", action="store_true")


if __name__ == "__main__":
    args = parser.parse_args(sys.argv[1:])
    logging.basicConfig(
        format="%(asctime)s %(name)s:%(lineno)d %(levelname)s %(message)s",
        level=logging.DEBUG if args.verbose else logging.INFO,
    )
    # suppress logs we don't care about
    logging.getLogger("xmlschema").setLevel(logging.WARNING)
    if args.mode == "lint":
        sys.exit(lint())
    elif args.mode == "test":
        unittest.main(argv=sys.argv[:1])
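
# Editor's usage sketch (not part of the original script):
#     ./scripts-dev/check_pydantic_models.py lint       # run the linter
#     ./scripts-dev/check_pydantic_models.py test -v    # run its self-tests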
@@ -5,15 +5,19 @@
# Also checks that schema deltas do not try and create or drop indices.

import re
from typing import Any, Dict, List
from typing import Any

import click
import git

SCHEMA_FILE_REGEX = re.compile(r"^synapse/storage/schema/(.*)/delta/(.*)/(.*)$")
INDEX_CREATION_REGEX = re.compile(r"CREATE .*INDEX .*ON ([a-z_]+)", flags=re.IGNORECASE)
INDEX_DELETION_REGEX = re.compile(r"DROP .*INDEX ([a-z_]+)", flags=re.IGNORECASE)
TABLE_CREATION_REGEX = re.compile(r"CREATE .*TABLE ([a-z_]+)", flags=re.IGNORECASE)
INDEX_CREATION_REGEX = re.compile(
    r"CREATE .*INDEX .*ON ([a-z_0-9]+)", flags=re.IGNORECASE
)
INDEX_DELETION_REGEX = re.compile(r"DROP .*INDEX ([a-z_0-9]+)", flags=re.IGNORECASE)
TABLE_CREATION_REGEX = re.compile(
    r"CREATE .*TABLE.* ([a-z_0-9]+)\s*\(", flags=re.IGNORECASE
)
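# Editor's note (not part of the diff): the [a-z_0-9] change lets schema object
# names containing digits match; hypothetical checks against the new patterns:
#
#     TABLE_CREATION_REGEX.search("CREATE TABLE event_txn_id2 (...)")  # matches
#     INDEX_CREATION_REGEX.search("CREATE INDEX foo2_idx ON foo2")     # matches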

# The base branch we want to check against. We use the main development branch
# on the assumption that is what we are developing against.
@@ -48,16 +52,16 @@ def main(force_colors: bool) -> None:

    r = repo.git.show(f"origin/{DEVELOP_BRANCH}:synapse/storage/schema/__init__.py")

    locals: Dict[str, Any] = {}
    locals: dict[str, Any] = {}
    exec(r, locals)
    current_schema_version = locals["SCHEMA_VERSION"]

    diffs: List[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None)
    diffs: list[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None)

    # Get the schema version of the local file to check against current schema on develop
    with open("synapse/storage/schema/__init__.py") as file:
        local_schema = file.read()
    new_locals: Dict[str, Any] = {}
    new_locals: dict[str, Any] = {}
    exec(local_schema, new_locals)
    local_schema_version = new_locals["SCHEMA_VERSION"]

@@ -173,11 +177,14 @@ def main(force_colors: bool) -> None:
            clause = match.group()

            click.secho(
                f"Found delta with index deletion: '{clause}' in {delta_file}\nThese should be in background updates.",
                f"Found delta with index deletion: '{clause}' in {delta_file}",
                fg="red",
                bold=True,
                color=force_colors,
            )
            click.secho(
                " ↪ These should be in background updates.",
            )
            return_code = 1

        # Check for index creation, which is only allowed for tables we've
@@ -188,11 +195,14 @@ def main(force_colors: bool) -> None:
            table_name = match.group(1)
            if table_name not in created_tables:
                click.secho(
                    f"Found delta with index creation: '{clause}' in {delta_file}\nThese should be in background updates.",
                    f"Found delta with index creation for existing table: '{clause}' in {delta_file}",
                    fg="red",
                    bold=True,
                    color=force_colors,
                )
                click.secho(
                    " ↪ These should be in background updates (or the table should be created in the same delta).",
                )
                return_code = 1

    click.get_current_context().exit(return_code)

@@ -43,7 +43,7 @@ import argparse
import base64
import json
import sys
from typing import Any, Dict, Mapping, Optional, Tuple, Union
from typing import Any, Mapping, Optional, Union
from urllib import parse as urlparse

import requests
@@ -147,7 +147,7 @@ def request(
    s = requests.Session()
    s.mount("matrix-federation://", MatrixConnectionAdapter())

    headers: Dict[str, str] = {
    headers: dict[str, str] = {
        "Authorization": authorization_headers[0],
    }

@@ -303,7 +303,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
        request: PreparedRequest,
        verify: Optional[Union[bool, str]],
        proxies: Optional[Mapping[str, str]] = None,
        cert: Optional[Union[Tuple[str, str], str]] = None,
        cert: Optional[Union[tuple[str, str], str]] = None,
    ) -> HTTPConnectionPool:
        # overrides the get_connection_with_tls_context() method in the base class
        parsed = urlparse.urlsplit(request.url)
@@ -326,7 +326,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
        )

    @staticmethod
    def _lookup(server_name: str) -> Tuple[str, int, str]:
    def _lookup(server_name: str) -> tuple[str, int, str]:
        """
        Do an SRV lookup on a server name and return the host:port to connect to
        Given the server_name (after any .well-known lookup), return the host, port and

@@ -134,9 +134,6 @@ fi
# Ensure the formatting of Rust code.
cargo-fmt

# Ensure all Pydantic models use strict types.
./scripts-dev/check_pydantic_models.py lint

# Ensure type hints are correct.
mypy


@@ -24,7 +24,7 @@ can crop up, e.g. the cache descriptors.
"""

import enum
from typing import Callable, Mapping, Optional, Tuple, Type, Union
from typing import Callable, Mapping, Optional, Union

import attr
import mypy.types
@@ -184,8 +184,8 @@ should be in the source code.

# Unbound at this point because we don't know the mypy version yet.
# This is set in the `plugin(...)` function below.
MypyPydanticPluginClass: Type[Plugin]
MypyZopePluginClass: Type[Plugin]
MypyPydanticPluginClass: type[Plugin]
MypyZopePluginClass: type[Plugin]


class SynapsePlugin(Plugin):
@@ -795,7 +795,7 @@ AT_CACHED_MUTABLE_RETURN = ErrorCode(

def is_cacheable(
    rt: mypy.types.Type, signature: CallableType, verbose: bool
) -> Tuple[bool, Optional[str]]:
) -> tuple[bool, Optional[str]]:
    """
    Check if a particular type is cacheable.

@@ -905,7 +905,7 @@ def is_cacheable(
    return False, f"Don't know how to handle {type(rt).__qualname__} return type"


def plugin(version: str) -> Type[SynapsePlugin]:
def plugin(version: str) -> type[SynapsePlugin]:
    global MypyPydanticPluginClass, MypyZopePluginClass
    # This is the entry point of the plugin, and lets us deal with the fact
    # that the mypy plugin interface is *not* stable by looking at the version

@@ -32,11 +32,13 @@ import time
import urllib.request
from os import path
from tempfile import TemporaryDirectory
from typing import Any, List, Match, Optional, Union
from typing import Any, Match, Optional, Union

import attr
import click
import git
import github
import github.Auth
from click.exceptions import ClickException
from git import GitCommandError, Repo
from github import BadCredentialsException, Github
@@ -314,7 +316,10 @@ def _prepare() -> None:
    )

    print("Opening the changelog in your browser...")
    print("Please ask #synapse-dev to give it a check.")
    print(
        "Please review it using the release notes review checklist: https://element-hq.github.io/synapse/develop/development/internal_documentation/release_notes_review_checklist.html"
    )
    print("And post it in #synapse-dev for cursory review from the team.")
    click.launch(
        f"https://github.com/element-hq/synapse/blob/{synapse_repo.active_branch.name}/CHANGES.md"
    )
@@ -397,7 +402,7 @@ def _tag(gh_token: Optional[str]) -> None:
        return

    # Create a new draft release
    gh = Github(gh_token)
    gh = Github(auth=github.Auth.Token(token=gh_token))
    gh_repo = gh.get_repo("element-hq/synapse")
    release = gh_repo.create_git_release(
        tag=tag_name,
@@ -428,7 +433,7 @@ def _publish(gh_token: str) -> None:

    if gh_token:
        # Test that the GH Token is valid before continuing.
        gh = Github(gh_token)
        gh = Github(auth=github.Auth.Token(token=gh_token))
        gh.get_user()

    # Make sure we're in a git repo.
@@ -441,7 +446,7 @@ def _publish(gh_token: str) -> None:
        return

    # Publish the draft release
    gh = Github(gh_token)
    gh = Github(auth=github.Auth.Token(token=gh_token))
    gh_repo = gh.get_repo("element-hq/synapse")
    for release in gh_repo.get_releases():
        if release.title == tag_name:
@@ -486,8 +491,13 @@ def _upload(gh_token: Optional[str]) -> None:
        click.echo(f"Tag {tag_name} ({tag.commit}) is not currently checked out!")
        click.get_current_context().abort()

    if gh_token:
        gh = Github(auth=github.Auth.Token(token=gh_token))
    else:
        # Use github anonymously.
        gh = Github()

    # Query all the assets corresponding to this release.
    gh = Github(gh_token)
    gh_repo = gh.get_repo("element-hq/synapse")
    gh_release = gh_repo.get_release(tag_name)

@@ -639,7 +649,16 @@ def _notify(message: str) -> None:


@cli.command()
def merge_back() -> None:
# Although this option is not used, allow it anyway. Otherwise the user will
# receive an error when providing it, which is annoying as other commands accept
# it.
@click.option(
    "--gh-token",
    "_gh_token",
    envvar=["GH_TOKEN", "GITHUB_TOKEN"],
    required=False,
)
def merge_back(_gh_token: Optional[str]) -> None:
    _merge_back()


@@ -687,7 +706,16 @@ def _merge_back() -> None:


@cli.command()
def announce() -> None:
# Although this option is not used, allow it anyway. Otherwise the user will
# receive an error when providing it, which is annoying as other commands accept
# it.
@click.option(
    "--gh-token",
    "_gh_token",
    envvar=["GH_TOKEN", "GITHUB_TOKEN"],
    required=False,
)
def announce(_gh_token: Optional[str]) -> None:
    _announce()


@@ -696,18 +724,31 @@ def _announce() -> None:

    current_version = get_package_version()
    tag_name = f"v{current_version}"
    is_rc = "rc" in tag_name

    release_text = f"""
### Synapse {current_version} {"🧪" if is_rc else "🚀"}

    click.echo(
        f"""
Hi everyone. Synapse {current_version} has just been released.
"""

    if "rc" in tag_name:
        release_text += (
            "\nThis is a release candidate. Please help us test it out "
            "before the final release by deploying it to non-production environments, "
            "and reporting any issues you find to "
            "[the issue tracker](https://github.com/element-hq/synapse/issues). Thanks!\n"
        )

    release_text += f"""
[notes](https://github.com/element-hq/synapse/releases/tag/{tag_name}) | \
[docker](https://hub.docker.com/r/matrixdotorg/synapse/tags?name={tag_name}) | \
[debs](https://packages.matrix.org/debian/) | \
[pypi](https://pypi.org/project/matrix-synapse/{current_version}/)"""
    )

    if "rc" in tag_name:
        click.echo(release_text)

    if is_rc:
        click.echo(
            """
Announce the RC in
@@ -732,7 +773,7 @@ Ask the designated people to do the blog and tweets."""
def full(gh_token: str) -> None:
    if gh_token:
        # Test that the GH Token is valid before continuing.
        gh = Github(gh_token)
        gh = Github(auth=github.Auth.Token(token=gh_token))
        gh.get_user()

    click.echo("1. If this is a security release, read the security wiki page.")
@@ -801,8 +842,12 @@ def get_repo_and_check_clean_checkout(
        raise click.ClickException(
            f"{path} is not a git repository (expecting a {name} repository)."
        )
    if repo.is_dirty():
        raise click.ClickException(f"Uncommitted changes exist in {path}.")
    while repo.is_dirty():
        if not click.confirm(
            f"Uncommitted changes exist in {path}. Commit or stash them. Ready to continue?"
        ):
            raise click.ClickException("Aborted.")

    return repo


@@ -814,7 +859,7 @@ def check_valid_gh_token(gh_token: Optional[str]) -> None:
        return

    try:
        gh = Github(gh_token)
        gh = Github(auth=github.Auth.Token(token=gh_token))

        # We need to look up the name to trigger a request.
        _name = gh.get_user().name
@@ -861,7 +906,7 @@ def get_changes_for_version(wanted_version: version.Version) -> str:
        start_line: int
        end_line: Optional[int] = None  # Is None if it's the last entry

    headings: List[VersionSection] = []
    headings: list[VersionSection] = []
    for i, token in enumerate(tokens):
        # We look for level 1 headings (h1 tags).
        if token.type != "heading_open" or token.tag != "h1":

@@ -38,7 +38,7 @@ import io
import json
import sys
from collections import defaultdict
from typing import Any, Dict, Iterator, Optional, Tuple
from typing import Any, Iterator, Optional

import git
from packaging import version
@@ -57,7 +57,7 @@ SCHEMA_VERSION_FILES = (
OLDEST_SHOWN_VERSION = version.parse("v1.0")


def get_schema_versions(tag: git.Tag) -> Tuple[Optional[int], Optional[int]]:
def get_schema_versions(tag: git.Tag) -> tuple[Optional[int], Optional[int]]:
    """Get the schema and schema compat versions for a tag."""
    schema_version = None
    schema_compat_version = None
@@ -81,7 +81,7 @@ def get_schema_versions(tag: git.Tag) -> Tuple[Optional[int], Optional[int]]:
        # SCHEMA_COMPAT_VERSION is sometimes across multiple lines, the easiest
        # thing to do is exec the code. Luckily it has only ever existed in
        # a file which imports nothing else from Synapse.
        locals: Dict[str, Any] = {}
        locals: dict[str, Any] = {}
        exec(schema_file.data_stream.read().decode("utf-8"), {}, locals)
        schema_version = locals["SCHEMA_VERSION"]
        schema_compat_version = locals.get("SCHEMA_COMPAT_VERSION")

@@ -7,18 +7,14 @@ from __future__ import annotations
from typing import (
    Any,
    Callable,
    Dict,
    Hashable,
    ItemsView,
    Iterable,
    Iterator,
    KeysView,
    List,
    Mapping,
    Optional,
    Sequence,
    Tuple,
    Type,
    TypeVar,
    Union,
    ValuesView,
@@ -35,14 +31,14 @@ _VT_co = TypeVar("_VT_co", covariant=True)
_SD = TypeVar("_SD", bound=SortedDict)
_Key = Callable[[_T], Any]

class SortedDict(Dict[_KT, _VT]):
class SortedDict(dict[_KT, _VT]):
    @overload
    def __init__(self, **kwargs: _VT) -> None: ...
    @overload
    def __init__(self, __map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
    @overload
    def __init__(
        self, __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT
        self, __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT
    ) -> None: ...
    @overload
    def __init__(self, __key: _Key[_KT], **kwargs: _VT) -> None: ...
@@ -52,7 +48,7 @@ class SortedDict(Dict[_KT, _VT]):
    ) -> None: ...
    @overload
    def __init__(
        self, __key: _Key[_KT], __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT
        self, __key: _Key[_KT], __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT
    ) -> None: ...
    @property
    def key(self) -> Optional[_Key[_KT]]: ...
@@ -84,8 +80,8 @@ class SortedDict(Dict[_KT, _VT]):
    def pop(self, key: _KT) -> _VT: ...
    @overload
    def pop(self, key: _KT, default: _T = ...) -> Union[_VT, _T]: ...
    def popitem(self, index: int = ...) -> Tuple[_KT, _VT]: ...
    def peekitem(self, index: int = ...) -> Tuple[_KT, _VT]: ...
    def popitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
    def peekitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
    def setdefault(self, key: _KT, default: Optional[_VT] = ...) -> _VT: ...
    # Mypy now reports the first overload as an error, because typeshed widened the type
    # of `__map` to its internal `_typeshed.SupportsKeysAndGetItem` type in
@@ -102,9 +98,9 @@ class SortedDict(Dict[_KT, _VT]):
    # def update(self, **kwargs: _VT) -> None: ...
    def __reduce__(
        self,
    ) -> Tuple[
        Type[SortedDict[_KT, _VT]],
        Tuple[Callable[[_KT], Any], List[Tuple[_KT, _VT]]],
    ) -> tuple[
        type[SortedDict[_KT, _VT]],
        tuple[Callable[[_KT], Any], list[tuple[_KT, _VT]]],
    ]: ...
    def __repr__(self) -> str: ...
    def _check(self) -> None: ...
@@ -121,20 +117,20 @@ class SortedKeysView(KeysView[_KT_co], Sequence[_KT_co]):
    @overload
    def __getitem__(self, index: int) -> _KT_co: ...
    @overload
    def __getitem__(self, index: slice) -> List[_KT_co]: ...
    def __getitem__(self, index: slice) -> list[_KT_co]: ...
    def __delitem__(self, index: Union[int, slice]) -> None: ...

class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]):
    def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ...
class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[tuple[_KT_co, _VT_co]]):
    def __iter__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
    @overload
    def __getitem__(self, index: int) -> Tuple[_KT_co, _VT_co]: ...
    def __getitem__(self, index: int) -> tuple[_KT_co, _VT_co]: ...
    @overload
    def __getitem__(self, index: slice) -> List[Tuple[_KT_co, _VT_co]]: ...
    def __getitem__(self, index: slice) -> list[tuple[_KT_co, _VT_co]]: ...
    def __delitem__(self, index: Union[int, slice]) -> None: ...

class SortedValuesView(ValuesView[_VT_co], Sequence[_VT_co]):
    @overload
    def __getitem__(self, index: int) -> _VT_co: ...
    @overload
    def __getitem__(self, index: slice) -> List[_VT_co]: ...
    def __getitem__(self, index: slice) -> list[_VT_co]: ...
    def __delitem__(self, index: Union[int, slice]) -> None: ...

@@ -9,12 +9,9 @@ from typing import (
    Callable,
    Iterable,
    Iterator,
    List,
    MutableSequence,
    Optional,
    Sequence,
    Tuple,
    Type,
    TypeVar,
    Union,
    overload,
@@ -37,11 +34,11 @@ class SortedList(MutableSequence[_T]):
    ): ...
    # NB: currently mypy does not honour return type, see mypy #3307
    @overload
    def __new__(cls: Type[_SL], iterable: None, key: None) -> _SL: ...
    def __new__(cls: type[_SL], iterable: None, key: None) -> _SL: ...
    @overload
    def __new__(cls: Type[_SL], iterable: None, key: _Key[_T]) -> SortedKeyList[_T]: ...
    def __new__(cls: type[_SL], iterable: None, key: _Key[_T]) -> SortedKeyList[_T]: ...
    @overload
    def __new__(cls: Type[_SL], iterable: Iterable[_T], key: None) -> _SL: ...
    def __new__(cls: type[_SL], iterable: Iterable[_T], key: None) -> _SL: ...
    @overload
    def __new__(cls, iterable: Iterable[_T], key: _Key[_T]) -> SortedKeyList[_T]: ...
    @property
@@ -64,11 +61,11 @@ class SortedList(MutableSequence[_T]):
    @overload
    def __getitem__(self, index: int) -> _T: ...
    @overload
    def __getitem__(self, index: slice) -> List[_T]: ...
    def __getitem__(self, index: slice) -> list[_T]: ...
    @overload
    def _getitem(self, index: int) -> _T: ...
    @overload
    def _getitem(self, index: slice) -> List[_T]: ...
    def _getitem(self, index: slice) -> list[_T]: ...
    @overload
    def __setitem__(self, index: int, value: _T) -> None: ...
    @overload
@@ -95,7 +92,7 @@ class SortedList(MutableSequence[_T]):
        self,
        minimum: Optional[int] = ...,
        maximum: Optional[int] = ...,
        inclusive: Tuple[bool, bool] = ...,
        inclusive: tuple[bool, bool] = ...,
        reverse: bool = ...,
    ) -> Iterator[_T]: ...
    def bisect_left(self, value: _T) -> int: ...
@@ -151,14 +148,14 @@ class SortedKeyList(SortedList[_T]):
        self,
        minimum: Optional[int] = ...,
        maximum: Optional[int] = ...,
        inclusive: Tuple[bool, bool] = ...,
        inclusive: tuple[bool, bool] = ...,
        reverse: bool = ...,
    ) -> Iterator[_T]: ...
    def irange_key(
        self,
        min_key: Optional[Any] = ...,
        max_key: Optional[Any] = ...,
        inclusive: Tuple[bool, bool] = ...,
        inclusive: tuple[bool, bool] = ...,
        reserve: bool = ...,
    ) -> Iterator[_T]: ...
    def bisect_left(self, value: _T) -> int: ...

@@ -10,13 +10,9 @@ from typing import (
    Hashable,
    Iterable,
    Iterator,
    List,
    MutableSet,
    Optional,
    Sequence,
    Set,
    Tuple,
    Type,
    TypeVar,
    Union,
    overload,
@@ -37,7 +33,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
    ) -> None: ...
    @classmethod
    def _fromset(
        cls, values: Set[_T], key: Optional[_Key[_T]] = ...
        cls, values: set[_T], key: Optional[_Key[_T]] = ...
    ) -> SortedSet[_T]: ...
    @property
    def key(self) -> Optional[_Key[_T]]: ...
@@ -45,7 +41,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
    @overload
    def __getitem__(self, index: int) -> _T: ...
    @overload
    def __getitem__(self, index: slice) -> List[_T]: ...
    def __getitem__(self, index: slice) -> list[_T]: ...
    def __delitem__(self, index: Union[int, slice]) -> None: ...
    def __eq__(self, other: Any) -> bool: ...
    def __ne__(self, other: Any) -> bool: ...
@@ -94,7 +90,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
    def _update(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def __reduce__(
        self,
    ) -> Tuple[Type[SortedSet[_T]], Set[_T], Callable[[_T], Any]]: ...
    ) -> tuple[type[SortedSet[_T]], set[_T], Callable[[_T], Any]]: ...
    def __repr__(self) -> str: ...
    def _check(self) -> None: ...
    def bisect_left(self, value: _T) -> int: ...
@@ -109,7 +105,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
        self,
        minimum: Optional[_T] = ...,
        maximum: Optional[_T] = ...,
        inclusive: Tuple[bool, bool] = ...,
        inclusive: tuple[bool, bool] = ...,
        reverse: bool = ...,
    ) -> Iterator[_T]: ...
    def index(

@@ -15,7 +15,7 @@

"""Contains *incomplete* type hints for txredisapi."""

from typing import Any, List, Optional, Type, Union
from typing import Any, Optional, Union

from twisted.internet import protocol
from twisted.internet.defer import Deferred
@@ -39,7 +39,7 @@ class RedisProtocol(protocol.Protocol):
class SubscriberProtocol(RedisProtocol):
    def __init__(self, *args: object, **kwargs: object): ...
    password: Optional[str]
    def subscribe(self, channels: Union[str, List[str]]) -> "Deferred[None]": ...
    def subscribe(self, channels: Union[str, list[str]]) -> "Deferred[None]": ...
    def connectionMade(self) -> None: ...
    # type-ignore: twisted.internet.protocol.Protocol provides a default argument for
    # `reason`. txredisapi's LineReceiver Protocol doesn't. But that's fine: it's what's
@@ -69,7 +69,7 @@ class UnixConnectionHandler(ConnectionHandler): ...
class RedisFactory(protocol.ReconnectingClientFactory):
    continueTrying: bool
    handler: ConnectionHandler
    pool: List[RedisProtocol]
    pool: list[RedisProtocol]
    replyTimeout: Optional[int]
    def __init__(
        self,
@@ -77,7 +77,7 @@ class RedisFactory(protocol.ReconnectingClientFactory):
        dbid: Optional[int],
        poolsize: int,
        isLazy: bool = False,
        handler: Type = ConnectionHandler,
        handler: type = ConnectionHandler,
        charset: str = "utf-8",
        password: Optional[str] = None,
        replyTimeout: Optional[int] = None,

@@ -24,7 +24,7 @@

import os
import sys
from typing import Any, Dict
from typing import Any

from PIL import ImageFile

@@ -39,8 +39,8 @@ ImageFile.LOAD_TRUNCATED_IMAGES = True
# Note that we use an (unneeded) variable here so that pyupgrade doesn't nuke the
# if-statement completely.
py_version = sys.version_info
if py_version < (3, 9):
    print("Synapse requires Python 3.9 or above.")
if py_version < (3, 10):
    print("Synapse requires Python 3.10 or above.")
    sys.exit(1)

# Allow using the asyncio reactor via env var.
@@ -70,7 +70,7 @@ try:
    from canonicaljson import register_preserialisation_callback
    from immutabledict import immutabledict

    def _immutabledict_cb(d: immutabledict) -> Dict[str, Any]:
    def _immutabledict_cb(d: immutabledict) -> dict[str, Any]:
        try:
            return d._dict
        except Exception:

@@ -1,104 +0,0 @@
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2023 Maxwell G <maxwell@gtmx.me>
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#

from typing import TYPE_CHECKING

from packaging.version import Version

try:
    from pydantic import __version__ as pydantic_version
except ImportError:
    import importlib.metadata

    pydantic_version = importlib.metadata.version("pydantic")

HAS_PYDANTIC_V2: bool = Version(pydantic_version).major == 2

if TYPE_CHECKING or HAS_PYDANTIC_V2:
    from pydantic.v1 import (
        AnyHttpUrl,
        BaseModel,
        Extra,
        Field,
        FilePath,
        MissingError,
        PydanticValueError,
        StrictBool,
        StrictInt,
        StrictStr,
        ValidationError,
        conbytes,
        confloat,
        conint,
        constr,
        parse_obj_as,
        root_validator,
        validator,
    )
    from pydantic.v1.error_wrappers import ErrorWrapper
    from pydantic.v1.typing import get_args
else:
    from pydantic import (
        AnyHttpUrl,
        BaseModel,
        Extra,
        Field,
        FilePath,
        MissingError,
        PydanticValueError,
        StrictBool,
        StrictInt,
        StrictStr,
        ValidationError,
        conbytes,
        confloat,
        conint,
        constr,
        parse_obj_as,
        root_validator,
        validator,
    )
    from pydantic.error_wrappers import ErrorWrapper
    from pydantic.typing import get_args

__all__ = (
    "HAS_PYDANTIC_V2",
    "AnyHttpUrl",
    "BaseModel",
    "constr",
    "conbytes",
    "conint",
    "confloat",
    "ErrorWrapper",
    "Extra",
    "Field",
    "FilePath",
    "get_args",
    "MissingError",
    "parse_obj_as",
    "PydanticValueError",
    "StrictBool",
    "StrictInt",
    "StrictStr",
    "ValidationError",
    "validator",
    "root_validator",
)
@@ -25,7 +25,7 @@ import logging
import re
from collections import defaultdict
from dataclasses import dataclass
from typing import Dict, Iterable, Optional, Pattern, Set, Tuple
from typing import Iterable, Optional, Pattern

import yaml

@@ -81,7 +81,7 @@ class EnumerationResource(HttpServer):
    """

    def __init__(self, is_worker: bool) -> None:
        self.registrations: Dict[Tuple[str, str], EndpointDescription] = {}
        self.registrations: dict[tuple[str, str], EndpointDescription] = {}
        self._is_worker = is_worker

    def register_paths(
@@ -115,7 +115,7 @@ class EnumerationResource(HttpServer):

def get_registered_paths_for_hs(
    hs: HomeServer,
) -> Dict[Tuple[str, str], EndpointDescription]:
) -> dict[tuple[str, str], EndpointDescription]:
    """
    Given a homeserver, get all registered endpoints and their descriptions.
    """
@@ -142,7 +142,7 @@ def get_registered_paths_for_hs(

def get_registered_paths_for_default(
    worker_app: Optional[str], base_config: HomeServerConfig
) -> Dict[Tuple[str, str], EndpointDescription]:
) -> dict[tuple[str, str], EndpointDescription]:
    """
    Given the name of a worker application and a base homeserver configuration,
    returns:
@@ -168,9 +168,9 @@ def get_registered_paths_for_default(


def elide_http_methods_if_unconflicting(
    registrations: Dict[Tuple[str, str], EndpointDescription],
    all_possible_registrations: Dict[Tuple[str, str], EndpointDescription],
) -> Dict[Tuple[str, str], EndpointDescription]:
    registrations: dict[tuple[str, str], EndpointDescription],
    all_possible_registrations: dict[tuple[str, str], EndpointDescription],
) -> dict[tuple[str, str], EndpointDescription]:
    """
    Elides HTTP methods (by replacing them with `*`) if all possible registered methods
    can be handled by the worker whose registration map is `registrations`.
@@ -180,13 +180,13 @@ def elide_http_methods_if_unconflicting(
    """

    def paths_to_methods_dict(
        methods_and_paths: Iterable[Tuple[str, str]],
    ) -> Dict[str, Set[str]]:
        methods_and_paths: Iterable[tuple[str, str]],
    ) -> dict[str, set[str]]:
        """
        Given (method, path) pairs, produces a dict from path to set of methods
        available at that path.
        """
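        # Editor's illustrative comment (not part of the diff):
        # [("GET", "/a"), ("PUT", "/a")] -> {"/a": {"GET", "PUT"}}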
        result: Dict[str, Set[str]] = {}
        result: dict[str, set[str]] = {}
        for method, path in methods_and_paths:
            result.setdefault(path, set()).add(method)
        return result
@@ -210,8 +210,8 @@ def elide_http_methods_if_unconflicting(


def simplify_path_regexes(
    registrations: Dict[Tuple[str, str], EndpointDescription],
) -> Dict[Tuple[str, str], EndpointDescription]:
    registrations: dict[tuple[str, str], EndpointDescription],
) -> dict[tuple[str, str], EndpointDescription]:
    """
    Simplify all the path regexes for the dict of endpoint descriptions,
    so that we don't use the Python-specific regex extensions
@@ -270,8 +270,8 @@ def main() -> None:

    # TODO SSO endpoints (pick_idp etc) NOT REGISTERED BY THIS SCRIPT

    categories_to_methods_and_paths: Dict[
        Optional[str], Dict[Tuple[str, str], EndpointDescription]
    categories_to_methods_and_paths: dict[
        Optional[str], dict[tuple[str, str], EndpointDescription]
    ] = defaultdict(dict)

    for (method, path), desc in elided_worker_paths.items():
@@ -283,7 +283,7 @@ def main() -> None:

def print_category(
    category_name: Optional[str],
    elided_worker_paths: Dict[Tuple[str, str], EndpointDescription],
    elided_worker_paths: dict[tuple[str, str], EndpointDescription],
) -> None:
    """
    Prints out a category, in documentation page style.

@@ -73,8 +73,18 @@ def main() -> None:

    pw = unicodedata.normalize("NFKC", password)

    bytes_to_hash = pw.encode("utf8") + password_pepper.encode("utf8")
    if len(bytes_to_hash) > 72:
        # bcrypt only looks at the first 72 bytes
        print(
            f"Password + pepper is too long ({len(bytes_to_hash)} bytes); truncating to 72 bytes for bcrypt. "
            "This is expected behaviour and will not affect a user's ability to log in. 72 bytes is "
            "sufficient entropy for a password."
        )
        bytes_to_hash = bytes_to_hash[:72]

    hashed = bcrypt.hashpw(
        pw.encode("utf8") + password_pepper.encode("utf8"),
        bytes_to_hash,
        bcrypt.gensalt(bcrypt_rounds),
    ).decode("ascii")
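    # Editor's note (not part of the diff): newer releases of the `bcrypt`
    # package reject inputs longer than 72 bytes rather than silently truncating
    # them, e.g. bcrypt.hashpw(b"x" * 73, bcrypt.gensalt()) raises ValueError,
    # which is why the input is truncated explicitly above.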


@@ -26,7 +26,7 @@ import hashlib
import hmac
import logging
import sys
from typing import Any, Callable, Dict, Optional
from typing import Any, Callable, Optional

import requests
import yaml
@@ -262,7 +262,7 @@ def main() -> None:

    args = parser.parse_args()

    config: Optional[Dict[str, Any]] = None
    config: Optional[dict[str, Any]] = None
    if "config" in args and args.config:
        config = yaml.safe_load(args.config)

@@ -350,7 +350,7 @@ def _read_file(file_path: Any, config_path: str) -> str:
        sys.exit(1)


def _find_client_listener(config: Dict[str, Any]) -> Optional[str]:
def _find_client_listener(config: dict[str, Any]) -> Optional[str]:
    # try to find a listener in the config. Returns a host:port pair
    for listener in config.get("listeners", []):
        if listener.get("type") != "http" or listener.get("tls", False):

@@ -23,7 +23,6 @@ import argparse
import sys
import time
from datetime import datetime
from typing import List

import attr

@@ -50,15 +49,15 @@ class ReviewConfig(RootConfig):
class UserInfo:
    user_id: str
    creation_ts: int
    emails: List[str] = attr.Factory(list)
    private_rooms: List[str] = attr.Factory(list)
    public_rooms: List[str] = attr.Factory(list)
    ips: List[str] = attr.Factory(list)
    emails: list[str] = attr.Factory(list)
    private_rooms: list[str] = attr.Factory(list)
    public_rooms: list[str] = attr.Factory(list)
    ips: list[str] = attr.Factory(list)


def get_recent_users(
    txn: LoggingTransaction, since_ms: int, exclude_app_service: bool
) -> List[UserInfo]:
) -> list[UserInfo]:
    """Fetches recently registered users and some info on them."""

    sql = """

@@ -33,15 +33,10 @@ from typing import (
    Any,
    Awaitable,
    Callable,
    Dict,
    Generator,
    Iterable,
    List,
    NoReturn,
    Optional,
    Set,
    Tuple,
    Type,
    TypedDict,
    TypeVar,
    cast,
@@ -98,7 +93,6 @@ from synapse.storage.databases.state.bg_updates import StateBackgroundUpdateStor
from synapse.storage.engines import create_engine
from synapse.storage.prepare_database import prepare_database
from synapse.types import ISynapseReactor
from synapse.util import SYNAPSE_VERSION

# Cast safety: Twisted does some naughty magic which replaces the
# twisted.internet.reactor module with a Reactor instance at runtime.
@@ -245,7 +239,7 @@ end_error: Optional[str] = None
# not the error then the script will show nothing outside of what's printed in the run
# function. If both are defined, the script will print both the error and the stacktrace.
end_error_exec_info: Optional[
    Tuple[Type[BaseException], BaseException, TracebackType]
    tuple[type[BaseException], BaseException, TracebackType]
] = None

R = TypeVar("R")
@@ -282,8 +276,8 @@ class Store(
    def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]:
        return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)

    def execute_sql(self, sql: str, *args: object) -> Awaitable[List[Tuple]]:
        def r(txn: LoggingTransaction) -> List[Tuple]:
    def execute_sql(self, sql: str, *args: object) -> Awaitable[list[tuple]]:
        def r(txn: LoggingTransaction) -> list[tuple]:
            txn.execute(sql, args)
            return txn.fetchall()

@@ -293,8 +287,8 @@ class Store(
        self,
        txn: LoggingTransaction,
        table: str,
        headers: List[str],
        rows: List[Tuple],
        headers: list[str],
        rows: list[tuple],
        override_system_value: bool = False,
    ) -> None:
        sql = "INSERT INTO %s (%s) %s VALUES (%s)" % (
@@ -325,14 +319,13 @@ class MockHomeserver(HomeServer):
            hostname=config.server.server_name,
            config=config,
            reactor=reactor,
            version_string=f"Synapse/{SYNAPSE_VERSION}",
        )


class Porter:
    def __init__(
        self,
        sqlite_config: Dict[str, Any],
        sqlite_config: dict[str, Any],
        progress: "Progress",
        batch_size: int,
        hs: HomeServer,
@@ -342,7 +335,7 @@ class Porter:
        self.batch_size = batch_size
        self.hs = hs

    async def setup_table(self, table: str) -> Tuple[str, int, int, int, int]:
    async def setup_table(self, table: str) -> tuple[str, int, int, int, int]:
        if table in APPEND_ONLY_TABLES:
            # It's safe to just carry on inserting.
            row = await self.postgres_store.db_pool.simple_select_one(
@@ -405,10 +398,10 @@ class Porter:

        return table, already_ported, total_to_port, forward_chunk, backward_chunk

    async def get_table_constraints(self) -> Dict[str, Set[str]]:
    async def get_table_constraints(self) -> dict[str, set[str]]:
        """Returns a map of tables that have foreign key constraints to tables they depend on."""

        def _get_constraints(txn: LoggingTransaction) -> Dict[str, Set[str]]:
        def _get_constraints(txn: LoggingTransaction) -> dict[str, set[str]]:
            # We can pull the information about foreign key constraints out from
            # the postgres schema tables.
            sql = """
@@ -424,7 +417,7 @@ class Porter:
            """
            txn.execute(sql)

            results: Dict[str, Set[str]] = {}
            results: dict[str, set[str]] = {}
            for table, foreign_table in txn:
                results.setdefault(table, set()).add(foreign_table)
            return results
@@ -492,7 +485,7 @@ class Porter:

        def r(
            txn: LoggingTransaction,
        ) -> Tuple[Optional[List[str]], List[Tuple], List[Tuple]]:
        ) -> tuple[Optional[list[str]], list[tuple], list[tuple]]:
            forward_rows = []
            backward_rows = []
            if do_forward[0]:
@@ -509,7 +502,7 @@ class Porter:

            if forward_rows or backward_rows:
                assert txn.description is not None
                headers: Optional[List[str]] = [
                headers: Optional[list[str]] = [
                    column[0] for column in txn.description
                ]
            else:
@@ -576,7 +569,7 @@ class Porter:

        while True:

            def r(txn: LoggingTransaction) -> Tuple[List[str], List[Tuple]]:
            def r(txn: LoggingTransaction) -> tuple[list[str], list[tuple]]:
                txn.execute(select, (forward_chunk, self.batch_size))
                rows = txn.fetchall()
                assert txn.description is not None
@@ -958,7 +951,7 @@ class Porter:
        self.progress.set_state("Copying to postgres")

        constraints = await self.get_table_constraints()
        tables_ported = set()  # type: Set[str]
        tables_ported = set()  # type: set[str]

        while tables_to_port_info_map:
            # Pulls out all tables that are still to be ported and which
@@ -997,8 +990,8 @@ class Porter:
        reactor.stop()

    def _convert_rows(
        self, table: str, headers: List[str], rows: List[Tuple]
    ) -> List[Tuple]:
        self, table: str, headers: list[str], rows: list[tuple]
    ) -> list[tuple]:
        bool_col_names = BOOLEAN_COLUMNS.get(table, [])

        bool_cols = [i for i, h in enumerate(headers) if h in bool_col_names]
@@ -1032,7 +1025,7 @@ class Porter:

        return outrows

    async def _setup_sent_transactions(self) -> Tuple[int, int, int]:
    async def _setup_sent_transactions(self) -> tuple[int, int, int]:
        # Only save things from the last day
        yesterday = int(time.time() * 1000) - 86400000

@@ -1044,7 +1037,7 @@ class Porter:
            ")"
        )

        def r(txn: LoggingTransaction) -> Tuple[List[str], List[Tuple]]:
        def r(txn: LoggingTransaction) -> tuple[list[str], list[tuple]]:
            txn.execute(select)
            rows = txn.fetchall()
            assert txn.description is not None
@@ -1114,14 +1107,14 @@ class Porter:
        self, table: str, forward_chunk: int, backward_chunk: int
    ) -> int:
        frows = cast(
            List[Tuple[int]],
            list[tuple[int]],
            await self.sqlite_store.execute_sql(
                "SELECT count(*) FROM %s WHERE rowid >= ?" % (table,), forward_chunk
            ),
        )

        brows = cast(
            List[Tuple[int]],
            list[tuple[int]],
            await self.sqlite_store.execute_sql(
                "SELECT count(*) FROM %s WHERE rowid <= ?" % (table,), backward_chunk
            ),
@@ -1138,7 +1131,7 @@ class Porter:

    async def _get_total_count_to_port(
        self, table: str, forward_chunk: int, backward_chunk: int
    ) -> Tuple[int, int]:
    ) -> tuple[int, int]:
        remaining, done = await make_deferred_yieldable(
            defer.gatherResults(
                [
@@ -1223,7 +1216,7 @@ class Porter:
    async def _setup_sequence(
        self,
        sequence_name: str,
        stream_id_tables: Iterable[Tuple[str, str]],
        stream_id_tables: Iterable[tuple[str, str]],
    ) -> None:
        """Set a sequence to the correct value."""
        current_stream_ids = []
@@ -1333,7 +1326,7 @@ class Progress:
    """Used to report progress of the port"""

    def __init__(self) -> None:
        self.tables: Dict[str, TableProgress] = {}
        self.tables: dict[str, TableProgress] = {}

        self.start_time = int(time.time())


@@ -31,7 +31,6 @@ from synapse.config.homeserver import HomeServerConfig
from synapse.server import HomeServer
from synapse.storage import DataStore
from synapse.types import ISynapseReactor
from synapse.util import SYNAPSE_VERSION

# Cast safety: Twisted does some naughty magic which replaces the
# twisted.internet.reactor module with a Reactor instance at runtime.
@@ -47,7 +46,6 @@ class MockHomeserver(HomeServer):
            hostname=config.server.server_name,
            config=config,
            reactor=reactor,
            version_string=f"Synapse/{SYNAPSE_VERSION}",
        )



@@ -18,7 +18,7 @@
# [This file includes modifications made by New Vector Limited]
#
#
from typing import TYPE_CHECKING, Optional, Protocol, Tuple
from typing import TYPE_CHECKING, Optional, Protocol

from prometheus_client import Histogram

@@ -51,7 +51,7 @@ class Auth(Protocol):
        room_id: str,
        requester: Requester,
        allow_departed_users: bool = False,
    ) -> Tuple[str, Optional[str]]:
    ) -> tuple[str, Optional[str]]:
        """Check if the user is in the room, or was at some point.
        Args:
            room_id: The room to check.

@@ -190,7 +190,7 @@ class Auth(Protocol):

    async def check_user_in_room_or_world_readable(
        self, room_id: str, requester: Requester, allow_departed_users: bool = False
    ) -> Tuple[str, Optional[str]]:
    ) -> tuple[str, Optional[str]]:
        """Checks that the user is or was in the room or the room is world
        readable. If it isn't then an exception is raised.


@@ -19,7 +19,7 @@
#
#
import logging
from typing import TYPE_CHECKING, Optional, Tuple
from typing import TYPE_CHECKING, Optional

from netaddr import IPAddress

@@ -64,7 +64,7 @@ class BaseAuth:
        room_id: str,
        requester: Requester,
        allow_departed_users: bool = False,
    ) -> Tuple[str, Optional[str]]:
    ) -> tuple[str, Optional[str]]:
        """Check if the user is in the room, or was at some point.
        Args:
            room_id: The room to check.

@@ -114,7 +114,7 @@ class BaseAuth:
    @trace
    async def check_user_in_room_or_world_readable(
        self, room_id: str, requester: Requester, allow_departed_users: bool = False
    ) -> Tuple[str, Optional[str]]:
    ) -> tuple[str, Optional[str]]:
        """Checks that the user is or was in the room or the room is world
        readable. If it isn't then an exception is raised.

@@ -302,12 +302,9 @@ class BaseAuth:
          (the user_id URI parameter allows an application service to masquerade
          any applicable user in its namespace)
        - what device the application service should be treated as controlling
          (the device_id[^1] URI parameter allows an application service to masquerade
          (the device_id URI parameter allows an application service to masquerade
          as any device that exists for the relevant user)

        [^1] Unstable and provided by MSC3202.
             Must use `org.matrix.msc3202.device_id` in place of `device_id` for now.

        Returns:
            the application service `Requester` of that request

@@ -319,7 +316,8 @@ class BaseAuth:
        - The returned device ID, if present, has been checked to be a valid device ID
          for the returned user ID.
        """
        DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id"
        # TODO: We can drop unstable support after 2026-01-01 (a couple of months after stable support)
        UNSTABLE_DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id"

        app_service = self.store.get_app_service_by_token(access_token)
        if app_service is None:
@@ -341,13 +339,11 @@ class BaseAuth:
        else:
            effective_user_id = app_service.sender

        effective_device_id: Optional[str] = None

        if (
            self.hs.config.experimental.msc3202_device_masquerading_enabled
            and DEVICE_ID_ARG_NAME in request.args
        ):
            effective_device_id = request.args[DEVICE_ID_ARG_NAME][0].decode("utf8")
        effective_device_id_args = request.args.get(
            b"device_id", request.args.get(UNSTABLE_DEVICE_ID_ARG_NAME)
        )
        if effective_device_id_args:
            effective_device_id = effective_device_id_args[0].decode("utf8")
# We only just set this so it can't be None!
|
||||
assert effective_device_id is not None
|
||||
device_opt = await self.store.get_device(
|
||||
@@ -359,6 +355,8 @@ class BaseAuth:
|
||||
f"Application service trying to use a device that doesn't exist ('{effective_device_id}' for {effective_user_id})",
|
||||
Codes.UNKNOWN_DEVICE,
|
||||
)
|
||||
else:
|
||||
effective_device_id = None
|
||||
|
||||
return create_requester(
|
||||
effective_user_id, app_service=app_service, device_id=effective_device_id
|
||||
|
||||
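The base-auth hunks above switch application-service device masquerading from the unstable `org.matrix.msc3202.device_id` query parameter to the stable `device_id` name, keeping the unstable one as a fallback. A self-contained sketch of that lookup, using a plain dict in place of Twisted's `request.args` (bytes keys mapping to lists of bytes values); the function name is illustrative:

```python
from typing import Optional

UNSTABLE_DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id"

def effective_device_id(args: dict[bytes, list[bytes]]) -> Optional[str]:
    # Prefer the stable `device_id` parameter, falling back to the
    # unstable MSC3202 one; either may be absent.
    values = args.get(b"device_id", args.get(UNSTABLE_DEVICE_ID_ARG_NAME))
    if values:
        return values[0].decode("utf8")
    return None

assert effective_device_id({b"device_id": [b"ABCDEF"]}) == "ABCDEF"
assert effective_device_id({UNSTABLE_DEVICE_ID_ARG_NAME: [b"GHIJKL"]}) == "GHIJKL"
assert effective_device_id({}) is None
```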
@@ -13,17 +13,19 @@
 #
 #
 import logging
-from typing import TYPE_CHECKING, Optional, Set
+from typing import TYPE_CHECKING, Optional
 from urllib.parse import urlencode

-from synapse._pydantic_compat import (
+from pydantic import (
+    AnyHttpUrl,
     BaseModel,
-    Extra,
+    ConfigDict,
     StrictBool,
     StrictInt,
     StrictStr,
     ValidationError,
 )

 from synapse.api.auth.base import BaseAuth
 from synapse.api.errors import (
     AuthError,
@@ -63,8 +65,7 @@ STABLE_SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:client:device:"


 class ServerMetadata(BaseModel):
-    class Config:
-        extra = Extra.allow
+    model_config = ConfigDict(extra="allow")

     issuer: StrictStr
     account_management_uri: StrictStr
@@ -73,14 +74,12 @@ class ServerMetadata(BaseModel):
 class IntrospectionResponse(BaseModel):
     retrieved_at_ms: StrictInt
     active: StrictBool
-    scope: Optional[StrictStr]
-    username: Optional[StrictStr]
-    sub: Optional[StrictStr]
-    device_id: Optional[StrictStr]
-    expires_in: Optional[StrictInt]
-
-    class Config:
-        extra = Extra.allow
+    scope: Optional[StrictStr] = None
+    username: Optional[StrictStr] = None
+    sub: Optional[StrictStr] = None
+    device_id: Optional[StrictStr] = None
+    expires_in: Optional[StrictInt] = None
+    model_config = ConfigDict(extra="allow")

     def get_scope_set(self) -> set[str]:
         if not self.scope:
@@ -148,11 +147,33 @@ class MasDelegatedAuth(BaseAuth):

     @property
     def _metadata_url(self) -> str:
-        return f"{self._config.endpoint.rstrip('/')}/.well-known/openid-configuration"
+        return str(
+            AnyHttpUrl.build(
+                scheme=self._config.endpoint.scheme,
+                username=self._config.endpoint.username,
+                password=self._config.endpoint.password,
+                host=self._config.endpoint.host or "",
+                port=self._config.endpoint.port,
+                path=".well-known/openid-configuration",
+                query=None,
+                fragment=None,
+            )
+        )

     @property
     def _introspection_endpoint(self) -> str:
-        return f"{self._config.endpoint.rstrip('/')}/oauth2/introspect"
+        return str(
+            AnyHttpUrl.build(
+                scheme=self._config.endpoint.scheme,
+                username=self._config.endpoint.username,
+                password=self._config.endpoint.password,
+                host=self._config.endpoint.host or "",
+                port=self._config.endpoint.port,
+                path="oauth2/introspect",
+                query=None,
+                fragment=None,
+            )
+        )

     async def _load_metadata(self) -> ServerMetadata:
         response = await self._http_client.get_json(self._metadata_url)
@@ -369,7 +390,7 @@ class MasDelegatedAuth(BaseAuth):
         # We only allow a single device_id in the scope, so we find them all in the
         # scope list, and raise if there are more than one. The OIDC server should be
         # the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
-        device_ids: Set[str] = set()
+        device_ids: set[str] = set()
         for tok in scope:
             if tok.startswith(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX):
                 device_ids.add(tok[len(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX) :])

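The MAS-auth hunks above also migrate the models from Pydantic v1 idioms to v2: the inner `class Config` with the `Extra` enum becomes `model_config = ConfigDict(...)`, and `Optional` fields gain explicit `= None` defaults, since v2 no longer treats `Optional` as implicitly defaulted. A minimal sketch under those assumptions (requires pydantic >= 2; the model name is illustrative):

```python
from typing import Optional

from pydantic import BaseModel, ConfigDict, StrictStr

class IntrospectionLike(BaseModel):
    # v2 style: configuration lives on `model_config` rather than in an
    # inner `class Config`, and "extra" is a plain string, not an enum.
    model_config = ConfigDict(extra="allow")

    # v2 requires an explicit default for fields that may be omitted.
    scope: Optional[StrictStr] = None

m = IntrospectionLike(scope="openid", issuer="https://auth.example.com")
print(m.scope)        # openid
print(m.model_extra)  # {'issuer': 'https://auth.example.com'} (kept by extra="allow")
```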
@@ -20,7 +20,7 @@
 #
 import logging
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set
+from typing import TYPE_CHECKING, Any, Callable, Optional
 from urllib.parse import urlencode

 from authlib.oauth2 import ClientAuth
@@ -70,7 +70,7 @@ STABLE_SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:client:device:"
 SCOPE_SYNAPSE_ADMIN = "urn:synapse:admin:*"


-def scope_to_list(scope: str) -> List[str]:
+def scope_to_list(scope: str) -> list[str]:
     """Convert a scope string to a list of scope tokens"""
     return scope.strip().split(" ")

@@ -96,7 +96,7 @@ class IntrospectionResult:
         absolute_expiry_ms = expires_in * 1000 + self.retrieved_at_ms
         return now_ms < absolute_expiry_ms

-    def get_scope_list(self) -> List[str]:
+    def get_scope_list(self) -> list[str]:
         value = self._inner.get("scope")
         if not isinstance(value, str):
             return []
@@ -264,7 +264,7 @@ class MSC3861DelegatedAuth(BaseAuth):
         logger.warning("Failed to load metadata:", exc_info=True)
         return None

-    async def auth_metadata(self) -> Dict[str, Any]:
+    async def auth_metadata(self) -> dict[str, Any]:
         """
         Returns the auth metadata dict
         """
@@ -303,7 +303,7 @@ class MSC3861DelegatedAuth(BaseAuth):
         # By default, we shouldn't cache the result unless we know it's valid
         cache_context.should_cache = False
         introspection_endpoint = await self._introspection_endpoint()
-        raw_headers: Dict[str, str] = {
+        raw_headers: dict[str, str] = {
             "Content-Type": "application/x-www-form-urlencoded",
             "Accept": "application/json",
             # Tell MAS that we support reading the device ID as an explicit
@@ -520,7 +520,7 @@ class MSC3861DelegatedAuth(BaseAuth):
             raise InvalidClientTokenError("Token is not active")

         # Let's look at the scope
-        scope: List[str] = introspection_result.get_scope_list()
+        scope: list[str] = introspection_result.get_scope_list()

         # Determine type of user based on presence of particular scopes
         has_user_scope = (
@@ -575,7 +575,7 @@ class MSC3861DelegatedAuth(BaseAuth):
         # We only allow a single device_id in the scope, so we find them all in the
         # scope list, and raise if there are more than one. The OIDC server should be
         # the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
-        device_ids: Set[str] = set()
+        device_ids: set[str] = set()
         for tok in scope:
             if tok.startswith(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX):
                 device_ids.add(tok[len(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX) :])

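For reference, the device ID is recovered from the OAuth 2.0 scope by splitting the scope string on spaces and stripping the Matrix device prefix from matching tokens. The prefix value below is the stable one defined in the diff; the scope string itself is illustrative:

```python
SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:client:device:"

def scope_to_list(scope: str) -> list[str]:
    """Convert a scope string to a list of scope tokens."""
    return scope.strip().split(" ")

scope = "urn:matrix:client:api:* urn:matrix:client:device:ABCDEF"
device_ids: set[str] = {
    tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :]
    for tok in scope_to_list(scope)
    if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX)
}
assert device_ids == {"ABCDEF"}  # more than one device here would be an error
```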
@@ -26,7 +26,7 @@ import math
 import typing
 from enum import Enum
 from http import HTTPStatus
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Optional, Union

 from twisted.web import http

@@ -166,7 +166,7 @@ class CodeMessageException(RuntimeError):
         self,
         code: Union[int, HTTPStatus],
         msg: str,
-        headers: Optional[Dict[str, str]] = None,
+        headers: Optional[dict[str, str]] = None,
     ):
         super().__init__("%d: %s" % (code, msg))

@@ -201,7 +201,7 @@ class RedirectException(CodeMessageException):
         super().__init__(code=http_code, msg=msg)
         self.location = location

-        self.cookies: List[bytes] = []
+        self.cookies: list[bytes] = []


 class SynapseError(CodeMessageException):
@@ -223,8 +223,8 @@ class SynapseError(CodeMessageException):
         code: int,
         msg: str,
         errcode: str = Codes.UNKNOWN,
-        additional_fields: Optional[Dict] = None,
-        headers: Optional[Dict[str, str]] = None,
+        additional_fields: Optional[dict] = None,
+        headers: Optional[dict[str, str]] = None,
     ):
         """Constructs a synapse error.

@@ -236,7 +236,7 @@ class SynapseError(CodeMessageException):
         super().__init__(code, msg, headers)
         self.errcode = errcode
         if additional_fields is None:
-            self._additional_fields: Dict = {}
+            self._additional_fields: dict = {}
         else:
             self._additional_fields = dict(additional_fields)

@@ -276,7 +276,7 @@ class ProxiedRequestError(SynapseError):
         code: int,
         msg: str,
         errcode: str = Codes.UNKNOWN,
-        additional_fields: Optional[Dict] = None,
+        additional_fields: Optional[dict] = None,
     ):
         super().__init__(code, msg, errcode, additional_fields)

@@ -409,7 +409,7 @@ class OAuthInsufficientScopeError(SynapseError):

     def __init__(
         self,
-        required_scopes: List[str],
+        required_scopes: list[str],
     ):
         headers = {
             "WWW-Authenticate": 'Bearer error="insufficient_scope", scope="%s"'

@@ -26,12 +26,9 @@ from typing import (
     Awaitable,
     Callable,
     Collection,
-    Dict,
     Iterable,
-    List,
     Mapping,
     Optional,
-    Set,
     TypeVar,
     Union,
 )
@@ -248,34 +245,34 @@ class FilterCollection:

     async def filter_presence(
         self, presence_states: Iterable[UserPresenceState]
-    ) -> List[UserPresenceState]:
+    ) -> list[UserPresenceState]:
         return await self._presence_filter.filter(presence_states)

     async def filter_global_account_data(
         self, events: Iterable[JsonDict]
-    ) -> List[JsonDict]:
+    ) -> list[JsonDict]:
         return await self._global_account_data_filter.filter(events)

-    async def filter_room_state(self, events: Iterable[EventBase]) -> List[EventBase]:
+    async def filter_room_state(self, events: Iterable[EventBase]) -> list[EventBase]:
         return await self._room_state_filter.filter(
             await self._room_filter.filter(events)
         )

     async def filter_room_timeline(
         self, events: Iterable[EventBase]
-    ) -> List[EventBase]:
+    ) -> list[EventBase]:
         return await self._room_timeline_filter.filter(
             await self._room_filter.filter(events)
         )

-    async def filter_room_ephemeral(self, events: Iterable[JsonDict]) -> List[JsonDict]:
+    async def filter_room_ephemeral(self, events: Iterable[JsonDict]) -> list[JsonDict]:
         return await self._room_ephemeral_filter.filter(
             await self._room_filter.filter(events)
         )

     async def filter_room_account_data(
         self, events: Iterable[JsonDict]
-    ) -> List[JsonDict]:
+    ) -> list[JsonDict]:
         return await self._room_account_data_filter.filter(
             await self._room_filter.filter(events)
         )
@@ -440,7 +437,7 @@ class Filter:

         return True

-    def _check_fields(self, field_matchers: Dict[str, Callable[[str], bool]]) -> bool:
+    def _check_fields(self, field_matchers: dict[str, Callable[[str], bool]]) -> bool:
         """Checks whether the filter matches the given event fields.

         Args:
@@ -474,7 +471,7 @@ class Filter:
         # Otherwise, accept it.
         return True

-    def filter_rooms(self, room_ids: Iterable[str]) -> Set[str]:
+    def filter_rooms(self, room_ids: Iterable[str]) -> set[str]:
         """Apply the 'rooms' filter to a given list of rooms.

         Args:
@@ -496,7 +493,7 @@ class Filter:

     async def _check_event_relations(
         self, events: Collection[FilterEvent]
-    ) -> List[FilterEvent]:
+    ) -> list[FilterEvent]:
         # The event IDs to check, mypy doesn't understand the isinstance check.
         event_ids = [event.event_id for event in events if isinstance(event, EventBase)]  # type: ignore[attr-defined]
         event_ids_to_keep = set(
@@ -511,7 +508,7 @@ class Filter:
             if not isinstance(event, EventBase) or event.event_id in event_ids_to_keep
         ]

-    async def filter(self, events: Iterable[FilterEvent]) -> List[FilterEvent]:
+    async def filter(self, events: Iterable[FilterEvent]) -> list[FilterEvent]:
         result = [event for event in events if self._check(event)]

         if self.related_by_senders or self.related_by_rel_types:

@@ -20,13 +20,14 @@
 #
 #

-from typing import TYPE_CHECKING, Dict, Hashable, Optional, Tuple
+from typing import TYPE_CHECKING, Hashable, Optional

 from synapse.api.errors import LimitExceededError
 from synapse.config.ratelimiting import RatelimitSettings
 from synapse.storage.databases.main import DataStore
 from synapse.types import Requester
 from synapse.util.clock import Clock
+from synapse.util.wheel_timer import WheelTimer

 if TYPE_CHECKING:
     # To avoid circular imports:
@@ -92,9 +93,14 @@ class Ratelimiter:
         # * The number of tokens currently in the bucket,
         # * The time point when the bucket was last completely empty, and
         # * The rate_hz (leak rate) of this particular bucket.
-        self.actions: Dict[Hashable, Tuple[float, float, float]] = {}
+        self.actions: dict[Hashable, tuple[int, float, float]] = {}

-        self.clock.looping_call(self._prune_message_counts, 60 * 1000)
+        # Records when actions should potentially be pruned. Note that we don't
+        # need to be accurate here, as this is just a cleanup job of `actions`
+        # and doesn't affect correctness.
+        self._timer: WheelTimer[Hashable] = WheelTimer()
+
+        self.clock.looping_call(self._prune_message_counts, 15 * 1000)

     def _get_key(
         self, requester: Optional[Requester], key: Optional[Hashable]
@@ -109,9 +115,9 @@ class Ratelimiter:

     def _get_action_counts(
         self, key: Hashable, time_now_s: float
-    ) -> Tuple[float, float, float]:
+    ) -> tuple[int, float, float]:
         """Retrieve the action counts, with a fallback representing an empty bucket."""
-        return self.actions.get(key, (0.0, time_now_s, 0.0))
+        return self.actions.get(key, (0, time_now_s, self.rate_hz))

     async def can_do_action(
         self,
@@ -122,7 +128,7 @@ class Ratelimiter:
         update: bool = True,
         n_actions: int = 1,
         _time_now_s: Optional[float] = None,
-    ) -> Tuple[bool, float]:
+    ) -> tuple[bool, float]:
         """Can the entity (e.g. user or IP address) perform the action?

         Checks if the user has ratelimiting disabled in the database by looking
@@ -217,8 +223,11 @@ class Ratelimiter:
             allowed = True
             action_count = action_count + n_actions

-        if update:
-            self.actions[key] = (action_count, time_start, rate_hz)
+        # Only record the action if we're allowed to perform it.
+        if allowed and update:
+            self._record_action_inner(
+                key, action_count, time_start, rate_hz, time_now_s
+            )

         if rate_hz > 0:
             # Find out when the count of existing actions expires
@@ -264,7 +273,37 @@ class Ratelimiter:
         key = self._get_key(requester, key)
         time_now_s = _time_now_s if _time_now_s is not None else self.clock.time()
         action_count, time_start, rate_hz = self._get_action_counts(key, time_now_s)
-        self.actions[key] = (action_count + n_actions, time_start, rate_hz)
+        self._record_action_inner(
+            key, action_count + n_actions, time_start, rate_hz, time_now_s
+        )
+
+    def _record_action_inner(
+        self,
+        key: Hashable,
+        action_count: int,
+        time_start: float,
+        rate_hz: float,
+        time_now_s: float,
+    ) -> None:
+        """Helper to atomically update the action count for a given key."""
+        prune_time_s = time_start + action_count / rate_hz
+
+        # If the prune time is in the past, we can just remove the entry rather
+        # than inserting and immediately pruning.
+        if prune_time_s <= time_now_s:
+            self.actions.pop(key, None)
+            return
+
+        self.actions[key] = (action_count, time_start, rate_hz)
+
+        # We need to make sure that we only call prune *after* the entry
+        # expires, otherwise the scheduled prune may not actually prune it. This
+        # is just a cleanup job, so it doesn't matter if entries aren't pruned
+        # immediately after they expire. Hence we schedule the prune a little
+        # after the entry is due to expire.
+        prune_time_s += 0.1
+
+        self._timer.insert(int(time_now_s * 1000), key, int(prune_time_s * 1000))

     def _prune_message_counts(self) -> None:
         """Remove message count entries that have not exceeded their defined
@@ -272,18 +311,24 @@ class Ratelimiter:
         """
         time_now_s = self.clock.time()

-        # We create a copy of the key list here as the dictionary is modified during
-        # the loop
-        for key in list(self.actions.keys()):
-            action_count, time_start, rate_hz = self.actions[key]
+        # Pull out all the keys that *might* need pruning. We still need to
+        # verify they haven't since been updated.
+        to_prune = self._timer.fetch(int(time_now_s * 1000))
+
+        for key in to_prune:
+            value = self.actions.get(key)
+            if value is None:
+                continue
+
+            action_count, time_start, rate_hz = value

             # Rate limit = "seconds since we started limiting this action" * rate_hz
             # If this limit has not been exceeded, wipe our record of this action
             time_delta = time_now_s - time_start
             if action_count - time_delta * rate_hz > 0:
                 continue
-            else:
-                del self.actions[key]
+
+            del self.actions[key]

     async def ratelimit(
         self,

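The ratelimiter change above replaces a full scan of `self.actions` every 60 seconds with a `WheelTimer` that is polled every 15 seconds and only yields keys whose bucket has likely drained. The scheduling hinges on the leaky-bucket identity used in `_record_action_inner`: a bucket holding `action_count` tokens that leaks at `rate_hz` tokens per second is empty again at `time_start + action_count / rate_hz`. A worked example with illustrative values:

```python
time_start = 1_000.0   # when the bucket was last empty (seconds)
action_count = 5       # tokens currently in the bucket
rate_hz = 0.5          # leak rate: one token drains every two seconds

prune_time_s = time_start + action_count / rate_hz
assert prune_time_s == 1_010.0  # safe to forget this key after ~10 seconds

# The record is inserted into the wheel timer in milliseconds, scheduled a
# little *after* expiry (+0.1s) so the periodic prune job never fires
# before the bucket has fully drained.
prune_at_ms = int((prune_time_s + 0.1) * 1000)
print(prune_at_ms)  # ~1_010_100
```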
@@ -18,7 +18,7 @@
 #
 #

-from typing import Callable, Dict, Optional, Tuple
+from typing import Callable, Optional

 import attr

@@ -109,7 +109,7 @@ class RoomVersion:
     # is not enough to mark it "supported": the push rule evaluator also needs to
     # support the flag. Unknown flags are ignored by the evaluator, making conditions
     # fail if used.
-    msc3931_push_features: Tuple[str, ...]  # values from PushRuleRoomFlag
+    msc3931_push_features: tuple[str, ...]  # values from PushRuleRoomFlag
     # MSC3757: Restricting who can overwrite a state event
     msc3757_enabled: bool
     # MSC4289: Creator power enabled
@@ -476,7 +476,7 @@ class RoomVersions:
     )


-KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
+KNOWN_ROOM_VERSIONS: dict[str, RoomVersion] = {
     v.identifier: v
     for v in (
         RoomVersions.V1,

@@ -29,16 +29,15 @@ import traceback
 import warnings
 from textwrap import indent
 from threading import Thread
+from types import FrameType
 from typing import (
     TYPE_CHECKING,
     Any,
     Awaitable,
     Callable,
-    Dict,
-    List,
     NoReturn,
     Optional,
-    Tuple,
+    Union,
     cast,
 )
 from wsgiref.simple_server import WSGIServer
@@ -64,7 +63,6 @@ from twisted.web.resource import Resource
 import synapse.util.caches
 from synapse.api.constants import MAX_PDU_SIZE
 from synapse.app import check_bind_error
-from synapse.app.phone_stats_home import start_phone_stats_home
 from synapse.config import ConfigError
 from synapse.config._base import format_config_error
 from synapse.config.homeserver import HomeServerConfig
@@ -76,7 +74,6 @@ from synapse.handlers.auth import load_legacy_password_auth_providers
 from synapse.http.site import SynapseSite
 from synapse.logging.context import LoggingContext, PreserveLoggingContext
 from synapse.metrics import install_gc_manager, register_threadpool
-from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.metrics.jemalloc import setup_jemalloc_stats
 from synapse.module_api.callbacks.spamchecker_callbacks import load_legacy_spam_checkers
 from synapse.module_api.callbacks.third_party_event_rules_callbacks import (
@@ -99,8 +96,8 @@ reactor = cast(ISynapseReactor, _reactor)

 logger = logging.getLogger(__name__)

-_instance_id_to_sighup_callbacks_map: Dict[
-    str, List[Tuple[Callable[..., None], Tuple[object, ...], Dict[str, object]]]
+_instance_id_to_sighup_callbacks_map: dict[
+    str, list[tuple[Callable[..., None], tuple[object, ...], dict[str, object]]]
 ] = {}
 """
 Map from homeserver instance_id to a list of callbacks.
@@ -112,7 +109,7 @@ P = ParamSpec("P")


 def register_sighup(
-    homeserver_instance_id: str,
+    hs: "HomeServer",
     func: Callable[P, None],
     *args: P.args,
     **kwargs: P.kwargs,
@@ -127,19 +124,25 @@ def register_sighup(
         *args, **kwargs: args and kwargs to be passed to the target function.
     """

-    _instance_id_to_sighup_callbacks_map.setdefault(homeserver_instance_id, []).append(
-        (func, args, kwargs)
+    # Wrap the function so we can run it within a logcontext
+    def _callback_wrapper(*args: P.args, **kwargs: P.kwargs) -> None:
+        with LoggingContext(name="sighup", server_name=hs.hostname):
+            func(*args, **kwargs)
+
+    _instance_id_to_sighup_callbacks_map.setdefault(hs.get_instance_id(), []).append(
+        (_callback_wrapper, args, kwargs)
     )


-def unregister_sighups(instance_id: str) -> None:
+def unregister_sighups(homeserver_instance_id: str) -> None:
     """
     Unregister all sighup functions associated with this Synapse instance.

     Args:
-        instance_id: Unique ID for this Synapse process instance.
+        homeserver_instance_id: The unique ID for this Synapse process instance to
+            unregister hooks for (`hs.get_instance_id()`).
     """
-    _instance_id_to_sighup_callbacks_map.pop(instance_id, [])
+    _instance_id_to_sighup_callbacks_map.pop(homeserver_instance_id, [])


 def start_worker_reactor(
@@ -177,7 +180,7 @@ def start_worker_reactor(
 def start_reactor(
     appname: str,
     soft_file_limit: int,
-    gc_thresholds: Optional[Tuple[int, int, int]],
+    gc_thresholds: Optional[tuple[int, int, int]],
     pid_file: Optional[str],
     daemonize: bool,
     print_pidfile: bool,
@@ -310,7 +313,7 @@ def register_start(

 def listen_metrics(
     bind_addresses: StrCollection, port: int
-) -> List[Tuple[WSGIServer, Thread]]:
+) -> list[tuple[WSGIServer, Thread]]:
     """
     Start Prometheus metrics server.

@@ -331,7 +334,7 @@ def listen_metrics(

     from synapse.metrics import RegistryProxy

-    servers: List[Tuple[WSGIServer, Thread]] = []
+    servers: list[tuple[WSGIServer, Thread]] = []
     for host in bind_addresses:
         logger.info("Starting metrics listener on %s:%d", host, port)
         server, thread = start_http_server_prometheus(
@@ -346,7 +349,7 @@ def listen_manhole(
     port: int,
     manhole_settings: ManholeConfig,
     manhole_globals: dict,
-) -> List[Port]:
+) -> list[Port]:
     # twisted.conch.manhole 21.1.0 uses "int_from_bytes", which produces a confusing
     # warning. It's fixed by https://github.com/twisted/twisted/pull/1522), so
     # suppress the warning for now.
@@ -371,7 +374,7 @@ def listen_tcp(
     factory: ServerFactory,
     reactor: IReactorTCP = reactor,
     backlog: int = 50,
-) -> List[Port]:
+) -> list[Port]:
     """
     Create a TCP socket for a port and several addresses

@@ -396,7 +399,7 @@ def listen_unix(
     factory: ServerFactory,
     reactor: IReactorUNIX = reactor,
     backlog: int = 50,
-) -> List[Port]:
+) -> list[Port]:
     """
     Create a UNIX socket for a given path and 'mode' permission

@@ -420,18 +423,27 @@ def listen_http(
     max_request_body_size: int,
     context_factory: Optional[IOpenSSLContextFactory],
     reactor: ISynapseReactor = reactor,
-) -> List[Port]:
+) -> list[Port]:
+    """
+    Args:
+        listener_config: TODO
+        root_resource: TODO
+        version_string: A string to present for the Server header
+        max_request_body_size: TODO
+        context_factory: TODO
+        reactor: TODO
+    """
     assert listener_config.http_options is not None

     site_tag = listener_config.get_site_tag()

     site = SynapseSite(
-        "synapse.access.%s.%s"
+        logger_name="synapse.access.%s.%s"
         % ("https" if listener_config.is_tls() else "http", site_tag),
-        site_tag,
-        listener_config,
-        root_resource,
-        version_string,
+        site_tag=site_tag,
+        config=listener_config,
+        resource=root_resource,
+        server_version_string=version_string,
         max_request_body_size=max_request_body_size,
         reactor=reactor,
         hs=hs,
@@ -481,7 +493,7 @@ def listen_ssl(
     context_factory: IOpenSSLContextFactory,
     reactor: IReactorSSL = reactor,
     backlog: int = 50,
-) -> List[Port]:
+) -> list[Port]:
     """
     Create an TLS-over-TCP socket for a port and several addresses

@@ -535,7 +547,62 @@ def refresh_certificate(hs: "HomeServer") -> None:
     logger.info("Context factories updated.")


-async def start(hs: "HomeServer", freeze: bool = True) -> None:
+_already_setup_sighup_handling = False
+"""
+Marks whether we've already successfully ran `setup_sighup_handling()`.
+"""
+
+
+def setup_sighup_handling() -> None:
+    """
+    Set up SIGHUP handling to call registered callbacks.
+
+    This can be called multiple times safely.
+    """
+    global _already_setup_sighup_handling
+    # We only need to set things up once per process.
+    if _already_setup_sighup_handling:
+        return
+
+    previous_sighup_handler: Union[
+        Callable[[int, Optional[FrameType]], Any], int, None
+    ] = None
+
+    # Set up the SIGHUP machinery.
+    if hasattr(signal, "SIGHUP"):
+
+        def handle_sighup(*args: Any, **kwargs: Any) -> None:
+            # Tell systemd our state, if we're using it. This will silently fail if
+            # we're not using systemd.
+            sdnotify(b"RELOADING=1")
+
+            if callable(previous_sighup_handler):
+                previous_sighup_handler(*args, **kwargs)
+
+            for sighup_callbacks in _instance_id_to_sighup_callbacks_map.values():
+                for func, args, kwargs in sighup_callbacks:
+                    func(*args, **kwargs)
+
+            sdnotify(b"READY=1")
+
+        # We defer running the sighup handlers until next reactor tick. This
+        # is so that we're in a sane state, e.g. flushing the logs may fail
+        # if the sighup happens in the middle of writing a log entry.
+        def run_sighup(*args: Any, **kwargs: Any) -> None:
+            # `callFromThread` should be "signal safe" as well as thread
+            # safe.
+            reactor.callFromThread(handle_sighup, *args, **kwargs)
+
+        # Register for the SIGHUP signal, chaining any existing handler as there can
+        # only be one handler per signal and we don't want to clobber any existing
+        # handlers (like the `multi_synapse` shard process in the context of Synapse Pro
+        # for small hosts)
+        previous_sighup_handler = signal.signal(signal.SIGHUP, run_sighup)

+    _already_setup_sighup_handling = True
+
+
+async def start(hs: "HomeServer", *, freeze: bool = True) -> None:
     """
     Start a Synapse server or worker.

@@ -574,49 +641,23 @@ async def start(hs: "HomeServer", freeze: bool = True) -> None:
         name="gai_resolver", server_name=server_name, threadpool=resolver_threadpool
     )

-    # Set up the SIGHUP machinery.
-    if hasattr(signal, "SIGHUP"):
-
-        def handle_sighup(*args: Any, **kwargs: Any) -> "defer.Deferred[None]":
-            async def _handle_sighup(*args: Any, **kwargs: Any) -> None:
-                # Tell systemd our state, if we're using it. This will silently fail if
-                # we're not using systemd.
-                sdnotify(b"RELOADING=1")
-
-                for sighup_callbacks in _instance_id_to_sighup_callbacks_map.values():
-                    for func, args, kwargs in sighup_callbacks:
-                        func(*args, **kwargs)
-
-                sdnotify(b"READY=1")
-
-            # It's okay to ignore the linter error here and call
-            # `run_as_background_process` directly because `_handle_sighup` operates
-            # outside of the scope of a specific `HomeServer` instance and holds no
-            # references to it which would prevent a clean shutdown.
-            return run_as_background_process(  # type: ignore[untracked-background-process]
-                "sighup",
-                server_name,
-                _handle_sighup,
-                *args,
-                **kwargs,
-            )
-
-        # We defer running the sighup handlers until next reactor tick. This
-        # is so that we're in a sane state, e.g. flushing the logs may fail
-        # if the sighup happens in the middle of writing a log entry.
-        def run_sighup(*args: Any, **kwargs: Any) -> None:
-            # `callFromThread` should be "signal safe" as well as thread
-            # safe.
-            reactor.callFromThread(handle_sighup, *args, **kwargs)
-
-        signal.signal(signal.SIGHUP, run_sighup)
-
-    register_sighup(hs.get_instance_id(), refresh_certificate, hs)
-    register_sighup(hs.get_instance_id(), reload_cache_config, hs.config)
+    setup_sighup_handling()
+    register_sighup(hs, refresh_certificate, hs)
+    register_sighup(hs, reload_cache_config, hs.config)

     # Apply the cache config.
     hs.config.caches.resize_all_caches()

     # Load the OIDC provider metadatas, if OIDC is enabled.
     if hs.config.oidc.oidc_enabled:
         oidc = hs.get_oidc_handler()
         # Loading the provider metadata also ensures the provider config is valid.
         #
         # FIXME: It feels a bit strange to validate and block on startup as one of these
         # OIDC providers could be temporarily unavailable and cause Synapse to be unable
         # to start.
         await oidc.load_metadata()

     # Load the certificate from disk.
     refresh_certificate(hs)

@@ -674,15 +715,6 @@ async def start(hs: "HomeServer", freeze: bool = True) -> None:
     if hs.config.worker.run_background_tasks:
         hs.start_background_tasks()

-    # TODO: This should be moved to same pattern we use for other background tasks:
-    #  Add to `REQUIRED_ON_BACKGROUND_TASK_STARTUP` and rely on
-    #  `start_background_tasks` to start it.
-    await hs.get_common_usage_metrics_manager().setup()
-
-    # TODO: This feels like another pattern that should refactored as one of the
-    #  `REQUIRED_ON_BACKGROUND_TASK_STARTUP`
-    start_phone_stats_home(hs)
-
     if freeze:
         # We now freeze all allocated objects in the hopes that (almost)
         # everything currently allocated are things that will be used for the

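The new `setup_sighup_handling()` relies on the fact that `signal.signal()` returns whatever handler was previously installed, which is what lets it chain rather than clobber an existing SIGHUP handler. A standalone sketch of that trick (not Synapse code; note that SIGHUP does not exist on Windows):

```python
import os
import signal
import time
from types import FrameType
from typing import Any, Callable, Optional, Union

previous_handler: Union[Callable[[int, Optional[FrameType]], Any], int, None] = None

def my_handler(signum: int, frame: Optional[FrameType]) -> None:
    print("reloading config...")
    # Chain to whatever was registered before us; it may also be
    # signal.SIG_DFL / signal.SIG_IGN, which are not callable.
    if callable(previous_handler):
        previous_handler(signum, frame)

# signal.signal() installs our handler *and* hands back the previous one.
previous_handler = signal.signal(signal.SIGHUP, my_handler)

os.kill(os.getpid(), signal.SIGHUP)  # simulate `kill -HUP <pid>`
time.sleep(0.1)  # the handler runs on the main thread almost immediately
```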
@@ -24,7 +24,7 @@ import logging
 import os
 import sys
 import tempfile
-from typing import List, Mapping, Optional, Sequence, Tuple
+from typing import Mapping, Optional, Sequence

 from twisted.internet import defer, task

@@ -64,8 +64,7 @@ from synapse.storage.databases.main.state import StateGroupWorkerStore
 from synapse.storage.databases.main.stream import StreamWorkerStore
 from synapse.storage.databases.main.tags import TagsWorkerStore
 from synapse.storage.databases.main.user_erasure_store import UserErasureWorkerStore
-from synapse.types import JsonMapping, StateMap
-from synapse.util import SYNAPSE_VERSION
+from synapse.types import ISynapseReactor, JsonMapping, StateMap
 from synapse.util.logcontext import LoggingContext

 logger = logging.getLogger("synapse.app.admin_cmd")
@@ -151,7 +150,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
         if list(os.listdir(self.base_directory)):
             raise Exception("Directory must be empty")

-    def write_events(self, room_id: str, events: List[EventBase]) -> None:
+    def write_events(self, room_id: str, events: list[EventBase]) -> None:
         room_directory = os.path.join(self.base_directory, "rooms", room_id)
         os.makedirs(room_directory, exist_ok=True)
         events_file = os.path.join(room_directory, "events")
@@ -256,7 +255,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
         return self.base_directory


-def load_config(argv_options: List[str]) -> Tuple[HomeServerConfig, argparse.Namespace]:
+def load_config(argv_options: list[str]) -> tuple[HomeServerConfig, argparse.Namespace]:
     parser = argparse.ArgumentParser(description="Synapse Admin Command")
     HomeServerConfig.add_arguments_to_parser(parser)

@@ -290,7 +289,21 @@ def load_config(argv_options: List[str]) -> Tuple[HomeServerConfig, argparse.Nam
     return config, args


-def start(config: HomeServerConfig, args: argparse.Namespace) -> None:
+def create_homeserver(
+    config: HomeServerConfig,
+    reactor: Optional[ISynapseReactor] = None,
+) -> AdminCmdServer:
+    """
+    Create a homeserver instance for the Synapse admin command process.
+
+    Args:
+        config: The configuration for the homeserver.
+        reactor: Optionally provide a reactor to use. Can be useful in different
+            scenarios that you want control over the reactor, such as tests.
+
+    Returns:
+        A homeserver instance.
+    """
     if config.worker.worker_app is not None:
         assert config.worker.worker_app == "synapse.app.admin_cmd"

@@ -313,34 +326,63 @@ def start(config: HomeServerConfig, args: argparse.Namespace) -> None:

     synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts

-    ss = AdminCmdServer(
+    admin_command_server = AdminCmdServer(
         config.server.server_name,
         config=config,
-        version_string=f"Synapse/{SYNAPSE_VERSION}",
+        reactor=reactor,
     )

-    setup_logging(ss, config, use_worker_options=True)
-
-    ss.setup()
-
-    # We use task.react as the basic run command as it correctly handles tearing
-    # down the reactor when the deferreds resolve and setting the return value.
-    # We also make sure that `_base.start` gets run before we actually run the
-    # command.
-
-    async def run() -> None:
-        with LoggingContext(name="command", server_name=config.server.server_name):
-            await _base.start(ss)
-            await args.func(ss, args)
-
-    _base.start_worker_reactor(
-        "synapse-admin-cmd",
-        config,
-        run_command=lambda: task.react(lambda _reactor: defer.ensureDeferred(run())),
+    return admin_command_server
+
+
+def setup(admin_command_server: AdminCmdServer) -> None:
+    """
+    Setup a `AdminCmdServer` instance.
+
+    Args:
+        admin_command_server: The homeserver to setup.
+    """
+    setup_logging(
+        admin_command_server, admin_command_server.config, use_worker_options=True
     )

+    admin_command_server.setup()
+
+
+async def start(admin_command_server: AdminCmdServer, args: argparse.Namespace) -> None:
+    """
+    Should be called once the reactor is running.
+
+    Args:
+        admin_command_server: The homeserver to setup.
+        args: Command line arguments.
+    """
+    # This needs a logcontext unlike other entrypoints because we're not using
+    # `register_start(...)` to run this function.
+    with LoggingContext(name="start", server_name=admin_command_server.hostname):
+        # We make sure that `_base.start` gets run before we actually run the command.
+        await _base.start(admin_command_server)
+        # Run the command
+        await args.func(admin_command_server, args)
+
+
+def main() -> None:
+    homeserver_config, args = load_config(sys.argv[1:])
+    with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
+        # Initialize and setup the homeserver
+        admin_command_server = create_homeserver(homeserver_config)
+        setup(admin_command_server)
+
+        _base.start_worker_reactor(
+            "synapse-admin-cmd",
+            admin_command_server.config,
+            # We use task.react as the basic run command as it correctly handles tearing
+            # down the reactor when the deferreds resolve and setting the return value.
+            run_command=lambda: task.react(
+                lambda _reactor: defer.ensureDeferred(start(admin_command_server, args))
+            ),
+        )
+

 if __name__ == "__main__":
-    homeserver_config, args = load_config(sys.argv[1:])
-    with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
-        start(homeserver_config, args)
+    main()

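The new `main()` entrypoint keeps the earlier `task.react` pattern: `react()` starts the reactor, runs the supplied Deferred to completion, then stops the reactor and exits with an appropriate status code, while `defer.ensureDeferred` adapts the `start(...)` coroutine into that Deferred. A standalone sketch of the pattern (requires Twisted; the command body is a stand-in):

```python
from twisted.internet import defer, task

async def run_command() -> None:
    # Stand-in for `await _base.start(...)` followed by the admin command.
    print("command ran while the reactor was running")

def main() -> None:
    # react() expects a callable taking the reactor and returning a
    # Deferred; ensureDeferred adapts the coroutine. react() raises
    # SystemExit with the appropriate exit code once the Deferred fires.
    task.react(lambda _reactor: defer.ensureDeferred(run_command()))

if __name__ == "__main__":
    main()
```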
@@ -19,16 +19,11 @@
 #
 #

-import sys
-
-from synapse.app.generic_worker import load_config, start
-from synapse.util.logcontext import LoggingContext
+from synapse.app.generic_worker import main as worker_main


 def main() -> None:
-    homeserver_config = load_config(sys.argv[1:])
-    with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
-        start(homeserver_config)
+    worker_main()


 if __name__ == "__main__":
Some files were not shown because too many files have changed in this diff.