Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-07 01:20:16 +00:00)

Compare commits: patch-1...erikj/bett (62 commits)
| SHA1 |
|---|
| 40878330fe |
| aea3a11ae9 |
| aa2981f63f |
| 9c91873922 |
| 41fbe387d6 |
| 90cc9e5b29 |
| 516fd891ee |
| 0ef2315a99 |
| 59710437e4 |
| 9985aa6821 |
| 31742149d4 |
| 947e8a6cb0 |
| 0d4d00a07c |
| 3166445514 |
| 922656fc77 |
| 30c50e0240 |
| 48a90c697b |
| 47773232b0 |
| 2e92b718d5 |
| 646cb6ff24 |
| 0fe9e1f7da |
| ae181233aa |
| 20c9e19519 |
| 55b0aa847a |
| 074ef4d75f |
| 301c9771c4 |
| 800a5b6ef3 |
| 8c667759ad |
| 14e9ab19be |
| 20c8991a94 |
| dcae2b4ba4 |
| 98f57ea3f2 |
| f5b6005559 |
| 47f3870894 |
| 6d64f1b2b8 |
| 1d47532310 |
| 09f0957b36 |
| 803f05f60c |
| c8e0bed426 |
| 28f5ad07d3 |
| f0d6f14047 |
| 3a196b3227 |
| fbb2573525 |
| 259442fa4c |
| fe4719a268 |
| 3a30846bd0 |
| db4e321219 |
| 657b8cc75c |
| a2a543fd12 |
| 89f1092284 |
| 4ffed6330f |
| e363881592 |
| d40878451c |
| 892cbd0624 |
| 106cfd4b39 |
| 0a6ae6fe4c |
| 13a3987929 |
| 680f60102b |
| 3e51b370c5 |
| 9b8597e431 |
| 4d10a8fb18 |
| 1f8f991d51 |
.github/workflows/docker.yml (vendored, 2 changed lines)

@@ -30,7 +30,7 @@ jobs:
         run: docker buildx inspect
 
       - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.4.0
+        uses: sigstore/cosign-installer@v3.5.0
 
       - name: Checkout repository
         uses: actions/checkout@v4
.github/workflows/docs-pr.yaml (vendored, 4 changed lines)

@@ -19,7 +19,7 @@ jobs:
           fetch-depth: 0
 
       - name: Setup mdbook
-        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
+        uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
         with:
           mdbook-version: '0.4.17'
 
@@ -53,7 +53,7 @@ jobs:
       - uses: actions/checkout@v4
 
       - name: Setup mdbook
-        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
+        uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
         with:
           mdbook-version: '0.4.17'
.github/workflows/docs.yaml (vendored, 6 changed lines)

@@ -56,7 +56,7 @@ jobs:
           fetch-depth: 0
 
       - name: Setup mdbook
-        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
+        uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
         with:
           mdbook-version: '0.4.17'
 
@@ -80,7 +80,7 @@ jobs:
 
       # Deploy to the target directory.
       - name: Deploy to gh pages
-        uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
+        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: ./book
 
@@ -110,7 +110,7 @@ jobs:
 
       # Deploy to the target directory.
       - name: Deploy to gh pages
-        uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
+        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: ./dev-docs/_build/html
.github/workflows/tests.yml (vendored, 18 changed lines)

@@ -81,7 +81,7 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.65.0
+        uses: dtolnay/rust-toolchain@1.66.0
       - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
         with:
 
@@ -148,7 +148,7 @@ jobs:
         uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.65.0
+        uses: dtolnay/rust-toolchain@1.66.0
       - uses: Swatinem/rust-cache@v2
 
       - name: Setup Poetry
 
@@ -208,7 +208,7 @@ jobs:
         with:
           ref: ${{ github.event.pull_request.head.sha }}
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.65.0
+        uses: dtolnay/rust-toolchain@1.66.0
       - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
         with:
 
@@ -225,7 +225,7 @@ jobs:
       - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.65.0
+        uses: dtolnay/rust-toolchain@1.66.0
         with:
           components: clippy
       - uses: Swatinem/rust-cache@v2
 
@@ -344,7 +344,7 @@ jobs:
           postgres:${{ matrix.job.postgres-version }}
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.65.0
+        uses: dtolnay/rust-toolchain@1.66.0
       - uses: Swatinem/rust-cache@v2
 
       - uses: matrix-org/setup-python-poetry@v1
 
@@ -386,7 +386,7 @@ jobs:
      - uses: actions/checkout@v4
 
      - name: Install Rust
-       uses: dtolnay/rust-toolchain@1.65.0
+       uses: dtolnay/rust-toolchain@1.66.0
      - uses: Swatinem/rust-cache@v2
 
      # There aren't wheels for some of the older deps, so we need to install
 
@@ -498,7 +498,7 @@ jobs:
         run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.65.0
+        uses: dtolnay/rust-toolchain@1.66.0
       - uses: Swatinem/rust-cache@v2
 
       - name: Run SyTest
 
@@ -642,7 +642,7 @@ jobs:
           path: synapse
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.65.0
+        uses: dtolnay/rust-toolchain@1.66.0
       - uses: Swatinem/rust-cache@v2
 
       - name: Prepare Complement's Prerequisites
 
@@ -674,7 +674,7 @@ jobs:
       - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.65.0
+        uses: dtolnay/rust-toolchain@1.66.0
       - uses: Swatinem/rust-cache@v2
 
       - run: cargo test
CHANGES.md (95 changed lines)

@@ -1,3 +1,98 @@
# Synapse 1.106.0rc1 (2024-04-25)

### Features

- Send an email if the address is already bound to a user account. ([\#16819](https://github.com/element-hq/synapse/issues/16819))
- Implement the rendezvous mechanism described by [MSC4108](https://github.com/matrix-org/matrix-spec-proposals/issues/4108). ([\#17056](https://github.com/element-hq/synapse/issues/17056))
- Support delegating the rendezvous mechanism described by [MSC4108](https://github.com/matrix-org/matrix-spec-proposals/issues/4108) to an external implementation. ([\#17086](https://github.com/element-hq/synapse/issues/17086))

### Bugfixes

- Add validation to ensure that the `limit` parameter on `/publicRooms` is non-negative. ([\#16920](https://github.com/element-hq/synapse/issues/16920))
- Return `400 M_NOT_JSON` upon receiving invalid JSON in query parameters across various client and admin endpoints, rather than an internal server error. ([\#16923](https://github.com/element-hq/synapse/issues/16923))
- Make the CSAPI endpoint `/keys/device_signing/upload` idempotent. ([\#16943](https://github.com/element-hq/synapse/issues/16943))
- Redact membership events if the user requested erasure upon deactivating. ([\#17076](https://github.com/element-hq/synapse/issues/17076))

### Improved Documentation

- Add a prompt in the contributing guide to manually configure icu4c. ([\#17069](https://github.com/element-hq/synapse/issues/17069))
- Clarify what part of message retention is still experimental. ([\#17099](https://github.com/element-hq/synapse/issues/17099))

### Internal Changes

- Use new receipts column to optimise receipt and push action SQL queries. Contributed by Nick @ Beeper (@fizzadar). ([\#17032](https://github.com/element-hq/synapse/issues/17032), [\#17096](https://github.com/element-hq/synapse/issues/17096))
- Fix mypy with latest Twisted release. ([\#17036](https://github.com/element-hq/synapse/issues/17036))
- Bump minimum supported Rust version to 1.66.0. ([\#17079](https://github.com/element-hq/synapse/issues/17079))
- Add helpers to transform Twisted requests to Rust http Requests/Responses. ([\#17081](https://github.com/element-hq/synapse/issues/17081))
- Fix type annotation for `visited_chains` after `mypy` upgrade. ([\#17125](https://github.com/element-hq/synapse/issues/17125))

### Updates to locked dependencies

* Bump anyhow from 1.0.81 to 1.0.82. ([\#17095](https://github.com/element-hq/synapse/issues/17095))
* Bump peaceiris/actions-gh-pages from 3.9.3 to 4.0.0. ([\#17087](https://github.com/element-hq/synapse/issues/17087))
* Bump peaceiris/actions-mdbook from 1.2.0 to 2.0.0. ([\#17089](https://github.com/element-hq/synapse/issues/17089))
* Bump pyasn1-modules from 0.3.0 to 0.4.0. ([\#17093](https://github.com/element-hq/synapse/issues/17093))
* Bump pygithub from 2.2.0 to 2.3.0. ([\#17092](https://github.com/element-hq/synapse/issues/17092))
* Bump ruff from 0.3.5 to 0.3.7. ([\#17094](https://github.com/element-hq/synapse/issues/17094))
* Bump sigstore/cosign-installer from 3.4.0 to 3.5.0. ([\#17088](https://github.com/element-hq/synapse/issues/17088))
* Bump twine from 4.0.2 to 5.0.0. ([\#17091](https://github.com/element-hq/synapse/issues/17091))
* Bump types-pillow from 10.2.0.20240406 to 10.2.0.20240415. ([\#17090](https://github.com/element-hq/synapse/issues/17090))

# Synapse 1.105.1 (2024-04-23)

## Security advisory

The following issues are fixed in 1.105.1.

- [GHSA-3h7q-rfh9-xm4v](https://github.com/element-hq/synapse/security/advisories/GHSA-3h7q-rfh9-xm4v) / [CVE-2024-31208](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-31208) — High Severity

  Weakness in auth chain indexing allows DoS from remote room members through disk fill and high CPU usage.

See the advisories for more details. If you have any questions, email security@element.io.

# Synapse 1.105.0 (2024-04-16)

No significant changes since 1.105.0rc1.

# Synapse 1.105.0rc1 (2024-04-11)

### Features

- Stabilize support for [MSC4010](https://github.com/matrix-org/matrix-spec-proposals/pull/4010) which clarifies the interaction of push rules and account data. Contributed by @clokep. ([\#17022](https://github.com/element-hq/synapse/issues/17022))
- Stabilize support for [MSC3981](https://github.com/matrix-org/matrix-spec-proposals/pull/3981): `/relations` recursion. Contributed by @clokep. ([\#17023](https://github.com/element-hq/synapse/issues/17023))
- Add support for moving `/pushrules` off of main process. ([\#17037](https://github.com/element-hq/synapse/issues/17037), [\#17038](https://github.com/element-hq/synapse/issues/17038))

### Bugfixes

- Fix various long-standing bugs which could cause incorrect state to be returned from `/sync` in certain situations. ([\#16930](https://github.com/element-hq/synapse/issues/16930), [\#16932](https://github.com/element-hq/synapse/issues/16932), [\#16942](https://github.com/element-hq/synapse/issues/16942), [\#17064](https://github.com/element-hq/synapse/issues/17064), [\#17065](https://github.com/element-hq/synapse/issues/17065), [\#17066](https://github.com/element-hq/synapse/issues/17066))
- Fix server notice rooms not always being created as unencrypted rooms, even when `encryption_enabled_by_default_for_room_type` is in use (server notices are always unencrypted). ([\#17033](https://github.com/element-hq/synapse/issues/17033))
- Fix the `.m.rule.encrypted_room_one_to_one` and `.m.rule.room_one_to_one` default underride push rules being in the wrong order. Contributed by @Sumpy1. ([\#17043](https://github.com/element-hq/synapse/issues/17043))

### Internal Changes

- Refactor auth chain fetching to reduce duplication. ([\#17044](https://github.com/element-hq/synapse/issues/17044))
- Improve database performance by adding a missing index to `access_tokens.refresh_token_id`. ([\#17045](https://github.com/element-hq/synapse/issues/17045), [\#17054](https://github.com/element-hq/synapse/issues/17054))
- Improve database performance by reducing number of receipts fetched when sending push notifications. ([\#17049](https://github.com/element-hq/synapse/issues/17049))

### Updates to locked dependencies

* Bump packaging from 23.2 to 24.0. ([\#17027](https://github.com/element-hq/synapse/issues/17027))
* Bump regex from 1.10.3 to 1.10.4. ([\#17028](https://github.com/element-hq/synapse/issues/17028))
* Bump ruff from 0.3.2 to 0.3.5. ([\#17060](https://github.com/element-hq/synapse/issues/17060))
* Bump serde_json from 1.0.114 to 1.0.115. ([\#17041](https://github.com/element-hq/synapse/issues/17041))
* Bump types-pillow from 10.2.0.20240125 to 10.2.0.20240406. ([\#17061](https://github.com/element-hq/synapse/issues/17061))
* Bump types-requests from 2.31.0.20240125 to 2.31.0.20240406. ([\#17063](https://github.com/element-hq/synapse/issues/17063))
* Bump typing-extensions from 4.9.0 to 4.11.0. ([\#17062](https://github.com/element-hq/synapse/issues/17062))

# Synapse 1.104.0 (2024-04-02)

### Bugfixes
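The MSC4108 delegation feature listed under 1.106.0rc1 above (#17086) is driven by homeserver configuration. A minimal sketch, assuming the option is named `msc4108_delegation_endpoint` under `experimental_features`; both the option name and the URL are assumptions to verify against the released 1.106.0 configuration manual:

```yaml
# Hedged sketch: delegate the MSC4108 rendezvous mechanism to an
# external implementation. The option name and URL are assumptions.
experimental_features:
  msc4108_delegation_endpoint: "https://rendezvous.example.com/_synapse/rendezvous"
```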
Cargo.lock (generated, 276 changed lines)

@@ -13,9 +13,9 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.81"
+version = "1.0.82"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247"
+checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519"
 
 [[package]]
 name = "arc-swap"
 
@@ -29,6 +29,12 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
 
+[[package]]
+name = "base64"
+version = "0.21.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
+
 [[package]]
 name = "bitflags"
 version = "1.3.2"
 
@@ -53,12 +59,33 @@ dependencies = [
  "generic-array",
 ]
 
+[[package]]
+name = "bumpalo"
+version = "3.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
+
+[[package]]
+name = "bytes"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9"
+
 [[package]]
 name = "cfg-if"
 version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
+[[package]]
+name = "cpufeatures"
+version = "0.2.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
+dependencies = [
+ "libc",
+]
+
 [[package]]
 name = "crypto-common"
 version = "0.1.6"
 
@@ -71,15 +98,21 @@ dependencies = [
 
 [[package]]
 name = "digest"
-version = "0.10.5"
+version = "0.10.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c"
+checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
 dependencies = [
  "block-buffer",
  "crypto-common",
  "subtle",
 ]
 
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
 [[package]]
 name = "generic-array"
 version = "0.14.6"
 
@@ -90,6 +123,43 @@ dependencies = [
  "version_check",
 ]
 
+[[package]]
+name = "getrandom"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "libc",
+ "wasi",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "headers"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "322106e6bd0cba2d5ead589ddb8150a13d7c4217cf80d7c4f682ca994ccc6aa9"
+dependencies = [
+ "base64",
+ "bytes",
+ "headers-core",
+ "http",
+ "httpdate",
+ "mime",
+ "sha1",
+]
+
+[[package]]
+name = "headers-core"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4"
+dependencies = [
+ "http",
+]
+
 [[package]]
 name = "heck"
 version = "0.4.1"
 
@@ -102,6 +172,23 @@ version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
 
+[[package]]
+name = "http"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258"
+dependencies = [
+ "bytes",
+ "fnv",
+ "itoa",
+]
+
+[[package]]
+name = "httpdate"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
+
 [[package]]
 name = "indoc"
 version = "2.0.4"
 
@@ -114,6 +201,15 @@ version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
 
+[[package]]
+name = "js-sys"
+version = "0.3.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d"
+dependencies = [
+ "wasm-bindgen",
+]
+
 [[package]]
 name = "lazy_static"
 version = "1.4.0"
 
@@ -122,9 +218,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 
 [[package]]
 name = "libc"
-version = "0.2.135"
+version = "0.2.153"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"
+checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
 
 [[package]]
 name = "lock_api"
 
@@ -157,6 +253,12 @@ dependencies = [
  "autocfg",
 ]
 
+[[package]]
+name = "mime"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+
 [[package]]
 name = "once_cell"
 version = "1.15.0"
 
@@ -192,6 +294,12 @@ version = "1.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
 
+[[package]]
+name = "ppv-lite86"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
+
 [[package]]
 name = "proc-macro2"
 version = "1.0.76"
 
@@ -295,6 +403,36 @@ dependencies = [
  "proc-macro2",
 ]
 
+[[package]]
+name = "rand"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+dependencies = [
+ "libc",
+ "rand_chacha",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+dependencies = [
+ "getrandom",
+]
+
 [[package]]
 name = "redox_syscall"
 version = "0.2.16"
 
@@ -306,9 +444,9 @@ dependencies = [
 
 [[package]]
 name = "regex"
-version = "1.10.3"
+version = "1.10.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15"
+checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c"
 dependencies = [
  "aho-corasick",
  "memchr",
 
@@ -347,18 +485,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
 
 [[package]]
 name = "serde"
-version = "1.0.197"
+version = "1.0.198"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2"
+checksum = "9846a40c979031340571da2545a4e5b7c4163bdae79b301d5f86d03979451fcc"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.197"
+version = "1.0.198"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
+checksum = "e88edab869b01783ba905e7d0153f9fc1a6505a96e4ad3018011eedb838566d9"
 dependencies = [
  "proc-macro2",
  "quote",
 
@@ -367,15 +505,37 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.114"
+version = "1.0.116"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0"
+checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813"
 dependencies = [
  "itoa",
  "ryu",
  "serde",
 ]
 
+[[package]]
+name = "sha1"
+version = "0.10.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
+[[package]]
+name = "sha2"
+version = "0.10.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
 [[package]]
 name = "smallvec"
 version = "1.10.0"
 
@@ -404,16 +564,23 @@ name = "synapse"
 version = "0.1.0"
 dependencies = [
  "anyhow",
+ "base64",
  "blake2",
+ "bytes",
+ "headers",
  "hex",
+ "http",
  "lazy_static",
  "log",
+ "mime",
  "pyo3",
  "pyo3-log",
  "pythonize",
  "regex",
  "serde",
  "serde_json",
+ "sha2",
+ "ulid",
 ]
 
 [[package]]
 
@@ -428,6 +595,17 @@ version = "1.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
 
+[[package]]
+name = "ulid"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34778c17965aa2a08913b57e1f34db9b4a63f5de31768b55bf20d2795f921259"
+dependencies = [
+ "getrandom",
+ "rand",
+ "web-time",
+]
+
 [[package]]
 name = "unicode-ident"
 version = "1.0.5"
 
@@ -446,6 +624,76 @@ version = "0.9.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
 
+[[package]]
+name = "wasi"
+version = "0.11.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.92"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
+dependencies = [
+ "cfg-if",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.92"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
+dependencies = [
+ "bumpalo",
+ "log",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.92"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.92"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.92"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
+
+[[package]]
+name = "web-time"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
 [[package]]
 name = "windows-sys"
 version = "0.36.1"
@@ -1 +0,0 @@
-Fix various long-standing bugs which could cause incorrect state to be returned from `/sync` in certain situations.

@@ -1 +0,0 @@
-Fix various long-standing bugs which could cause incorrect state to be returned from `/sync` in certain situations.

@@ -1 +0,0 @@
-Fix various long-standing bugs which could cause incorrect state to be returned from `/sync` in certain situations.
changelog.d/17000.bugfix (new file, 1 line)
@@ -0,0 +1 @@
+Fixed search feature of Element Android on homeservers using SQLite by returning search terms as search highlights.
@@ -1 +0,0 @@
-Add support for moving `/pushrules` off of main process.

@@ -1 +0,0 @@
-Add support for moving `/pushrules` off of main process.

@@ -1 +0,0 @@
-Refactor auth chain fetching to reduce duplication.

@@ -1 +0,0 @@
-Improve database performance by adding a missing index to `access_tokens.refresh_token_id`.

@@ -1 +0,0 @@
-Improve database performance by reducing number of receipts fetched when sending push notifications.
changelog.d/17071.doc (new file, 1 line)
@@ -0,0 +1 @@
+Update event_cache_size and global_factor configurations documentation.

changelog.d/17078.bugfix (new file, 1 line)
@@ -0,0 +1 @@
+For MSC3266 room summaries, support queries at the recommended endpoint of `/_matrix/client/unstable/im.nheko.summary/summary/{roomIdOrAlias}`. The existing endpoint of `/_matrix/client/unstable/im.nheko.summary/rooms/{roomIdOrAlias}/summary` is deprecated.

changelog.d/17083.misc (new file, 1 line)
@@ -0,0 +1 @@
+Improve DB usage when fetching related events.

changelog.d/17084.doc (new file, 1 line)
@@ -0,0 +1 @@
+Add RuntimeDirectory to example matrix-synapse.service systemd unit.

changelog.d/17116.doc (new file, 1 line)
@@ -0,0 +1 @@
+Update enable_notifs configuration documentation.

changelog.d/17121.bugfix (new file, 1 line)
@@ -0,0 +1 @@
+Improve error message for cross signing reset with MSC3861 enabled.
debian/changelog (vendored, 24 changed lines)

@@ -1,3 +1,27 @@
+matrix-synapse-py3 (1.106.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.106.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Apr 2024 15:54:59 +0100
+
+matrix-synapse-py3 (1.105.1) stable; urgency=medium
+
+  * New Synapse release 1.105.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Apr 2024 15:56:18 +0100
+
+matrix-synapse-py3 (1.105.0) stable; urgency=medium
+
+  * New Synapse release 1.105.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 16 Apr 2024 15:53:23 +0100
+
+matrix-synapse-py3 (1.105.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.105.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 11 Apr 2024 12:15:49 +0100
+
 matrix-synapse-py3 (1.104.0) stable; urgency=medium
 
   * New Synapse release 1.104.0.
@@ -102,6 +102,8 @@ experimental_features:
   msc3391_enabled: true
   # Filtering /messages by relation type.
   msc3874_enabled: true
+  # no UIA for x-signing upload for the first time
+  msc3967_enabled: true
 
 server_notices:
   system_mxid_localpart: _server
@@ -86,6 +86,8 @@ poetry install --extras all
 This will install the runtime and developer dependencies for the project. Be sure to check
 that the `poetry install` step completed cleanly.
 
+For OSX users, be sure to set `PKG_CONFIG_PATH` to support `icu4c`. Run `brew info icu4c` for more details.
+
 ## Running Synapse via poetry
 
 To start a local instance of Synapse in the locked poetry environment, create a config file:
@@ -7,8 +7,10 @@ follow the semantics described in
 and allow server and room admins to configure how long messages should
 be kept in a homeserver's database before being purged from it.
 **Please note that, as this feature isn't part of the Matrix
-specification yet, this implementation is to be considered as
-experimental.**
+specification yet, the use of `m.room.retention` events for per-room
+retention policies is to be considered as experimental. However, the use
+of a default message retention policy is considered a stable feature
+in Synapse.**
 
 A message retention policy is mainly defined by its `max_lifetime`
 parameter, which defines how long a message can be kept around after
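The stable part called out in the note above, a server-wide default retention policy, is set in `homeserver.yaml`. A minimal sketch; the lifetimes below are illustrative examples, not Synapse defaults:

```yaml
# Sketch of a server-default message retention policy. Per-room
# m.room.retention overrides remain experimental per the note above.
retention:
  enabled: true
  default_policy:
    min_lifetime: 1d   # example value
    max_lifetime: 1y   # example value
```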
@@ -9,6 +9,7 @@ ReloadPropagatedFrom=matrix-synapse.target
 Type=notify
 NotifyAccess=main
 User=matrix-synapse
+RuntimeDirectory=synapse
 WorkingDirectory=/var/lib/matrix-synapse
 EnvironmentFile=-/etc/default/matrix-synapse
 ExecStartPre=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/ --generate-keys
@@ -676,8 +676,8 @@ This setting has the following sub-options:
   trailing 's'.
 * `app_name`: `app_name` defines the default value for '%(app)s' in `notif_from` and email
   subjects. It defaults to 'Matrix'.
-* `enable_notifs`: Set to true to enable sending emails for messages that the user
-  has missed. Disabled by default.
+* `enable_notifs`: Set to true to allow users to receive e-mail notifications. If this is not set,
+  users can configure e-mail notifications but will not receive them. Disabled by default.
 * `notif_for_new_users`: Set to false to disable automatic subscription to email
   notifications for new users. Enabled by default.
 * `notif_delay_before_mail`: The time to wait before emailing about a notification.
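A minimal sketch of an `email` section that turns the reworded `enable_notifs` behaviour on; the SMTP host and addresses are placeholders:

```yaml
# Sketch: allow users to receive e-mail notifications.
email:
  smtp_host: smtp.example.com   # placeholder
  notif_from: "Your Friendly %(app)s homeserver <noreply@example.com>"   # placeholder
  enable_notifs: true
```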
@@ -1317,6 +1317,12 @@ Options related to caching.
 The number of events to cache in memory. Defaults to 10K. Like other caches,
 this is affected by `caches.global_factor` (see below).
 
+For example, the default is 10K and the `global_factor` default is 0.5.
+
+Since 10K * 0.5 is 5K, the event cache size will be 5K.
+
+The cache affected by this configuration is named "*getEvent*".
+
 Note that this option is not part of the `caches` section.
 
 Example configuration:

@@ -1342,6 +1348,8 @@ number of entries that can be stored.
 
 Defaults to 0.5, which will halve the size of all caches.
 
+Note that changing this value also affects the HTTP connection pool.
+
 * `per_cache_factors`: A dictionary of cache name to cache factor for that individual
   cache. Overrides the global cache factor for a given cache.
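The worked example above corresponds to this configuration, where the effective `*getEvent*` cache size is `event_cache_size * caches.global_factor`:

```yaml
# 10K * 0.5 = 5K effective entries in the "*getEvent*" cache.
event_cache_size: 10K   # note: not part of the `caches` section
caches:
  global_factor: 0.5
```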
@@ -232,7 +232,7 @@ information.
     ^/_matrix/client/v1/rooms/.*/hierarchy$
     ^/_matrix/client/(v1|unstable)/rooms/.*/relations/
     ^/_matrix/client/v1/rooms/.*/threads$
-    ^/_matrix/client/unstable/im.nheko.summary/rooms/.*/summary$
+    ^/_matrix/client/unstable/im.nheko.summary/summary/.*$
     ^/_matrix/client/(r0|v3|unstable)/account/3pid$
     ^/_matrix/client/(r0|v3|unstable)/account/whoami$
     ^/_matrix/client/(r0|v3|unstable)/devices$
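These patterns describe client-API endpoints that a generic worker can serve. A hedged sketch of a worker config that could pick them up; the worker name and port are placeholders:

```yaml
# Hypothetical worker1.yaml: a generic worker exposing the client API.
# Route the request patterns listed above to this worker via the
# reverse proxy.
worker_app: synapse.app.generic_worker
worker_name: client_reader1   # placeholder name
worker_listeners:
  - type: http
    port: 8083                # placeholder port
    resources:
      - names: [client]
```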
poetry.lock (generated, 290 changed lines)

@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
 
 [[package]]
 name = "alabaster"
 
@@ -1602,13 +1602,13 @@ tests = ["Sphinx", "doubles", "flake8", "flake8-quotes", "gevent", "mock", "pyte
 
 [[package]]
 name = "packaging"
-version = "23.2"
+version = "24.0"
 description = "Core utilities for Python packages"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
-    {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+    {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"},
+    {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"},
 ]
 
 [[package]]
 
@@ -1638,13 +1638,13 @@ files = [
 
 [[package]]
 name = "phonenumbers"
-version = "8.13.29"
+version = "8.13.35"
 description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
 optional = false
 python-versions = "*"
 files = [
-    {file = "phonenumbers-8.13.29-py2.py3-none-any.whl", hash = "sha256:9d7863dc8a37e8127f3c9dde65be93a5b46649b779184f8b0a85bdd043b0b293"},
-    {file = "phonenumbers-8.13.29.tar.gz", hash = "sha256:a6c85b53e28410aba2f312255cc8015f384a43e7e241ffb84ca5cde80f094cdf"},
+    {file = "phonenumbers-8.13.35-py2.py3-none-any.whl", hash = "sha256:58286a8e617bd75f541e04313b28c36398be6d4443a778c85e9617a93c391310"},
+    {file = "phonenumbers-8.13.35.tar.gz", hash = "sha256:64f061a967dcdae11e1c59f3688649e697b897110a33bb74d5a69c3e35321245"},
 ]
 
 [[package]]
 
@@ -1848,17 +1848,17 @@ files = [
 
 [[package]]
 name = "pyasn1-modules"
-version = "0.3.0"
+version = "0.4.0"
 description = "A collection of ASN.1-based protocols modules"
 optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+python-versions = ">=3.8"
 files = [
-    {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"},
-    {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"},
+    {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"},
+    {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"},
 ]
 
 [package.dependencies]
-pyasn1 = ">=0.4.6,<0.6.0"
+pyasn1 = ">=0.4.6,<0.7.0"
 
 [[package]]
 name = "pycparser"
 
@@ -1873,18 +1873,18 @@ files = [
 
 [[package]]
 name = "pydantic"
-version = "2.6.4"
+version = "2.7.0"
 description = "Data validation using Python type hints"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"},
-    {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"},
+    {file = "pydantic-2.7.0-py3-none-any.whl", hash = "sha256:9dee74a271705f14f9a1567671d144a851c675b072736f0a7b2608fd9e495352"},
+    {file = "pydantic-2.7.0.tar.gz", hash = "sha256:b5ecdd42262ca2462e2624793551e80911a1e989f462910bb81aef974b4bb383"},
 ]
 
 [package.dependencies]
 annotated-types = ">=0.4.0"
-pydantic-core = "2.16.3"
+pydantic-core = "2.18.1"
 typing-extensions = ">=4.6.1"
 
 [package.extras]
 
@@ -1892,90 +1892,90 @@ email = ["email-validator (>=2.0.0)"]
 
 [[package]]
 name = "pydantic-core"
-version = "2.16.3"
-description = ""
+version = "2.18.1"
+description = "Core functionality for Pydantic validation and serialization"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"},
-    {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"},
-    {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"},
-    {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"},
-    {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"},
-    {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"},
-    {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"},
-    {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"},
-    {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"},
-    {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"},
-    {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"},
-    {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"},
-    {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"},
-    {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"},
+    {file = "pydantic_core-2.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ee9cf33e7fe14243f5ca6977658eb7d1042caaa66847daacbd2117adb258b226"},
+    {file = "pydantic_core-2.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b7bbb97d82659ac8b37450c60ff2e9f97e4eb0f8a8a3645a5568b9334b08b50"},
+    {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df4249b579e75094f7e9bb4bd28231acf55e308bf686b952f43100a5a0be394c"},
+    {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d0491006a6ad20507aec2be72e7831a42efc93193d2402018007ff827dc62926"},
+    {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ae80f72bb7a3e397ab37b53a2b49c62cc5496412e71bc4f1277620a7ce3f52b"},
+    {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58aca931bef83217fca7a390e0486ae327c4af9c3e941adb75f8772f8eeb03a1"},
+    {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1be91ad664fc9245404a789d60cba1e91c26b1454ba136d2a1bf0c2ac0c0505a"},
+    {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:667880321e916a8920ef49f5d50e7983792cf59f3b6079f3c9dac2b88a311d17"},
+    {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f7054fdc556f5421f01e39cbb767d5ec5c1139ea98c3e5b350e02e62201740c7"},
+    {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:030e4f9516f9947f38179249778709a460a3adb516bf39b5eb9066fcfe43d0e6"},
+    {file = "pydantic_core-2.18.1-cp310-none-win32.whl", hash = "sha256:2e91711e36e229978d92642bfc3546333a9127ecebb3f2761372e096395fc649"},
+    {file = "pydantic_core-2.18.1-cp310-none-win_amd64.whl", hash = "sha256:9a29726f91c6cb390b3c2338f0df5cd3e216ad7a938762d11c994bb37552edb0"},
+    {file = "pydantic_core-2.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9ece8a49696669d483d206b4474c367852c44815fca23ac4e48b72b339807f80"},
+    {file = "pydantic_core-2.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a5d83efc109ceddb99abd2c1316298ced2adb4570410defe766851a804fcd5b"},
+    {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7973c381283783cd1043a8c8f61ea5ce7a3a58b0369f0ee0ee975eaf2f2a1b"},
+    {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54c7375c62190a7845091f521add19b0f026bcf6ae674bdb89f296972272e86d"},
+    {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd63cec4e26e790b70544ae5cc48d11b515b09e05fdd5eff12e3195f54b8a586"},
+    {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:561cf62c8a3498406495cfc49eee086ed2bb186d08bcc65812b75fda42c38294"},
+    {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68717c38a68e37af87c4da20e08f3e27d7e4212e99e96c3d875fbf3f4812abfc"},
+    {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d5728e93d28a3c63ee513d9ffbac9c5989de8c76e049dbcb5bfe4b923a9739d"},
+    {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0f17814c505f07806e22b28856c59ac80cee7dd0fbb152aed273e116378f519"},
+    {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d816f44a51ba5175394bc6c7879ca0bd2be560b2c9e9f3411ef3a4cbe644c2e9"},
+    {file = "pydantic_core-2.18.1-cp311-none-win32.whl", hash = "sha256:09f03dfc0ef8c22622eaa8608caa4a1e189cfb83ce847045eca34f690895eccb"},
+    {file = "pydantic_core-2.18.1-cp311-none-win_amd64.whl", hash = "sha256:27f1009dc292f3b7ca77feb3571c537276b9aad5dd4efb471ac88a8bd09024e9"},
+    {file = "pydantic_core-2.18.1-cp311-none-win_arm64.whl", hash = "sha256:48dd883db92e92519201f2b01cafa881e5f7125666141a49ffba8b9facc072b0"},
+    {file = "pydantic_core-2.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b6b0e4912030c6f28bcb72b9ebe4989d6dc2eebcd2a9cdc35fefc38052dd4fe8"},
+    {file = "pydantic_core-2.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3202a429fe825b699c57892d4371c74cc3456d8d71b7f35d6028c96dfecad31"},
+    {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3982b0a32d0a88b3907e4b0dc36809fda477f0757c59a505d4e9b455f384b8b"},
+    {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25595ac311f20e5324d1941909b0d12933f1fd2171075fcff763e90f43e92a0d"},
+    {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14fe73881cf8e4cbdaded8ca0aa671635b597e42447fec7060d0868b52d074e6"},
+    {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca976884ce34070799e4dfc6fbd68cb1d181db1eefe4a3a94798ddfb34b8867f"},
+    {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684d840d2c9ec5de9cb397fcb3f36d5ebb6fa0d94734f9886032dd796c1ead06"},
+    {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:54764c083bbe0264f0f746cefcded6cb08fbbaaf1ad1d78fb8a4c30cff999a90"},
+    {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:201713f2f462e5c015b343e86e68bd8a530a4f76609b33d8f0ec65d2b921712a"},
+    {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd1a9edb9dd9d79fbeac1ea1f9a8dd527a6113b18d2e9bcc0d541d308dae639b"},
+    {file = "pydantic_core-2.18.1-cp312-none-win32.whl", hash = "sha256:d5e6b7155b8197b329dc787356cfd2684c9d6a6b1a197f6bbf45f5555a98d411"},
+    {file = "pydantic_core-2.18.1-cp312-none-win_amd64.whl", hash = "sha256:9376d83d686ec62e8b19c0ac3bf8d28d8a5981d0df290196fb6ef24d8a26f0d6"},
+    {file = "pydantic_core-2.18.1-cp312-none-win_arm64.whl", hash = "sha256:c562b49c96906b4029b5685075fe1ebd3b5cc2601dfa0b9e16c2c09d6cbce048"},
+    {file = "pydantic_core-2.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:3e352f0191d99fe617371096845070dee295444979efb8f27ad941227de6ad09"},
+    {file = "pydantic_core-2.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0295d52b012cbe0d3059b1dba99159c3be55e632aae1999ab74ae2bd86a33d7"},
+    {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56823a92075780582d1ffd4489a2e61d56fd3ebb4b40b713d63f96dd92d28144"},
+    {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd3f79e17b56741b5177bcc36307750d50ea0698df6aa82f69c7db32d968c1c2"},
|
||||
{file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38a5024de321d672a132b1834a66eeb7931959c59964b777e8f32dbe9523f6b1"},
|
||||
{file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ce426ee691319d4767748c8e0895cfc56593d725594e415f274059bcf3cb76"},
|
||||
{file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2adaeea59849ec0939af5c5d476935f2bab4b7f0335b0110f0f069a41024278e"},
|
||||
{file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9b6431559676a1079eac0f52d6d0721fb8e3c5ba43c37bc537c8c83724031feb"},
|
||||
{file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:85233abb44bc18d16e72dc05bf13848a36f363f83757541f1a97db2f8d58cfd9"},
|
||||
{file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:641a018af4fe48be57a2b3d7a1f0f5dbca07c1d00951d3d7463f0ac9dac66622"},
|
||||
{file = "pydantic_core-2.18.1-cp38-none-win32.whl", hash = "sha256:63d7523cd95d2fde0d28dc42968ac731b5bb1e516cc56b93a50ab293f4daeaad"},
|
||||
{file = "pydantic_core-2.18.1-cp38-none-win_amd64.whl", hash = "sha256:907a4d7720abfcb1c81619863efd47c8a85d26a257a2dbebdb87c3b847df0278"},
|
||||
{file = "pydantic_core-2.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aad17e462f42ddbef5984d70c40bfc4146c322a2da79715932cd8976317054de"},
|
||||
{file = "pydantic_core-2.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:94b9769ba435b598b547c762184bcfc4783d0d4c7771b04a3b45775c3589ca44"},
|
||||
{file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80e0e57cc704a52fb1b48f16d5b2c8818da087dbee6f98d9bf19546930dc64b5"},
|
||||
{file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76b86e24039c35280ceee6dce7e62945eb93a5175d43689ba98360ab31eebc4a"},
|
||||
{file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a05db5013ec0ca4a32cc6433f53faa2a014ec364031408540ba858c2172bb0"},
|
||||
{file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:250ae39445cb5475e483a36b1061af1bc233de3e9ad0f4f76a71b66231b07f88"},
|
||||
{file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a32204489259786a923e02990249c65b0f17235073149d0033efcebe80095570"},
|
||||
{file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6395a4435fa26519fd96fdccb77e9d00ddae9dd6c742309bd0b5610609ad7fb2"},
|
||||
{file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2533ad2883f001efa72f3d0e733fb846710c3af6dcdd544fe5bf14fa5fe2d7db"},
|
||||
{file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b560b72ed4816aee52783c66854d96157fd8175631f01ef58e894cc57c84f0f6"},
|
||||
{file = "pydantic_core-2.18.1-cp39-none-win32.whl", hash = "sha256:582cf2cead97c9e382a7f4d3b744cf0ef1a6e815e44d3aa81af3ad98762f5a9b"},
|
||||
{file = "pydantic_core-2.18.1-cp39-none-win_amd64.whl", hash = "sha256:ca71d501629d1fa50ea7fa3b08ba884fe10cefc559f5c6c8dfe9036c16e8ae89"},
|
||||
{file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e178e5b66a06ec5bf51668ec0d4ac8cfb2bdcb553b2c207d58148340efd00143"},
|
||||
{file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:72722ce529a76a4637a60be18bd789d8fb871e84472490ed7ddff62d5fed620d"},
|
||||
{file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fe0c1ce5b129455e43f941f7a46f61f3d3861e571f2905d55cdbb8b5c6f5e2c"},
|
||||
{file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4284c621f06a72ce2cb55f74ea3150113d926a6eb78ab38340c08f770eb9b4d"},
|
||||
{file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a0c3e718f4e064efde68092d9d974e39572c14e56726ecfaeebbe6544521f47"},
|
||||
{file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2027493cc44c23b598cfaf200936110433d9caa84e2c6cf487a83999638a96ac"},
|
||||
{file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:76909849d1a6bffa5a07742294f3fa1d357dc917cb1fe7b470afbc3a7579d539"},
|
||||
{file = "pydantic_core-2.18.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ee7ccc7fb7e921d767f853b47814c3048c7de536663e82fbc37f5eb0d532224b"},
|
||||
{file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee2794111c188548a4547eccc73a6a8527fe2af6cf25e1a4ebda2fd01cdd2e60"},
|
||||
{file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a139fe9f298dc097349fb4f28c8b81cc7a202dbfba66af0e14be5cfca4ef7ce5"},
|
||||
{file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d074b07a10c391fc5bbdcb37b2f16f20fcd9e51e10d01652ab298c0d07908ee2"},
|
||||
{file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c69567ddbac186e8c0aadc1f324a60a564cfe25e43ef2ce81bcc4b8c3abffbae"},
|
||||
{file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:baf1c7b78cddb5af00971ad5294a4583188bda1495b13760d9f03c9483bb6203"},
|
||||
{file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2684a94fdfd1b146ff10689c6e4e815f6a01141781c493b97342cdc5b06f4d5d"},
|
||||
{file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:73c1bc8a86a5c9e8721a088df234265317692d0b5cd9e86e975ce3bc3db62a59"},
|
||||
{file = "pydantic_core-2.18.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e60defc3c15defb70bb38dd605ff7e0fae5f6c9c7cbfe0ad7868582cb7e844a6"},
|
||||
{file = "pydantic_core-2.18.1.tar.gz", hash = "sha256:de9d3e8717560eb05e28739d1b35e4eac2e458553a52a301e51352a7ffc86a35"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1983,13 +1983,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"

[[package]]
name = "pygithub"
version = "2.2.0"
version = "2.3.0"
description = "Use the full Github API v3"
optional = false
python-versions = ">=3.7"
files = [
    {file = "PyGithub-2.2.0-py3-none-any.whl", hash = "sha256:41042ea53e4c372219db708c38d2ca1fd4fadab75475bac27d89d339596cfad1"},
    {file = "PyGithub-2.2.0.tar.gz", hash = "sha256:e39be7c4dc39418bdd6e3ecab5931c636170b8b21b4d26f9ecf7e6102a3b51c3"},
    {file = "PyGithub-2.3.0-py3-none-any.whl", hash = "sha256:65b499728be3ce7b0cd2cd760da3b32f0f4d7bc55e5e0677617f90f6564e793e"},
    {file = "PyGithub-2.3.0.tar.gz", hash = "sha256:0148d7347a1cdeed99af905077010aef81a4dad988b0ba51d4108bf66b443f7e"},
]

[package.dependencies]
@@ -2016,12 +2016,12 @@ plugins = ["importlib-metadata"]

[[package]]
name = "pyicu"
version = "2.12"
version = "2.13"
description = "Python extension wrapping the ICU C++ API"
optional = true
python-versions = "*"
files = [
    {file = "PyICU-2.12.tar.gz", hash = "sha256:bd7ab5efa93ad692e6daa29cd249364e521218329221726a113ca3cb281c8611"},
    {file = "PyICU-2.13.tar.gz", hash = "sha256:d481be888975df3097c2790241bbe8518f65c9676a74957cdbe790e559c828f6"},
]

[[package]]
@@ -2444,28 +2444,28 @@ files = [

[[package]]
name = "ruff"
version = "0.3.2"
version = "0.3.7"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
    {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77f2612752e25f730da7421ca5e3147b213dca4f9a0f7e0b534e9562c5441f01"},
    {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9966b964b2dd1107797be9ca7195002b874424d1d5472097701ae8f43eadef5d"},
    {file = "ruff-0.3.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b83d17ff166aa0659d1e1deaf9f2f14cbe387293a906de09bc4860717eb2e2da"},
    {file = "ruff-0.3.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb875c6cc87b3703aeda85f01c9aebdce3d217aeaca3c2e52e38077383f7268a"},
    {file = "ruff-0.3.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be75e468a6a86426430373d81c041b7605137a28f7014a72d2fc749e47f572aa"},
    {file = "ruff-0.3.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:967978ac2d4506255e2f52afe70dda023fc602b283e97685c8447d036863a302"},
    {file = "ruff-0.3.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1231eacd4510f73222940727ac927bc5d07667a86b0cbe822024dd00343e77e9"},
    {file = "ruff-0.3.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c6d613b19e9a8021be2ee1d0e27710208d1603b56f47203d0abbde906929a9b"},
    {file = "ruff-0.3.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8439338a6303585d27b66b4626cbde89bb3e50fa3cae86ce52c1db7449330a7"},
    {file = "ruff-0.3.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:de8b480d8379620cbb5ea466a9e53bb467d2fb07c7eca54a4aa8576483c35d36"},
    {file = "ruff-0.3.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b74c3de9103bd35df2bb05d8b2899bf2dbe4efda6474ea9681280648ec4d237d"},
    {file = "ruff-0.3.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f380be9fc15a99765c9cf316b40b9da1f6ad2ab9639e551703e581a5e6da6745"},
    {file = "ruff-0.3.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0ac06a3759c3ab9ef86bbeca665d31ad3aa9a4b1c17684aadb7e61c10baa0df4"},
    {file = "ruff-0.3.2-py3-none-win32.whl", hash = "sha256:9bd640a8f7dd07a0b6901fcebccedadeb1a705a50350fb86b4003b805c81385a"},
    {file = "ruff-0.3.2-py3-none-win_amd64.whl", hash = "sha256:0c1bdd9920cab5707c26c8b3bf33a064a4ca7842d91a99ec0634fec68f9f4037"},
    {file = "ruff-0.3.2-py3-none-win_arm64.whl", hash = "sha256:5f65103b1d76e0d600cabd577b04179ff592064eaa451a70a81085930e907d0b"},
    {file = "ruff-0.3.2.tar.gz", hash = "sha256:fa78ec9418eb1ca3db392811df3376b46471ae93792a81af2d1cbb0e5dcb5142"},
    {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"},
    {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"},
    {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"},
    {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"},
    {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"},
    {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"},
    {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"},
    {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"},
    {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"},
    {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"},
    {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"},
    {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"},
    {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"},
    {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"},
    {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"},
    {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"},
    {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"},
]

[[package]]
@@ -2954,13 +2954,13 @@ docs = ["sphinx (<7.0.0)"]

[[package]]
name = "twine"
version = "4.0.2"
version = "5.0.0"
description = "Collection of utilities for publishing packages on PyPI"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
    {file = "twine-4.0.2-py3-none-any.whl", hash = "sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8"},
    {file = "twine-4.0.2.tar.gz", hash = "sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8"},
    {file = "twine-5.0.0-py3-none-any.whl", hash = "sha256:a262933de0b484c53408f9edae2e7821c1c45a3314ff2df9bdd343aa7ab8edc0"},
    {file = "twine-5.0.0.tar.gz", hash = "sha256:89b0cc7d370a4b66421cc6102f269aa910fe0f1861c124f573cf2ddedbc10cf4"},
]

[package.dependencies]
@@ -3051,15 +3051,18 @@ twisted = "*"

[[package]]
name = "types-bleach"
version = "6.1.0.1"
version = "6.1.0.20240331"
description = "Typing stubs for bleach"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
    {file = "types-bleach-6.1.0.1.tar.gz", hash = "sha256:1e43c437e734a90efe4f40ebfe831057599568d3b275939ffbd6094848a18a27"},
    {file = "types_bleach-6.1.0.1-py3-none-any.whl", hash = "sha256:f83f80e0709f13d809a9c79b958a1089df9b99e68059287beb196e38967e4ddf"},
    {file = "types-bleach-6.1.0.20240331.tar.gz", hash = "sha256:2ee858a84fb06fc2225ff56ba2f7f6c88b65638659efae0d7bfd6b24a1b5a524"},
    {file = "types_bleach-6.1.0.20240331-py3-none-any.whl", hash = "sha256:399bc59bfd20a36a56595f13f805e56c8a08e5a5c07903e5cf6fafb5a5107dd4"},
]

[package.dependencies]
types-html5lib = "*"

[[package]]
name = "types-commonmark"
version = "0.9.2.20240106"
@@ -3071,6 +3074,17 @@ files = [
    {file = "types_commonmark-0.9.2.20240106-py3-none-any.whl", hash = "sha256:606d9de1e3a96cab0b1c0b6cccf4df099116148d1d864d115fde2e27ad6877c3"},
]

[[package]]
name = "types-html5lib"
version = "1.1.11.20240228"
description = "Typing stubs for html5lib"
optional = false
python-versions = ">=3.8"
files = [
    {file = "types-html5lib-1.1.11.20240228.tar.gz", hash = "sha256:22736b7299e605ec4ba539d48691e905fd0c61c3ea610acc59922232dc84cede"},
    {file = "types_html5lib-1.1.11.20240228-py3-none-any.whl", hash = "sha256:af5de0125cb0fe5667543b158db83849b22e25c0e36c9149836b095548bf1020"},
]

[[package]]
name = "types-jsonschema"
version = "4.21.0.20240311"
@@ -3109,13 +3123,13 @@ files = [

[[package]]
name = "types-pillow"
version = "10.2.0.20240125"
version = "10.2.0.20240415"
description = "Typing stubs for Pillow"
optional = false
python-versions = ">=3.8"
files = [
    {file = "types-Pillow-10.2.0.20240125.tar.gz", hash = "sha256:c449b2c43b9fdbe0494a7b950e6b39a4e50516091213fec24ef3f33c1d017717"},
    {file = "types_Pillow-10.2.0.20240125-py3-none-any.whl", hash = "sha256:322dbae32b4b7918da5e8a47c50ac0f24b0aa72a804a23857620f2722b03c858"},
    {file = "types-Pillow-10.2.0.20240415.tar.gz", hash = "sha256:dd6058027639bcdc66ba78b228cc25fdae42524c2150c78c804da427e7e76e70"},
    {file = "types_Pillow-10.2.0.20240415-py3-none-any.whl", hash = "sha256:f933332b7e96010bae9b9cf82a4c9979ff0c270d63f5c5bbffb2d789b85cd00b"},
]

[[package]]
@@ -3156,13 +3170,13 @@ files = [

[[package]]
name = "types-requests"
version = "2.31.0.20240125"
version = "2.31.0.20240406"
description = "Typing stubs for requests"
optional = false
python-versions = ">=3.8"
files = [
    {file = "types-requests-2.31.0.20240125.tar.gz", hash = "sha256:03a28ce1d7cd54199148e043b2079cdded22d6795d19a2c2a6791a4b2b5e2eb5"},
    {file = "types_requests-2.31.0.20240125-py3-none-any.whl", hash = "sha256:9592a9a4cb92d6d75d9b491a41477272b710e021011a2a3061157e2fb1f1a5d1"},
    {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"},
    {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"},
]

[package.dependencies]
@@ -3181,13 +3195,13 @@ files = [

[[package]]
name = "typing-extensions"
version = "4.9.0"
version = "4.11.0"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
    {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"},
    {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"},
    {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
    {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
]

[[package]]
@@ -3451,4 +3465,4 @@ user-search = ["pyicu"]

[metadata]
lock-version = "2.0"
python-versions = "^3.8.0"
content-hash = "b510fa05f4ea33194bec079f5d04ebb3f9ffbb5c1ea96a0341d57ba770ef81e6"
content-hash = "1951f2b4623138d47db08a405edd970e67599d05804bb459af21a085e1665f69"
@@ -96,7 +96,7 @@ module-name = "synapse.synapse_rust"

[tool.poetry]
name = "matrix-synapse"
version = "1.104.0"
version = "1.106.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later"

@@ -321,7 +321,7 @@ all = [
# This helps prevent merge conflicts when running a batch of dependabot updates.
isort = ">=5.10.1"
black = ">=22.7.0"
ruff = "0.3.2"
ruff = "0.3.7"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"
@@ -7,7 +7,7 @@ name = "synapse"
version = "0.1.0"

edition = "2021"
rust-version = "1.65.0"
rust-version = "1.66.0"

[lib]
name = "synapse"

@@ -23,8 +23,13 @@ name = "synapse.synapse_rust"

[dependencies]
anyhow = "1.0.63"
base64 = "0.21.7"
bytes = "1.6.0"
headers = "0.4.0"
http = "1.1.0"
lazy_static = "1.4.0"
log = "0.4.17"
mime = "0.3.17"
pyo3 = { version = "0.20.0", features = [
    "macros",
    "anyhow",

@@ -34,8 +39,10 @@ pyo3 = { version = "0.20.0", features = [
pyo3-log = "0.9.0"
pythonize = "0.20.0"
regex = "1.6.0"
sha2 = "0.10.8"
serde = { version = "1.0.144", features = ["derive"] }
serde_json = "1.0.85"
ulid = "1.1.2"

[features]
extension-module = ["pyo3/extension-module"]
rust/src/errors.rs (new file)
@@ -0,0 +1,60 @@
/*
 * This file is licensed under the Affero General Public License (AGPL) version 3.
 *
 * Copyright (C) 2024 New Vector, Ltd
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * See the GNU Affero General Public License for more details:
 * <https://www.gnu.org/licenses/agpl-3.0.html>.
 */

#![allow(clippy::new_ret_no_self)]

use std::collections::HashMap;

use http::{HeaderMap, StatusCode};
use pyo3::{exceptions::PyValueError, import_exception};

import_exception!(synapse.api.errors, SynapseError);

impl SynapseError {
    pub fn new(
        code: StatusCode,
        message: String,
        errcode: &'static str,
        additional_fields: Option<HashMap<String, String>>,
        headers: Option<HeaderMap>,
    ) -> pyo3::PyErr {
        // Transform the HeaderMap into a HashMap<String, String>
        let headers = if let Some(headers) = headers {
            let mut map = HashMap::with_capacity(headers.len());
            for (key, value) in headers.iter() {
                let Ok(value) = value.to_str() else {
                    // This should never happen, but we don't want to panic in case it does
                    return PyValueError::new_err(
                        "Could not construct SynapseError: header value is not valid ASCII",
                    );
                };

                map.insert(key.as_str().to_owned(), value.to_owned());
            }
            Some(map)
        } else {
            None
        };

        SynapseError::new_err((code.as_u16(), message, errcode, additional_fields, headers))
    }
}

import_exception!(synapse.api.errors, NotFoundError);

impl NotFoundError {
    pub fn new() -> pyo3::PyErr {
        NotFoundError::new_err(())
    }
}
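
Because `import_exception!` binds these names to the real `synapse.api.errors` classes, an error raised from Rust surfaces in Python as an ordinary `SynapseError`; the tuple passed to `SynapseError::new_err` becomes the exception's positional arguments. A rough sketch of the Python-side view (illustrative only; the calling function and its use here are hypothetical):

from synapse.api.errors import SynapseError

def call_into_rust(handler, twisted_request):
    try:
        handler.handle_post(twisted_request)
    except SynapseError as e:
        # code/msg/errcode come from the (u16, String, &str, ...) tuple
        # that the Rust side passed to SynapseError::new_err.
        print(e.code, e.msg, e.errcode)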
rust/src/http.rs (new file)
@@ -0,0 +1,165 @@
/*
 * This file is licensed under the Affero General Public License (AGPL) version 3.
 *
 * Copyright (C) 2024 New Vector, Ltd
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * See the GNU Affero General Public License for more details:
 * <https://www.gnu.org/licenses/agpl-3.0.html>.
 */

use bytes::{Buf, BufMut, Bytes, BytesMut};
use headers::{Header, HeaderMapExt};
use http::{HeaderName, HeaderValue, Method, Request, Response, StatusCode, Uri};
use pyo3::{
    exceptions::PyValueError,
    types::{PyBytes, PySequence, PyTuple},
    PyAny, PyResult,
};

use crate::errors::SynapseError;

/// Read a file-like Python object by chunks
///
/// # Errors
///
/// Returns an error if calling the `read` on the Python object failed
fn read_io_body(body: &PyAny, chunk_size: usize) -> PyResult<Bytes> {
    let mut buf = BytesMut::new();
    loop {
        let bytes: &PyBytes = body.call_method1("read", (chunk_size,))?.downcast()?;
        if bytes.as_bytes().is_empty() {
            return Ok(buf.into());
        }
        buf.put(bytes.as_bytes());
    }
}

/// Transform a Twisted `IRequest` to an [`http::Request`]
///
/// It uses the following members of `IRequest`:
/// - `content`, which is expected to be a file-like object with a `read` method
/// - `uri`, which is expected to be a valid URI as `bytes`
/// - `method`, which is expected to be a valid HTTP method as `bytes`
/// - `requestHeaders`, which is expected to have a `getAllRawHeaders` method
///
/// # Errors
///
/// Returns an error if the Python object doesn't properly implement `IRequest`
pub fn http_request_from_twisted(request: &PyAny) -> PyResult<Request<Bytes>> {
    let content = request.getattr("content")?;
    let body = read_io_body(content, 4096)?;

    let mut req = Request::new(body);

    let uri: &PyBytes = request.getattr("uri")?.downcast()?;
    *req.uri_mut() =
        Uri::try_from(uri.as_bytes()).map_err(|_| PyValueError::new_err("invalid uri"))?;

    let method: &PyBytes = request.getattr("method")?.downcast()?;
    *req.method_mut() = Method::from_bytes(method.as_bytes())
        .map_err(|_| PyValueError::new_err("invalid method"))?;

    let headers_iter = request
        .getattr("requestHeaders")?
        .call_method0("getAllRawHeaders")?
        .iter()?;

    for header in headers_iter {
        let header = header?;
        let header: &PyTuple = header.downcast()?;
        let name: &PyBytes = header.get_item(0)?.downcast()?;
        let name = HeaderName::from_bytes(name.as_bytes())
            .map_err(|_| PyValueError::new_err("invalid header name"))?;

        let values: &PySequence = header.get_item(1)?.downcast()?;
        for index in 0..values.len()? {
            let value: &PyBytes = values.get_item(index)?.downcast()?;
            let value = HeaderValue::from_bytes(value.as_bytes())
                .map_err(|_| PyValueError::new_err("invalid header value"))?;
            req.headers_mut().append(name.clone(), value);
        }
    }

    Ok(req)
}

/// Send an [`http::Response`] through a Twisted `IRequest`
///
/// It uses the following members of `IRequest`:
///
/// - `responseHeaders`, which is expected to have a `addRawHeader(bytes, bytes)` method
/// - `setResponseCode(int)` method
/// - `write(bytes)` method
/// - `finish()` method
///
/// # Errors
///
/// Returns an error if the Python object doesn't properly implement `IRequest`
pub fn http_response_to_twisted<B>(request: &PyAny, response: Response<B>) -> PyResult<()>
where
    B: Buf,
{
    let (parts, mut body) = response.into_parts();

    request.call_method1("setResponseCode", (parts.status.as_u16(),))?;

    let response_headers = request.getattr("responseHeaders")?;
    for (name, value) in parts.headers.iter() {
        response_headers.call_method1("addRawHeader", (name.as_str(), value.as_bytes()))?;
    }

    while body.remaining() != 0 {
        let chunk = body.chunk();
        request.call_method1("write", (chunk,))?;
        body.advance(chunk.len());
    }

    request.call_method0("finish")?;

    Ok(())
}

/// An extension trait for [`HeaderMap`] that provides typed access to headers, and throws the
/// right python exceptions when the header is missing or fails to parse.
///
/// [`HeaderMap`]: headers::HeaderMap
pub trait HeaderMapPyExt: HeaderMapExt {
    /// Get a header from the map, returning an error if it is missing or invalid.
    fn typed_get_required<H>(&self) -> PyResult<H>
    where
        H: Header,
    {
        self.typed_get_optional::<H>()?.ok_or_else(|| {
            SynapseError::new(
                StatusCode::BAD_REQUEST,
                format!("Missing required header: {}", H::name()),
                "M_MISSING_PARAM",
                None,
                None,
            )
        })
    }

    /// Get a header from the map, returning `None` if it is missing and an error if it is invalid.
    fn typed_get_optional<H>(&self) -> PyResult<Option<H>>
    where
        H: Header,
    {
        self.typed_try_get::<H>().map_err(|_| {
            SynapseError::new(
                StatusCode::BAD_REQUEST,
                format!("Invalid header: {}", H::name()),
                "M_INVALID_PARAM",
                None,
                None,
            )
        })
    }
}

impl<T: HeaderMapExt> HeaderMapPyExt for T {}
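
The doc comments above spell out the exact slice of Twisted's `IRequest` that the conversion relies on, so any object exposing those four members would round-trip through `http_request_from_twisted`. A minimal, hypothetical Python stub along those lines (useful as a test aid; not Synapse code):

import io

class FakeHeaders:
    def getAllRawHeaders(self):
        # Twisted yields (name, [values]) pairs of bytes.
        return [(b"Content-Type", [b"text/plain"])]

class FakeRequest:
    # Only the members read by http_request_from_twisted:
    content = io.BytesIO(b"hello")  # file-like, read(n) -> bytes
    uri = b"/_synapse/client/rendezvous"
    method = b"POST"
    requestHeaders = FakeHeaders()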
@@ -3,8 +3,11 @@ use pyo3::prelude::*;
use pyo3_log::ResetHandle;

pub mod acl;
pub mod errors;
pub mod events;
pub mod http;
pub mod push;
pub mod rendezvous;

lazy_static! {
    static ref LOGGING_HANDLE: ResetHandle = pyo3_log::init();

@@ -43,6 +46,7 @@ fn synapse_rust(py: Python<'_>, m: &PyModule) -> PyResult<()> {
    acl::register_module(py, m)?;
    push::register_module(py, m)?;
    events::register_module(py, m)?;
    rendezvous::register_module(py, m)?;

    Ok(())
}
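
Once registered here and added to `sys.modules` (see `register_module` in rust/src/rendezvous/mod.rs below), the new submodule is importable like any other Python module:

# After building the Rust extension:
from synapse.synapse_rust import rendezvous

handler_cls = rendezvous.RendezvousHandler  # class exported by the module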
@@ -304,12 +304,12 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/underride/.m.rule.room_one_to_one"),
        rule_id: Cow::Borrowed("global/underride/.m.rule.encrypted_room_one_to_one"),
        priority_class: 1,
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Cow::Borrowed("m.room.message"),
                pattern: Cow::Borrowed("m.room.encrypted"),
            })),
            Condition::Known(KnownCondition::RoomMemberCount {
                is: Some(Cow::Borrowed("2")),

@@ -320,12 +320,12 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/underride/.m.rule.encrypted_room_one_to_one"),
        rule_id: Cow::Borrowed("global/underride/.m.rule.room_one_to_one"),
        priority_class: 1,
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Cow::Borrowed("m.room.encrypted"),
                pattern: Cow::Borrowed("m.room.message"),
            })),
            Condition::Known(KnownCondition::RoomMemberCount {
                is: Some(Cow::Borrowed("2")),
rust/src/rendezvous/mod.rs (new file)
@@ -0,0 +1,315 @@
/*
 * This file is licensed under the Affero General Public License (AGPL) version 3.
 *
 * Copyright (C) 2024 New Vector, Ltd
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * See the GNU Affero General Public License for more details:
 * <https://www.gnu.org/licenses/agpl-3.0.html>.
 *
 */

use std::{
    collections::{BTreeMap, HashMap},
    time::{Duration, SystemTime},
};

use bytes::Bytes;
use headers::{
    AccessControlAllowOrigin, AccessControlExposeHeaders, CacheControl, ContentLength, ContentType,
    HeaderMapExt, IfMatch, IfNoneMatch, Pragma,
};
use http::{header::ETAG, HeaderMap, Response, StatusCode, Uri};
use mime::Mime;
use pyo3::{
    exceptions::PyValueError, pyclass, pymethods, types::PyModule, Py, PyAny, PyObject, PyResult,
    Python, ToPyObject,
};
use ulid::Ulid;

use self::session::Session;
use crate::{
    errors::{NotFoundError, SynapseError},
    http::{http_request_from_twisted, http_response_to_twisted, HeaderMapPyExt},
};

mod session;

// n.b. Because OPTIONS requests are handled by the Python code, we don't need to set Access-Control-Allow-Headers.
fn prepare_headers(headers: &mut HeaderMap, session: &Session) {
    headers.typed_insert(AccessControlAllowOrigin::ANY);
    headers.typed_insert(AccessControlExposeHeaders::from_iter([ETAG]));
    headers.typed_insert(Pragma::no_cache());
    headers.typed_insert(CacheControl::new().with_no_store());
    headers.typed_insert(session.etag());
    headers.typed_insert(session.expires());
    headers.typed_insert(session.last_modified());
}

#[pyclass]
struct RendezvousHandler {
    base: Uri,
    clock: PyObject,
    sessions: BTreeMap<Ulid, Session>,
    capacity: usize,
    max_content_length: u64,
    ttl: Duration,
}

impl RendezvousHandler {
    /// Check the input headers of a request which sets data for a session, and return the content type.
    fn check_input_headers(&self, headers: &HeaderMap) -> PyResult<Mime> {
        let ContentLength(content_length) = headers.typed_get_required()?;

        if content_length > self.max_content_length {
            return Err(SynapseError::new(
                StatusCode::PAYLOAD_TOO_LARGE,
                "Payload too large".to_owned(),
                "M_TOO_LARGE",
                None,
                None,
            ));
        }

        let content_type: ContentType = headers.typed_get_required()?;

        // Content-Type must be text/plain
        if content_type != ContentType::text() {
            return Err(SynapseError::new(
                StatusCode::BAD_REQUEST,
                "Content-Type must be text/plain".to_owned(),
                "M_INVALID_PARAM",
                None,
                None,
            ));
        }

        Ok(content_type.into())
    }

    /// Evict expired sessions and remove the oldest sessions until we're under the capacity.
    fn evict(&mut self, now: SystemTime) {
        // First remove all the entries which expired
        self.sessions.retain(|_, session| !session.expired(now));

        // Then we remove the oldest entries until we're under the limit
        while self.sessions.len() > self.capacity {
            self.sessions.pop_first();
        }
    }
}

#[pymethods]
impl RendezvousHandler {
    #[new]
    #[pyo3(signature = (homeserver, /, capacity=100, max_content_length=4*1024, eviction_interval=60*1000, ttl=60*1000))]
    fn new(
        py: Python<'_>,
        homeserver: &PyAny,
        capacity: usize,
        max_content_length: u64,
        eviction_interval: u64,
        ttl: u64,
    ) -> PyResult<Py<Self>> {
        let base: String = homeserver
            .getattr("config")?
            .getattr("server")?
            .getattr("public_baseurl")?
            .extract()?;
        let base = Uri::try_from(format!("{base}_synapse/client/rendezvous"))
            .map_err(|_| PyValueError::new_err("Invalid base URI"))?;

        let clock = homeserver.call_method0("get_clock")?.to_object(py);

        // Construct a Python object so that we can get a reference to the
        // evict method and schedule it to run.
        let self_ = Py::new(
            py,
            Self {
                base,
                clock,
                sessions: BTreeMap::new(),
                capacity,
                max_content_length,
                ttl: Duration::from_millis(ttl),
            },
        )?;

        let evict = self_.getattr(py, "_evict")?;
        homeserver.call_method0("get_clock")?.call_method(
            "looping_call",
            (evict, eviction_interval),
            None,
        )?;

        Ok(self_)
    }

    fn _evict(&mut self, py: Python<'_>) -> PyResult<()> {
        let clock = self.clock.as_ref(py);
        let now: u64 = clock.call_method0("time_msec")?.extract()?;
        let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now);
        self.evict(now);

        Ok(())
    }

    fn handle_post(&mut self, py: Python<'_>, twisted_request: &PyAny) -> PyResult<()> {
        let request = http_request_from_twisted(twisted_request)?;

        let content_type = self.check_input_headers(request.headers())?;

        let clock = self.clock.as_ref(py);
        let now: u64 = clock.call_method0("time_msec")?.extract()?;
        let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now);

        // We trigger an immediate eviction if we're at 2x the capacity
        if self.sessions.len() >= self.capacity * 2 {
            self.evict(now);
        }

        // Generate a new ULID for the session from the current time.
        let id = Ulid::from_datetime(now);

        let uri = format!("{base}/{id}", base = self.base);

        let body = request.into_body();

        let session = Session::new(body, content_type, now, self.ttl);

        let response = serde_json::json!({
            "url": uri,
        })
        .to_string();

        let mut response = Response::new(response.as_bytes());
        *response.status_mut() = StatusCode::CREATED;
        response.headers_mut().typed_insert(ContentType::json());
        prepare_headers(response.headers_mut(), &session);
        http_response_to_twisted(twisted_request, response)?;

        self.sessions.insert(id, session);

        Ok(())
    }

    fn handle_get(&mut self, py: Python<'_>, twisted_request: &PyAny, id: &str) -> PyResult<()> {
        let request = http_request_from_twisted(twisted_request)?;

        let if_none_match: Option<IfNoneMatch> = request.headers().typed_get_optional()?;

        let now: u64 = self.clock.call_method0(py, "time_msec")?.extract(py)?;
        let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now);

        let id: Ulid = id.parse().map_err(|_| NotFoundError::new())?;
        let session = self
            .sessions
            .get(&id)
            .filter(|s| !s.expired(now))
            .ok_or_else(NotFoundError::new)?;

        if let Some(if_none_match) = if_none_match {
            if !if_none_match.precondition_passes(&session.etag()) {
                let mut response = Response::new(Bytes::new());
                *response.status_mut() = StatusCode::NOT_MODIFIED;
                prepare_headers(response.headers_mut(), session);
                http_response_to_twisted(twisted_request, response)?;
                return Ok(());
            }
        }

        let mut response = Response::new(session.data());
        *response.status_mut() = StatusCode::OK;
        let headers = response.headers_mut();
        prepare_headers(headers, session);
        headers.typed_insert(session.content_type());
        headers.typed_insert(session.content_length());
        http_response_to_twisted(twisted_request, response)?;

        Ok(())
    }

    fn handle_put(&mut self, py: Python<'_>, twisted_request: &PyAny, id: &str) -> PyResult<()> {
        let request = http_request_from_twisted(twisted_request)?;

        let content_type = self.check_input_headers(request.headers())?;

        let if_match: IfMatch = request.headers().typed_get_required()?;

        let data = request.into_body();

        let now: u64 = self.clock.call_method0(py, "time_msec")?.extract(py)?;
        let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now);

        let id: Ulid = id.parse().map_err(|_| NotFoundError::new())?;
        let session = self
            .sessions
            .get_mut(&id)
            .filter(|s| !s.expired(now))
            .ok_or_else(NotFoundError::new)?;

        if !if_match.precondition_passes(&session.etag()) {
            let mut headers = HeaderMap::new();
            prepare_headers(&mut headers, session);

            let mut additional_fields = HashMap::with_capacity(1);
            additional_fields.insert(
                String::from("org.matrix.msc4108.errcode"),
                String::from("M_CONCURRENT_WRITE"),
            );

            return Err(SynapseError::new(
                StatusCode::PRECONDITION_FAILED,
                "ETag does not match".to_owned(),
                "M_UNKNOWN", // Would be M_CONCURRENT_WRITE
                Some(additional_fields),
                Some(headers),
            ));
        }

        session.update(data, content_type, now);

        let mut response = Response::new(Bytes::new());
        *response.status_mut() = StatusCode::ACCEPTED;
        prepare_headers(response.headers_mut(), session);
        http_response_to_twisted(twisted_request, response)?;

        Ok(())
    }

    fn handle_delete(&mut self, twisted_request: &PyAny, id: &str) -> PyResult<()> {
        let _request = http_request_from_twisted(twisted_request)?;

        let id: Ulid = id.parse().map_err(|_| NotFoundError::new())?;
        let _session = self.sessions.remove(&id).ok_or_else(NotFoundError::new)?;

        let mut response = Response::new(Bytes::new());
        *response.status_mut() = StatusCode::NO_CONTENT;
        response
            .headers_mut()
            .typed_insert(AccessControlAllowOrigin::ANY);
        http_response_to_twisted(twisted_request, response)?;

        Ok(())
    }
}

pub fn register_module(py: Python<'_>, m: &PyModule) -> PyResult<()> {
    let child_module = PyModule::new(py, "rendezvous")?;

    child_module.add_class::<RendezvousHandler>()?;

    m.add_submodule(child_module)?;

    // We need to manually add the module to sys.modules to make `from
    // synapse.synapse_rust import rendezvous` work.
    py.import("sys")?
        .getattr("modules")?
        .set_item("synapse.synapse_rust.rendezvous", child_module)?;

    Ok(())
}
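
End to end, the handler implements a small ETag-guarded channel: POST creates a session, PUT replaces its payload guarded by If-Match, GET polls with If-None-Match, and DELETE tears it down. A rough client-side sketch of that flow using the requests library (the base URL is a placeholder; real clients follow MSC4108):

import requests

BASE = "https://example.com/_synapse/client/rendezvous"  # placeholder

# POST creates a session. The body must be text/plain and within
# max_content_length; the 201 response carries the session URL in JSON
# plus an ETag header.
r = requests.post(BASE, data=b"payload", headers={"Content-Type": "text/plain"})
url, etag = r.json()["url"], r.headers["ETag"]

# PUT must present the last seen ETag via If-Match; a stale one yields
# 412 Precondition Failed with org.matrix.msc4108.errcode set to
# M_CONCURRENT_WRITE.
r = requests.put(url, data=b"update",
                 headers={"Content-Type": "text/plain", "If-Match": etag})
etag = r.headers["ETag"]  # 202 Accepted with a fresh ETag

# GET with If-None-Match polls cheaply: 304 until the payload changes.
r = requests.get(url, headers={"If-None-Match": etag})
assert r.status_code in (200, 304)

# DELETE removes the session (204 No Content).
requests.delete(url)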
rust/src/rendezvous/session.rs (new file)
@@ -0,0 +1,91 @@
/*
 * This file is licensed under the Affero General Public License (AGPL) version 3.
 *
 * Copyright (C) 2024 New Vector, Ltd
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * See the GNU Affero General Public License for more details:
 * <https://www.gnu.org/licenses/agpl-3.0.html>.
 */

use std::time::{Duration, SystemTime};

use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine as _};
use bytes::Bytes;
use headers::{ContentLength, ContentType, ETag, Expires, LastModified};
use mime::Mime;
use sha2::{Digest, Sha256};

/// A single session, containing data, metadata, and expiry information.
pub struct Session {
    hash: [u8; 32],
    data: Bytes,
    content_type: Mime,
    last_modified: SystemTime,
    expires: SystemTime,
}

impl Session {
    /// Create a new session with the given data, content type, and time-to-live.
    pub fn new(data: Bytes, content_type: Mime, now: SystemTime, ttl: Duration) -> Self {
        let hash = Sha256::digest(&data).into();
        Self {
            hash,
            data,
            content_type,
            expires: now + ttl,
            last_modified: now,
        }
    }

    /// Returns true if the session has expired at the given time.
    pub fn expired(&self, now: SystemTime) -> bool {
        self.expires <= now
    }

    /// Update the session with new data, content type, and last modified time.
    pub fn update(&mut self, data: Bytes, content_type: Mime, now: SystemTime) {
        self.hash = Sha256::digest(&data).into();
        self.data = data;
        self.content_type = content_type;
        self.last_modified = now;
    }

    /// Returns the Content-Type header of the session.
    pub fn content_type(&self) -> ContentType {
        self.content_type.clone().into()
    }

    /// Returns the Content-Length header of the session.
    pub fn content_length(&self) -> ContentLength {
        ContentLength(self.data.len() as _)
    }

    /// Returns the ETag header of the session.
    pub fn etag(&self) -> ETag {
        let encoded = URL_SAFE_NO_PAD.encode(self.hash);
        // SAFETY: Base64 encoding is URL-safe, so ETag-safe
        format!("\"{encoded}\"")
            .parse()
            .expect("base64-encoded hash should be URL-safe")
    }

    /// Returns the Last-Modified header of the session.
    pub fn last_modified(&self) -> LastModified {
        self.last_modified.into()
    }

    /// Returns the Expires header of the session.
    pub fn expires(&self) -> Expires {
        self.expires.into()
    }

    /// Returns the current data stored in the session.
    pub fn data(&self) -> Bytes {
        self.data.clone()
    }
}
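
The ETag is derived purely from the payload: a SHA-256 digest, base64url-encoded without padding, wrapped in double quotes. The equivalent computation in Python, as a sketch for illustration:

import base64
import hashlib

def session_etag(data: bytes) -> str:
    # Mirrors Session::etag above: sha256 -> base64url (no padding) -> quoted.
    digest = hashlib.sha256(data).digest()
    encoded = base64.urlsafe_b64encode(digest).rstrip(b"=").decode()
    return f'"{encoded}"'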
@@ -214,7 +214,7 @@ fi

extra_test_args=()

test_packages="./tests/csapi ./tests ./tests/msc3874 ./tests/msc3890 ./tests/msc3391 ./tests/msc3930 ./tests/msc3902"
test_packages="./tests/csapi ./tests ./tests/msc3874 ./tests/msc3890 ./tests/msc3391 ./tests/msc3930 ./tests/msc3902 ./tests/msc3967"

# Enable dirty runs, so tests will reuse the same container where possible.
# This significantly speeds up tests, but increases the possibility of test pollution.
@@ -52,6 +52,7 @@ DEFAULT_SUBJECTS = {
    "invite_from_person_to_space": "[%(app)s] %(person)s has invited you to join the %(space)s space on %(app)s...",
    "password_reset": "[%(server_name)s] Password reset",
    "email_validation": "[%(server_name)s] Validate your email",
    "email_already_in_use": "[%(server_name)s] Email already in use",
}

LEGACY_TEMPLATE_DIR_WARNING = """

@@ -76,6 +77,7 @@ class EmailSubjectConfig:
    invite_from_person_to_space: str
    password_reset: str
    email_validation: str
    email_already_in_use: str


class EmailConfig(Config):

@@ -180,6 +182,12 @@ class EmailConfig(Config):
        registration_template_text = email_config.get(
            "registration_template_text", "registration.txt"
        )
        already_in_use_template_html = email_config.get(
            "already_in_use_template_html", "already_in_use.html"
        )
        already_in_use_template_text = email_config.get(
            "already_in_use_template_text", "already_in_use.txt"
        )
        add_threepid_template_html = email_config.get(
            "add_threepid_template_html", "add_threepid.html"
        )

@@ -215,6 +223,8 @@ class EmailConfig(Config):
            self.email_password_reset_template_text,
            self.email_registration_template_html,
            self.email_registration_template_text,
            self.email_already_in_use_template_html,
            self.email_already_in_use_template_text,
            self.email_add_threepid_template_html,
            self.email_add_threepid_template_text,
            self.email_password_reset_template_confirmation_html,

@@ -230,6 +240,8 @@ class EmailConfig(Config):
            password_reset_template_text,
            registration_template_html,
            registration_template_text,
            already_in_use_template_html,
            already_in_use_template_text,
            add_threepid_template_html,
            add_threepid_template_text,
            "password_reset_confirmation.html",
@@ -393,11 +393,6 @@ class ExperimentalConfig(Config):
        # MSC3967: Do not require UIA when first uploading cross signing keys
        self.msc3967_enabled = experimental.get("msc3967_enabled", False)

        # MSC3981: Recurse relations
        self.msc3981_recurse_relations = experimental.get(
            "msc3981_recurse_relations", False
        )

        # MSC3861: Matrix architecture change to delegate authentication via OIDC
        try:
            self.msc3861 = MSC3861(**experimental.get("msc3861", {}))

@@ -409,11 +404,6 @@ class ExperimentalConfig(Config):
        # Check that none of the other config options conflict with MSC3861 when enabled
        self.msc3861.check_config_conflicts(self.root)

        # MSC4010: Do not allow setting m.push_rules account data.
        self.msc4010_push_rules_account_data = experimental.get(
            "msc4010_push_rules_account_data", False
        )

        self.msc4028_push_encrypted_events = experimental.get(
            "msc4028_push_encrypted_events", False
        )

@@ -421,3 +411,24 @@ class ExperimentalConfig(Config):
        self.msc4069_profile_inhibit_propagation = experimental.get(
            "msc4069_profile_inhibit_propagation", False
        )

        # MSC4108: Mechanism to allow OIDC sign in and E2EE set up via QR code
        self.msc4108_enabled = experimental.get("msc4108_enabled", False)

        self.msc4108_delegation_endpoint: Optional[str] = experimental.get(
            "msc4108_delegation_endpoint", None
        )

        if (
            self.msc4108_enabled or self.msc4108_delegation_endpoint is not None
        ) and not self.msc3861.enabled:
            raise ConfigError(
                "MSC4108 requires MSC3861 to be enabled",
                ("experimental", "msc4108_delegation_endpoint"),
            )

        if self.msc4108_delegation_endpoint is not None and self.msc4108_enabled:
            raise ConfigError(
                "You cannot have MSC4108 both enabled and delegated at the same time",
                ("experimental", "msc4108_delegation_endpoint"),
            )
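
The two ConfigError checks above reduce to a simple invariant: using MSC4108 in either form requires MSC3861, and enabling it locally is mutually exclusive with delegating it. A condensed sketch of that validation (the function name is illustrative, not Synapse's):

from typing import Optional

def msc4108_config_ok(msc3861_enabled: bool,
                      msc4108_enabled: bool,
                      delegation_endpoint: Optional[str]) -> bool:
    wants_msc4108 = msc4108_enabled or delegation_endpoint is not None
    if wants_msc4108 and not msc3861_enabled:
        return False  # "MSC4108 requires MSC3861 to be enabled"
    if msc4108_enabled and delegation_endpoint is not None:
        return False  # cannot both enable and delegate MSC4108
    return True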
@@ -261,11 +261,22 @@ class DeactivateAccountHandler:
        user = UserID.from_string(user_id)

        rooms_for_user = await self.store.get_rooms_for_user(user_id)
        requester = create_requester(user, authenticated_entity=self._server_name)
        should_erase = await self.store.is_user_erased(user_id)

        for room_id in rooms_for_user:
            logger.info("User parter parting %r from %r", user_id, room_id)
            try:
                # Before parting the user, redact all membership events if requested
                if should_erase:
                    event_ids = await self.store.get_membership_event_ids_for_user(
                        user_id, room_id
                    )
                    for event_id in event_ids:
                        await self.store.expire_event(event_id)

                await self._room_member_handler.update_membership(
                    create_requester(user, authenticated_entity=self._server_name),
                    requester,
                    user,
                    room_id,
                    "leave",
@@ -1476,6 +1476,42 @@ class E2eKeysHandler:
        else:
            return exists, self.clock.time_msec() < ts_replacable_without_uia_before

    async def has_different_keys(self, user_id: str, body: JsonDict) -> bool:
        """
        Check if a key provided in `body` differs from the same key stored in the DB. Returns
        true on the first difference. If a key exists in `body` but does not exist in the DB,
        returns True. If `body` has no keys, this always returns False.
        Note by 'key' we mean Matrix key rather than JSON key.

        The purpose of this function is to detect whether or not we need to apply UIA checks.
        We must apply UIA checks if any key in the database is being overwritten. If a key is
        being inserted for the first time, or if the key exactly matches what is in the database,
        then no UIA check needs to be performed.

        Args:
            user_id: The user who sent the `body`.
            body: The JSON request body from POST /keys/device_signing/upload
        Returns:
            True if any key in `body` has a different value in the database.
        """
        # Ensure that each key provided in the request body exactly matches the one we have stored.
        # The first time we see the DB having a different key to the matching request key, bail.
        # Note: we do not care if the DB has a key which the request does not specify, as we only
        # care about *replacements* or *insertions* (i.e. UPSERT)
        req_body_key_to_db_key = {
            "master_key": "master",
            "self_signing_key": "self_signing",
            "user_signing_key": "user_signing",
        }
        for req_body_key, db_key in req_body_key_to_db_key.items():
            if req_body_key in body:
                existing_key = await self.store.get_e2e_cross_signing_key(
                    user_id, db_key
                )
                if existing_key != body[req_body_key]:
                    return True
        return False

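In short: re-uploading byte-identical key material never registers as a difference, while replacing (or newly supplying) a key does. A self-contained illustration of the comparison logic with hypothetical key material:

def would_differ(stored: dict, body: dict) -> bool:
    # Mirrors has_different_keys: only keys present in the upload are compared.
    for field in ("master_key", "self_signing_key", "user_signing_key"):
        if field in body and stored.get(field) != body[field]:
            return True
    return False

stored = {"master_key": {"keys": {"ed25519:abc": "abc"}}}
# Identical re-upload: no difference, so no UIA needed on this basis.
assert not would_differ(stored, {"master_key": stored["master_key"]})
# Replacing the stored master key: difference detected, UIA applies.
assert would_differ(stored, {"master_key": {"keys": {"ed25519:xyz": "xyz"}}})
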
def _check_cross_signing_key(
    key: JsonDict, user_id: str, key_type: str, signing_key: Optional[VerifyKey] = None
@@ -391,9 +391,9 @@ class RelationsHandler:

        # Attempt to find another event to use as the latest event.
        potential_events, _ = await self._main_store.get_relations_for_event(
            room_id,
            event_id,
            event,
            room_id,
            RelationTypes.THREAD,
            direction=Direction.FORWARDS,
        )
@@ -956,6 +956,7 @@ class RoomCreationHandler:
            room_alias=room_alias,
            power_level_content_override=power_level_content_override,
            creator_join_profile=creator_join_profile,
            ignore_forced_encryption=ignore_forced_encryption,
        )

        # we avoid dropping the lock between invites, as otherwise joins can
@@ -1259,6 +1259,51 @@ class SyncHandler:
|
||||
await_full_state = True
|
||||
lazy_load_members = False
|
||||
|
||||
# For a non-gappy sync if the events in the timeline are simply a linear
|
||||
# chain (i.e. no merging/branching of the graph), then we know the state
|
||||
# delta between the end of the previous sync and start of the new one is
|
||||
# empty.
|
||||
#
|
||||
# c.f. #16941 for an example of why we can't do this for all non-gappy
|
||||
# syncs.
|
||||
is_linear_timeline = True
|
||||
if batch.events:
|
||||
# We need to make sure the first event in our batch points to the
|
||||
# last event in the previous batch.
|
||||
last_event_id_prev_batch = (
|
||||
await self.store.get_last_event_in_room_before_stream_ordering(
|
||||
room_id,
|
||||
end_token=since_token.room_key,
|
||||
)
|
||||
)
|
||||
|
||||
prev_event_id = last_event_id_prev_batch
|
||||
for e in batch.events:
|
||||
if e.prev_event_ids() != [prev_event_id]:
|
||||
is_linear_timeline = False
|
||||
break
|
||||
prev_event_id = e.event_id
|
||||
|
||||
if is_linear_timeline and not batch.limited:
|
||||
state_ids: StateMap[str] = {}
|
||||
if lazy_load_members:
|
||||
if members_to_fetch and batch.events:
|
||||
# We're lazy-loading, so the client might need some more
|
||||
# member events to understand the events in this timeline.
|
||||
# So we fish out all the member events corresponding to the
|
||||
# timeline here. The caller will then dedupe any redundant
|
||||
# ones.
|
||||
|
||||
state_ids = await self._state_storage_controller.get_state_ids_for_event(
|
||||
batch.events[0].event_id,
|
||||
# we only want members!
|
||||
state_filter=StateFilter.from_types(
|
||||
(EventTypes.Member, member) for member in members_to_fetch
|
||||
),
|
||||
await_full_state=False,
|
||||
)
|
||||
return state_ids
|
||||
|
||||
if batch:
|
||||
state_at_timeline_start = (
|
||||
await self._state_storage_controller.get_state_ids_for_event(
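
Note: the linearity test above reads cleanly in isolation: each timeline event must cite exactly the preceding event as its sole prev_event, anchored on the last event of the previous batch. A runnable sketch with a toy event type (field names mirror the handler; the data is invented):

    from dataclasses import dataclass
    from typing import List, Optional

    @dataclass
    class Event:
        event_id: str
        prev_event_ids: List[str]

    def is_linear_timeline(events: List[Event], last_event_id_prev_batch: Optional[str]) -> bool:
        prev = last_event_id_prev_batch
        for e in events:
            # A fork (several prev events) or a gap (wrong prev event) breaks the chain.
            if e.prev_event_ids != [prev]:
                return False
            prev = e.event_id
        return True

    chain = [Event("$b", ["$a"]), Event("$c", ["$b"])]
    assert is_linear_timeline(chain, "$a")  # straight chain: the state delta is empty
    assert not is_linear_timeline(chain, "$z")  # does not extend the previous batch
    assert not is_linear_timeline([Event("$m", ["$a", "$b"])], "$a")  # merge in the graph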

@@ -262,7 +262,8 @@ class _ProxyResponseBody(protocol.Protocol):
            self._request.finish()
        else:
            # Abort the underlying request since our remote request also failed.
            self._request.transport.abortConnection()
            if self._request.channel:
                self._request.channel.forceAbortClient()


class ProxySite(Site):

@@ -153,9 +153,9 @@ def return_json_error(
    # Only respond with an error response if we haven't already started writing,
    # otherwise let's just kill the connection
    if request.startedWriting:
        if request.transport:
        if request.channel:
            try:
                request.transport.abortConnection()
                request.channel.forceAbortClient()
            except Exception:
                # abortConnection throws if the connection is already closed
                pass

@@ -909,7 +909,19 @@ def set_cors_headers(request: "SynapseRequest") -> None:
    request.setHeader(
        b"Access-Control-Allow-Methods", b"GET, HEAD, POST, PUT, DELETE, OPTIONS"
    )
    if request.experimental_cors_msc3886:
    if request.path is not None and (
        request.path == b"/_matrix/client/unstable/org.matrix.msc4108/rendezvous"
        or request.path.startswith(b"/_synapse/client/rendezvous")
    ):
        request.setHeader(
            b"Access-Control-Allow-Headers",
            b"Content-Type, If-Match, If-None-Match",
        )
        request.setHeader(
            b"Access-Control-Expose-Headers",
            b"Synapse-Trace-Id, Server, ETag",
        )
    elif request.experimental_cors_msc3886:
        request.setHeader(
            b"Access-Control-Allow-Headers",
            b"X-Requested-With, Content-Type, Authorization, Date, If-Match, If-None-Match",

@@ -19,9 +19,11 @@
#
#

""" This module contains base REST classes for constructing REST servlets. """
"""This module contains base REST classes for constructing REST servlets."""

import enum
import logging
import urllib.parse as urlparse
from http import HTTPStatus
from typing import (
    TYPE_CHECKING,

@@ -65,17 +67,49 @@ def parse_integer(request: Request, name: str, default: int) -> int: ...


@overload
def parse_integer(request: Request, name: str, *, required: Literal[True]) -> int: ...
def parse_integer(
    request: Request, name: str, *, default: int, negative: bool
) -> int: ...


@overload
def parse_integer(
    request: Request, name: str, default: Optional[int] = None, required: bool = False
    request: Request, name: str, *, default: int, negative: bool = False
) -> int: ...


@overload
def parse_integer(
    request: Request, name: str, *, required: Literal[True], negative: bool = False
) -> int: ...


@overload
def parse_integer(
    request: Request, name: str, *, default: Literal[None], negative: bool = False
) -> None: ...


@overload
def parse_integer(request: Request, name: str, *, negative: bool) -> Optional[int]: ...


@overload
def parse_integer(
    request: Request,
    name: str,
    default: Optional[int] = None,
    required: bool = False,
    negative: bool = False,
) -> Optional[int]: ...


def parse_integer(
    request: Request, name: str, default: Optional[int] = None, required: bool = False
    request: Request,
    name: str,
    default: Optional[int] = None,
    required: bool = False,
    negative: bool = False,
) -> Optional[int]:
    """Parse an integer parameter from the request string

@@ -85,16 +119,17 @@ def parse_integer(
        default: value to use if the parameter is absent, defaults to None.
        required: whether to raise a 400 SynapseError if the parameter is absent,
            defaults to False.
        negative: whether to allow negative integers, defaults to False.

    Returns:
        An int value or the default.

    Raises:
        SynapseError: if the parameter is absent and required, or if the
            parameter is present and not an integer.
        SynapseError: if the parameter is absent and required, if the
            parameter is present and not an integer, or if the
            parameter is negative when negative integers are not allowed.
    """
    args: Mapping[bytes, Sequence[bytes]] = request.args  # type: ignore
    return parse_integer_from_args(args, name, default, required)
    return parse_integer_from_args(args, name, default, required, negative)


@overload

@@ -120,6 +155,7 @@ def parse_integer_from_args(
    name: str,
    default: Optional[int] = None,
    required: bool = False,
    negative: bool = False,
) -> Optional[int]: ...


@@ -128,6 +164,7 @@ def parse_integer_from_args(
    name: str,
    default: Optional[int] = None,
    required: bool = False,
    negative: bool = True,
) -> Optional[int]:
    """Parse an integer parameter from the request string

@@ -137,33 +174,37 @@ def parse_integer_from_args(
        default: value to use if the parameter is absent, defaults to None.
        required: whether to raise a 400 SynapseError if the parameter is absent,
            defaults to False.
        negative: whether to allow negative integers, defaults to True.

    Returns:
        An int value or the default.

    Raises:
        SynapseError: if the parameter is absent and required, or if the
            parameter is present and not an integer.
        SynapseError: if the parameter is absent and required, if the
            parameter is present and not an integer, or if the
            parameter is negative when negative integers are not allowed.
    """
    name_bytes = name.encode("ascii")

    if name_bytes in args:
        try:
            return int(args[name_bytes][0])
        except Exception:
            message = "Query parameter %r must be an integer" % (name,)
            raise SynapseError(
                HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM
            )
    else:
        if required:
            message = "Missing integer query parameter %r" % (name,)
            raise SynapseError(
                HTTPStatus.BAD_REQUEST, message, errcode=Codes.MISSING_PARAM
            )
        else:
    if name_bytes not in args:
        if not required:
            return default

        message = f"Missing required integer query parameter {name}"
        raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.MISSING_PARAM)

    try:
        integer = int(args[name_bytes][0])
    except Exception:
        message = f"Query parameter {name} must be an integer"
        raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM)

    if not negative and integer < 0:
        message = f"Query parameter {name} must be a positive integer."
        raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM)

    return integer


@overload
def parse_boolean(request: Request, name: str, default: bool) -> bool: ...
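
Note: with the negative flag, parse_integer_from_args now performs the bounds check that callers used to hand-roll. A usage sketch, assuming a Synapse checkout is importable; the args mapping mimics Twisted's request.args:

    from synapse.api.errors import SynapseError
    from synapse.http.servlet import parse_integer_from_args

    args = {b"from": [b"0"], b"limit": [b"-5"]}

    assert parse_integer_from_args(args, "from", default=0, negative=False) == 0
    try:
        parse_integer_from_args(args, "limit", default=100, negative=False)
    except SynapseError as e:
        print(e.code)  # 400 with errcode M_INVALID_PARAM, as in the deleted admin-API checks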

@@ -410,6 +451,87 @@ def parse_string(
    )


def parse_json(
    request: Request,
    name: str,
    default: Optional[dict] = None,
    required: bool = False,
    encoding: str = "ascii",
) -> Optional[JsonDict]:
    """
    Parse a JSON parameter from the request query string.

    Args:
        request: the twisted HTTP request.
        name: the name of the query parameter.
        default: value to use if the parameter is absent,
            defaults to None.
        required: whether to raise a 400 SynapseError if the
            parameter is absent, defaults to False.
        encoding: the encoding to decode the string content with.

    Returns:
        A JSON value, or `default` if the named query parameter was not found
        and `required` was False.

    Raises:
        SynapseError if the parameter is absent and required, or if the
            parameter is present and not a JSON object.
    """
    args: Mapping[bytes, Sequence[bytes]] = request.args  # type: ignore
    return parse_json_from_args(
        args,
        name,
        default,
        required=required,
        encoding=encoding,
    )


def parse_json_from_args(
    args: Mapping[bytes, Sequence[bytes]],
    name: str,
    default: Optional[dict] = None,
    required: bool = False,
    encoding: str = "ascii",
) -> Optional[JsonDict]:
    """
    Parse a JSON parameter from the request query string.

    Args:
        args: a mapping of request args as bytes to a list of bytes (e.g. request.args).
        name: the name of the query parameter.
        default: value to use if the parameter is absent,
            defaults to None.
        required: whether to raise a 400 SynapseError if the
            parameter is absent, defaults to False.
        encoding: the encoding to decode the string content with.

    Returns:
        A JSON value, or `default` if the named query parameter was not found
        and `required` was False.

    Raises:
        SynapseError if the parameter is absent and required, or if the
            parameter is present and not a JSON object.
    """
    name_bytes = name.encode("ascii")

    if name_bytes not in args:
        if not required:
            return default

        message = f"Missing required query parameter {name}"
        raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.MISSING_PARAM)

    json_str = parse_string_from_args(args, name, required=True, encoding=encoding)

    try:
        return json_decoder.decode(urlparse.unquote(json_str))
    except Exception:
        message = f"Query parameter {name} must be a valid JSON object"
        raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.NOT_JSON)


EnumT = TypeVar("EnumT", bound=enum.Enum)
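
Note: parse_json is a thin pipeline: take the raw query value, URL-unquote it, JSON-decode it. A stdlib-only round trip showing the same shape (synapse's json_decoder is, in effect, a configured json decoder):

    import json
    import urllib.parse

    # What a client sends as ?filter=... for an event filter:
    query_value = urllib.parse.quote('{"event_format": "federation", "limit": 3}').encode("ascii")

    # What parse_json_from_args does with request.args[b"filter"][0]:
    decoded = json.loads(urllib.parse.unquote(query_value.decode("ascii")))
    assert decoded["event_format"] == "federation"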

@@ -150,7 +150,8 @@ class SynapseRequest(Request):
                self.get_method(),
                self.get_redacted_uri(),
            )
            self.transport.abortConnection()
            if self.channel:
                self.channel.forceAbortClient()
            return
        super().handleContentChunk(data)


@@ -205,6 +205,22 @@ class Mailer:
            template_vars,
        )

    emails_sent_counter.labels("already_in_use")

    async def send_already_in_use_mail(self, email_address: str) -> None:
        """Send an email if the address is already bound to a user account

        Args:
            email_address: Email address we're sending the "already in use" mail to
        """

        await self.send_email(
            email_address,
            self.email_subjects.email_already_in_use
            % {"server_name": self.hs.config.server.server_name, "app": self.app_name},
            {},
        )

    emails_sent_counter.labels("add_threepid")

    async def send_add_threepid_mail(

12
synapse/res/templates/already_in_use.html
Normal file
@@ -0,0 +1,12 @@
{% extends "_base.html" %}
{% block title %}Email already in use{% endblock %}

{% block body %}
<p>You have asked us to register this email with a new Matrix account, but this email is already registered with an existing account.</p>

<p>Please reset your password if needed.</p>

<p>If this was not you, you can safely disregard this email.</p>

<p>Thank you.</p>
{% endblock %}

10
synapse/res/templates/already_in_use.txt
Normal file
@@ -0,0 +1,10 @@
Hello there,

You have asked us to register this email with a new Matrix account,
but this email is already registered with an existing account.

Please reset your password if needed.

If this was not you, you can safely disregard this email.

Thank you.
@@ -23,7 +23,7 @@ from http import HTTPStatus
from typing import TYPE_CHECKING, Tuple

from synapse.api.constants import Direction
from synapse.api.errors import Codes, NotFoundError, SynapseError
from synapse.api.errors import NotFoundError, SynapseError
from synapse.federation.transport.server import Authenticator
from synapse.http.servlet import RestServlet, parse_enum, parse_integer, parse_string
from synapse.http.site import SynapseRequest

@@ -61,22 +61,8 @@ class ListDestinationsRestServlet(RestServlet):
    async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
        await assert_requester_is_admin(self._auth, request)

        start = parse_integer(request, "from", default=0)
        limit = parse_integer(request, "limit", default=100)

        if start < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter from must be a string representing a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )

        if limit < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter limit must be a string representing a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )
        start = parse_integer(request, "from", default=0, negative=False)
        limit = parse_integer(request, "limit", default=100, negative=False)

        destination = parse_string(request, "destination")

@@ -195,22 +181,8 @@ class DestinationMembershipRestServlet(RestServlet):
        if not await self._store.is_destination_known(destination):
            raise NotFoundError("Unknown destination")

        start = parse_integer(request, "from", default=0)
        limit = parse_integer(request, "limit", default=100)

        if start < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter from must be a string representing a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )

        if limit < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter limit must be a string representing a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )
        start = parse_integer(request, "from", default=0, negative=False)
        limit = parse_integer(request, "limit", default=100, negative=False)

        direction = parse_enum(request, "dir", Direction, default=Direction.FORWARDS)
@@ -311,29 +311,17 @@ class DeleteMediaByDateSize(RestServlet):
    ) -> Tuple[int, JsonDict]:
        await assert_requester_is_admin(self.auth, request)

        before_ts = parse_integer(request, "before_ts", required=True)
        size_gt = parse_integer(request, "size_gt", default=0)
        before_ts = parse_integer(request, "before_ts", required=True, negative=False)
        size_gt = parse_integer(request, "size_gt", default=0, negative=False)
        keep_profiles = parse_boolean(request, "keep_profiles", default=True)

        if before_ts < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter before_ts must be a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )
        elif before_ts < 30000000000:  # Dec 1970 in milliseconds, Aug 2920 in seconds
        if before_ts < 30000000000:  # Dec 1970 in milliseconds, Aug 2920 in seconds
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter before_ts you provided is from the year 1970. "
                + "Double check that you are providing a timestamp in milliseconds.",
                errcode=Codes.INVALID_PARAM,
            )
        if size_gt < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter size_gt must be a string representing a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )

        # This check is useless, we keep it for the legacy endpoint only.
        if server_name is not None and self.server_name != server_name:

@@ -389,22 +377,8 @@ class UserMediaRestServlet(RestServlet):
        if user is None:
            raise NotFoundError("Unknown user")

        start = parse_integer(request, "from", default=0)
        limit = parse_integer(request, "limit", default=100)

        if start < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter from must be a string representing a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )

        if limit < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter limit must be a string representing a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )
        start = parse_integer(request, "from", default=0, negative=False)
        limit = parse_integer(request, "limit", default=100, negative=False)

        # If neither `order_by` nor `dir` is set, set the default order
        # to newest media is on top for backward compatibility.

@@ -447,22 +421,8 @@ class UserMediaRestServlet(RestServlet):
        if user is None:
            raise NotFoundError("Unknown user")

        start = parse_integer(request, "from", default=0)
        limit = parse_integer(request, "limit", default=100)

        if start < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter from must be a string representing a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )

        if limit < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter limit must be a string representing a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )
        start = parse_integer(request, "from", default=0, negative=False)
        limit = parse_integer(request, "limit", default=100, negative=False)

        # If neither `order_by` nor `dir` is set, set the default order
        # to newest media is on top for backward compatibility.
@@ -21,7 +21,6 @@
import logging
from http import HTTPStatus
from typing import TYPE_CHECKING, List, Optional, Tuple, cast
from urllib import parse as urlparse

import attr

@@ -38,6 +37,7 @@ from synapse.http.servlet import (
    assert_params_in_dict,
    parse_enum,
    parse_integer,
    parse_json,
    parse_json_object_from_request,
    parse_string,
)

@@ -51,7 +51,6 @@ from synapse.storage.databases.main.room import RoomSortOrder
from synapse.streams.config import PaginationConfig
from synapse.types import JsonDict, RoomID, ScheduledTask, UserID, create_requester
from synapse.types.state import StateFilter
from synapse.util import json_decoder

if TYPE_CHECKING:
    from synapse.api.auth import Auth

@@ -776,14 +775,8 @@ class RoomEventContextServlet(RestServlet):
        limit = parse_integer(request, "limit", default=10)

        # picking the API shape for symmetry with /messages
        filter_str = parse_string(request, "filter", encoding="utf-8")
        if filter_str:
            filter_json = urlparse.unquote(filter_str)
            event_filter: Optional[Filter] = Filter(
                self._hs, json_decoder.decode(filter_json)
            )
        else:
            event_filter = None
        filter_json = parse_json(request, "filter", encoding="utf-8")
        event_filter = Filter(self._hs, filter_json) if filter_json else None

        event_context = await self.room_context_handler.get_event_context(
            requester,

@@ -914,21 +907,16 @@ class RoomMessagesRestServlet(RestServlet):
        )
        # Twisted will have processed the args by now.
        assert request.args is not None

        filter_json = parse_json(request, "filter", encoding="utf-8")
        event_filter = Filter(self._hs, filter_json) if filter_json else None

        as_client_event = b"raw" not in request.args
        filter_str = parse_string(request, "filter", encoding="utf-8")
        if filter_str:
            filter_json = urlparse.unquote(filter_str)
            event_filter: Optional[Filter] = Filter(
                self._hs, json_decoder.decode(filter_json)
            )
            if (
                event_filter
                and event_filter.filter_json.get("event_format", "client")
                == "federation"
            ):
                as_client_event = False
        else:
            event_filter = None
        if (
            event_filter
            and event_filter.filter_json.get("event_format", "client") == "federation"
        ):
            as_client_event = False

        msgs = await self._pagination_handler.get_messages(
            room_id=room_id,

@@ -63,38 +63,12 @@ class UserMediaStatisticsRestServlet(RestServlet):
            ),
        )

        start = parse_integer(request, "from", default=0)
        if start < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter from must be a string representing a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )
        start = parse_integer(request, "from", default=0, negative=False)
        limit = parse_integer(request, "limit", default=100, negative=False)
        from_ts = parse_integer(request, "from_ts", default=0, negative=False)
        until_ts = parse_integer(request, "until_ts", negative=False)

        limit = parse_integer(request, "limit", default=100)
        if limit < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter limit must be a string representing a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )

        from_ts = parse_integer(request, "from_ts", default=0)
        if from_ts < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter from_ts must be a string representing a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )

        until_ts = parse_integer(request, "until_ts")
        if until_ts is not None:
            if until_ts < 0:
                raise SynapseError(
                    HTTPStatus.BAD_REQUEST,
                    "Query parameter until_ts must be a string representing a positive integer.",
                    errcode=Codes.INVALID_PARAM,
                )
            if until_ts <= from_ts:
                raise SynapseError(
                    HTTPStatus.BAD_REQUEST,

@@ -90,22 +90,8 @@ class UsersRestServletV2(RestServlet):
    async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
        await assert_requester_is_admin(self.auth, request)

        start = parse_integer(request, "from", default=0)
        limit = parse_integer(request, "limit", default=100)

        if start < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter from must be a string representing a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )

        if limit < 0:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter limit must be a string representing a positive integer.",
                errcode=Codes.INVALID_PARAM,
            )
        start = parse_integer(request, "from", default=0, negative=False)
        limit = parse_integer(request, "limit", default=100, negative=False)

        user_id = parse_string(request, "user_id")
        name = parse_string(request, "name", encoding="utf-8")
@@ -81,8 +81,7 @@ class AccountDataServlet(RestServlet):
            raise AuthError(403, "Cannot add account data for other users.")

        # Raise an error if the account data type cannot be set directly.
        if self._hs.config.experimental.msc4010_push_rules_account_data:
            _check_can_set_account_data_type(account_data_type)
        _check_can_set_account_data_type(account_data_type)

        body = parse_json_object_from_request(request)

@@ -108,10 +107,7 @@ class AccountDataServlet(RestServlet):
            raise AuthError(403, "Cannot get account data for other users.")

        # Push rules are stored in a separate table and must be queried separately.
        if (
            self._hs.config.experimental.msc4010_push_rules_account_data
            and account_data_type == AccountDataTypes.PUSH_RULES
        ):
        if account_data_type == AccountDataTypes.PUSH_RULES:
            account_data: Optional[JsonMapping] = (
                await self._push_rules_handler.push_rules_for_user(requester.user)
            )

@@ -162,8 +158,7 @@ class UnstableAccountDataServlet(RestServlet):
            raise AuthError(403, "Cannot delete account data for other users.")

        # Raise an error if the account data type cannot be set directly.
        if self._hs.config.experimental.msc4010_push_rules_account_data:
            _check_can_set_account_data_type(account_data_type)
        _check_can_set_account_data_type(account_data_type)

        await self.handler.remove_account_data_for_user(user_id, account_data_type)

@@ -209,15 +204,7 @@ class RoomAccountDataServlet(RestServlet):
        )

        # Raise an error if the account data type cannot be set directly.
        if self._hs.config.experimental.msc4010_push_rules_account_data:
            _check_can_set_account_data_type(account_data_type)
        elif account_data_type == ReceiptTypes.FULLY_READ:
            raise SynapseError(
                405,
                "Cannot set m.fully_read through this API."
                " Use /rooms/!roomId:server.name/read_markers",
                Codes.BAD_JSON,
            )
        _check_can_set_account_data_type(account_data_type)

        body = parse_json_object_from_request(request)

@@ -256,10 +243,7 @@ class RoomAccountDataServlet(RestServlet):
        )

        # Room-specific push rules are not currently supported.
        if (
            self._hs.config.experimental.msc4010_push_rules_account_data
            and account_data_type == AccountDataTypes.PUSH_RULES
        ):
        if account_data_type == AccountDataTypes.PUSH_RULES:
            account_data: Optional[JsonMapping] = {}
        else:
            account_data = await self.store.get_account_data_for_room_and_type(

@@ -317,8 +301,7 @@ class UnstableRoomAccountDataServlet(RestServlet):
        )

        # Raise an error if the account data type cannot be set directly.
        if self._hs.config.experimental.msc4010_push_rules_account_data:
            _check_can_set_account_data_type(account_data_type)
        _check_can_set_account_data_type(account_data_type)

        await self.handler.remove_account_data_for_room(
            user_id, room_id, account_data_type
@@ -393,23 +393,37 @@ class SigningKeyUploadServlet(RestServlet):
        # time. Because there is no UIA in MSC3861, for now we throw an error if the
        # user tries to reset the device signing key when MSC3861 is enabled, but allow
        # first-time setup.
        #
        # XXX: We now have a get-out clause by which MAS can temporarily mark the master
        # key as replaceable. It should do its own equivalent of user interactive auth
        # before doing so.
        if self.hs.config.experimental.msc3861.enabled:
            # The auth service has to explicitly mark the master key as replaceable
            # without UIA to reset the device signing key with MSC3861.
            if is_cross_signing_setup and not master_key_updatable_without_uia:
                config = self.hs.config.experimental.msc3861
                if config.account_management_url is not None:
                    url = f"{config.account_management_url}?action=org.matrix.cross_signing_reset"
                else:
                    url = config.issuer

                raise SynapseError(
                    HTTPStatus.NOT_IMPLEMENTED,
                    "Resetting cross signing keys is not yet supported with MSC3861",
                    "To reset your end-to-end encryption cross-signing identity, "
                    f"you first need to approve it at {url} and then try again.",
                    Codes.UNRECOGNIZED,
                )
            # But first-time setup is fine

        elif self.hs.config.experimental.msc3967_enabled:
            # If we already have a master key then cross signing is set up and we require UIA to reset
            # MSC3967 allows this endpoint to 200 OK for idempotency. Resending exactly the same
            # keys should just 200 OK without doing a UIA prompt.
            keys_are_different = await self.e2e_keys_handler.has_different_keys(
                user_id, body
            )
            if not keys_are_different:
                # FIXME: we do not fallthrough to upload_signing_keys_for_user because confusingly
                # if we do, we 500 as it looks like it tries to INSERT the same key twice, causing a
                # unique key constraint violation. This sounds like a bug?
                return 200, {}
            # The keys are different. Is cross-signing set up? If not, the keys don't exist yet,
            # which is why they differ. If it is, we need UIA to change them.
            if is_cross_signing_setup:
                await self.auth_handler.validate_user_via_ui_auth(
                    requester,

@@ -420,7 +434,6 @@ class SigningKeyUploadServlet(RestServlet):
                    can_skip_ui_auth=False,
                )
            # Otherwise we don't require UIA since we are setting up cross signing for first time

        else:
            # Previous behaviour is to always require UIA but allow it to be skipped
            await self.auth_handler.validate_user_via_ui_auth(
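
Note: condensed, the MSC3967 branch above implements a three-way decision. The sketch below is an illustrative restatement, not the servlet itself:

    def msc3967_decision(keys_differ: bool, is_cross_signing_setup: bool) -> str:
        # Re-sending identical keys is idempotent: 200 OK with no UIA prompt.
        if not keys_differ:
            return "ok-idempotent"
        # Different keys over an existing setup is a reset: UIA is required.
        if is_cross_signing_setup:
            return "needs-uia"
        # Different keys but nothing stored yet: first-time setup, no UIA.
        return "ok-first-setup"

    assert msc3967_decision(False, True) == "ok-idempotent"
    assert msc3967_decision(True, True) == "needs-uia"
    assert msc3967_decision(True, False) == "ok-first-setup"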

@@ -86,12 +86,18 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
        self.config = hs.config

        if self.hs.config.email.can_verify_email:
            self.mailer = Mailer(
            self.registration_mailer = Mailer(
                hs=self.hs,
                app_name=self.config.email.email_app_name,
                template_html=self.config.email.email_registration_template_html,
                template_text=self.config.email.email_registration_template_text,
            )
            self.already_in_use_mailer = Mailer(
                hs=self.hs,
                app_name=self.config.email.email_app_name,
                template_html=self.config.email.email_already_in_use_template_html,
                template_text=self.config.email.email_already_in_use_template_text,
            )

    async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
        if not self.hs.config.email.can_verify_email:

@@ -139,8 +145,10 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
            if self.hs.config.server.request_token_inhibit_3pid_errors:
                # Make the client think the operation succeeded. See the rationale in the
                # comments for request_token_inhibit_3pid_errors.
                # Still send an email to warn the user that an account already exists.
                # Also wait for some random amount of time between 100ms and 1s to make it
                # look like we did something.
                await self.already_in_use_mailer.send_already_in_use_mail(email)
                await self.hs.get_clock().sleep(random.randint(1, 10) / 10)
                return 200, {"sid": random_string(16)}

@@ -151,7 +159,7 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
                email,
                client_secret,
                send_attempt,
                self.mailer.send_registration_mail,
                self.registration_mailer.send_registration_mail,
                next_link,
            )
@@ -55,7 +55,6 @@ class RelationPaginationServlet(RestServlet):
        self.auth = hs.get_auth()
        self._store = hs.get_datastores().main
        self._relations_handler = hs.get_relations_handler()
        self._support_recurse = hs.config.experimental.msc3981_recurse_relations

    async def on_GET(
        self,

@@ -70,12 +69,9 @@ class RelationPaginationServlet(RestServlet):
        pagination_config = await PaginationConfig.from_request(
            self._store, request, default_limit=5, default_dir=Direction.BACKWARDS
        )
        if self._support_recurse:
            recurse = parse_boolean(request, "recurse", default=False) or parse_boolean(
                request, "org.matrix.msc3981.recurse", default=False
            )
        else:
            recurse = False
        recurse = parse_boolean(request, "recurse", default=False) or parse_boolean(
            request, "org.matrix.msc3981.recurse", default=False
        )

        # The unstable version of this API returns an extra field for client
        # compatibility, see https://github.com/matrix-org/synapse/issues/12930.
@@ -2,7 +2,7 @@
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2022 The Matrix.org Foundation C.I.C.
# Copyright (C) 2023 New Vector, Ltd
# Copyright (C) 2023-2024 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as

@@ -34,7 +34,7 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)


class RendezvousServlet(RestServlet):
class MSC3886RendezvousServlet(RestServlet):
    """
    This is a placeholder implementation of [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886)
    simple client rendezvous capability that is used by the "Sign in with QR" functionality.

@@ -76,6 +76,46 @@ class RendezvousServlet(RestServlet):
    # PUT, GET and DELETE are not implemented as they should be fulfilled by the redirect target.


class MSC4108DelegationRendezvousServlet(RestServlet):
    PATTERNS = client_patterns(
        "/org.matrix.msc4108/rendezvous$", releases=[], v1=False, unstable=True
    )

    def __init__(self, hs: "HomeServer"):
        super().__init__()
        redirection_target: Optional[str] = (
            hs.config.experimental.msc4108_delegation_endpoint
        )
        assert (
            redirection_target is not None
        ), "Servlet is only registered if there is a delegation target"
        self.endpoint = redirection_target.encode("utf-8")

    async def on_POST(self, request: SynapseRequest) -> None:
        respond_with_redirect(
            request, self.endpoint, statusCode=TEMPORARY_REDIRECT, cors=True
        )


class MSC4108RendezvousServlet(RestServlet):
    PATTERNS = client_patterns(
        "/org.matrix.msc4108/rendezvous$", releases=[], v1=False, unstable=True
    )

    def __init__(self, hs: "HomeServer") -> None:
        super().__init__()
        self._handler = hs.get_rendezvous_handler()

    def on_POST(self, request: SynapseRequest) -> None:
        self._handler.handle_post(request)


def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
    if hs.config.experimental.msc3886_endpoint is not None:
        RendezvousServlet(hs).register(http_server)
        MSC3886RendezvousServlet(hs).register(http_server)

    if hs.config.experimental.msc4108_enabled:
        MSC4108RendezvousServlet(hs).register(http_server)

    if hs.config.experimental.msc4108_delegation_endpoint is not None:
        MSC4108DelegationRendezvousServlet(hs).register(http_server)
@@ -52,6 +52,7 @@ from synapse.http.servlet import (
    parse_boolean,
    parse_enum,
    parse_integer,
    parse_json,
    parse_json_object_from_request,
    parse_string,
    parse_strings_from_args,

@@ -65,7 +66,6 @@ from synapse.rest.client.transactions import HttpTransactionCache
from synapse.streams.config import PaginationConfig
from synapse.types import JsonDict, Requester, StreamToken, ThirdPartyInstanceID, UserID
from synapse.types.state import StateFilter
from synapse.util import json_decoder
from synapse.util.cancellation import cancellable
from synapse.util.stringutils import parse_and_validate_server_name, random_string

@@ -499,7 +499,7 @@ class PublicRoomListRestServlet(RestServlet):
            if server:
                raise e

        limit: Optional[int] = parse_integer(request, "limit", 0)
        limit: Optional[int] = parse_integer(request, "limit", 0, negative=False)
        since_token = parse_string(request, "since")

        if limit == 0:

@@ -703,21 +703,16 @@ class RoomMessageListRestServlet(RestServlet):
        )
        # Twisted will have processed the args by now.
        assert request.args is not None

        filter_json = parse_json(request, "filter", encoding="utf-8")
        event_filter = Filter(self._hs, filter_json) if filter_json else None

        as_client_event = b"raw" not in request.args
        filter_str = parse_string(request, "filter", encoding="utf-8")
        if filter_str:
            filter_json = urlparse.unquote(filter_str)
            event_filter: Optional[Filter] = Filter(
                self._hs, json_decoder.decode(filter_json)
            )
            if (
                event_filter
                and event_filter.filter_json.get("event_format", "client")
                == "federation"
            ):
                as_client_event = False
        else:
            event_filter = None
        if (
            event_filter
            and event_filter.filter_json.get("event_format", "client") == "federation"
        ):
            as_client_event = False

        msgs = await self.pagination_handler.get_messages(
            room_id=room_id,
@@ -898,14 +893,8 @@ class RoomEventContextServlet(RestServlet):
        limit = parse_integer(request, "limit", default=10)

        # picking the API shape for symmetry with /messages
        filter_str = parse_string(request, "filter", encoding="utf-8")
        if filter_str:
            filter_json = urlparse.unquote(filter_str)
            event_filter: Optional[Filter] = Filter(
                self._hs, json_decoder.decode(filter_json)
            )
        else:
            event_filter = None
        filter_json = parse_json(request, "filter", encoding="utf-8")
        event_filter = Filter(self._hs, filter_json) if filter_json else None

        event_context = await self.room_context_handler.get_event_context(
            requester, room_id, event_id, limit, event_filter

@@ -1453,10 +1442,16 @@ class RoomHierarchyRestServlet(RestServlet):

class RoomSummaryRestServlet(ResolveRoomIdMixin, RestServlet):
    PATTERNS = (
        # deprecated endpoint, to be removed
        re.compile(
            "^/_matrix/client/unstable/im.nheko.summary"
            "/rooms/(?P<room_identifier>[^/]*)/summary$"
        ),
        # recommended endpoint
        re.compile(
            "^/_matrix/client/unstable/im.nheko.summary"
            "/summary/(?P<room_identifier>[^/]*)$"
        ),
    )
    CATEGORY = "Client API requests"
@@ -132,13 +132,22 @@ class VersionsRestServlet(RestServlet):
                    # Adds support for relation-based redactions as per MSC3912.
                    "org.matrix.msc3912": self.config.experimental.msc3912_enabled,
                    # Whether recursively providing relations is supported.
                    "org.matrix.msc3981": self.config.experimental.msc3981_recurse_relations,
                    # TODO: this is no longer needed once unstable MSC3981 does not need to be supported.
                    "org.matrix.msc3981": True,
                    # Adds support for deleting account data.
                    "org.matrix.msc3391": self.config.experimental.msc3391_enabled,
                    # Allows clients to inhibit profile update propagation.
                    "org.matrix.msc4069": self.config.experimental.msc4069_profile_inhibit_propagation,
                    # Allows clients to handle push for encrypted events.
                    "org.matrix.msc4028": self.config.experimental.msc4028_push_encrypted_events,
                    # MSC4108: Mechanism to allow OIDC sign in and E2EE set up via QR code
                    "org.matrix.msc4108": (
                        self.config.experimental.msc4108_enabled
                        or (
                            self.config.experimental.msc4108_delegation_endpoint
                            is not None
                        )
                    ),
                },
            },
        )
@@ -72,9 +72,6 @@ class PreviewUrlResource(RestServlet):
        # XXX: if get_user_by_req fails, what should we do in an async render?
        requester = await self.auth.get_user_by_req(request)
        url = parse_string(request, "url", required=True)
        ts = parse_integer(request, "ts")
        if ts is None:
            ts = self.clock.time_msec()

        ts = parse_integer(request, "ts", default=self.clock.time_msec())
        og = await self.url_previewer.preview(url, requester.user, ts)
        respond_with_json_bytes(request, 200, og, send_cors=True)

@@ -26,6 +26,7 @@ from twisted.web.resource import Resource
from synapse.rest.synapse.client.new_user_consent import NewUserConsentResource
from synapse.rest.synapse.client.pick_idp import PickIdpResource
from synapse.rest.synapse.client.pick_username import pick_username_resource
from synapse.rest.synapse.client.rendezvous import MSC4108RendezvousSessionResource
from synapse.rest.synapse.client.sso_register import SsoRegisterResource
from synapse.rest.synapse.client.unsubscribe import UnsubscribeResource

@@ -76,6 +77,9 @@ def build_synapse_client_resource_tree(hs: "HomeServer") -> Mapping[str, Resourc
        # To be removed in Synapse v1.32.0.
        resources["/_matrix/saml2"] = res

    if hs.config.experimental.msc4108_enabled:
        resources["/_synapse/client/rendezvous"] = MSC4108RendezvousSessionResource(hs)

    return resources
58
synapse/rest/synapse/client/rendezvous.py
Normal file
@@ -0,0 +1,58 @@
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright (C) 2024 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
#

import logging
from typing import TYPE_CHECKING, List

from synapse.api.errors import UnrecognizedRequestError
from synapse.http.server import DirectServeJsonResource
from synapse.http.site import SynapseRequest

if TYPE_CHECKING:
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)


class MSC4108RendezvousSessionResource(DirectServeJsonResource):
    isLeaf = True

    def __init__(self, hs: "HomeServer") -> None:
        super().__init__()
        self._handler = hs.get_rendezvous_handler()

    async def _async_render_GET(self, request: SynapseRequest) -> None:
        postpath: List[bytes] = request.postpath  # type: ignore
        if len(postpath) != 1:
            raise UnrecognizedRequestError()
        session_id = postpath[0].decode("ascii")

        self._handler.handle_get(request, session_id)

    def _async_render_PUT(self, request: SynapseRequest) -> None:
        postpath: List[bytes] = request.postpath  # type: ignore
        if len(postpath) != 1:
            raise UnrecognizedRequestError()
        session_id = postpath[0].decode("ascii")

        self._handler.handle_put(request, session_id)

    def _async_render_DELETE(self, request: SynapseRequest) -> None:
        postpath: List[bytes] = request.postpath  # type: ignore
        if len(postpath) != 1:
            raise UnrecognizedRequestError()
        session_id = postpath[0].decode("ascii")

        self._handler.handle_delete(request, session_id)
@@ -143,6 +143,7 @@ from synapse.state import StateHandler, StateResolutionHandler
from synapse.storage import Databases
from synapse.storage.controllers import StorageControllers
from synapse.streams.events import EventSources
from synapse.synapse_rust.rendezvous import RendezvousHandler
from synapse.types import DomainSpecificString, ISynapseReactor
from synapse.util import Clock
from synapse.util.distributor import Distributor

@@ -859,6 +860,10 @@ class HomeServer(metaclass=abc.ABCMeta):
    def get_room_forgetter_handler(self) -> RoomForgetterHandler:
        return RoomForgetterHandler(self)

    @cache_in_self
    def get_rendezvous_handler(self) -> RendezvousHandler:
        return RendezvousHandler(self)

    @cache_in_self
    def get_outbound_redis_connection(self) -> "ConnectionHandler":
        """
@@ -318,7 +318,13 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
            self._invalidate_local_get_event_cache(redacts)  # type: ignore[attr-defined]
            # Caches which might leak edits must be invalidated for the event being
            # redacted.
            self._attempt_to_invalidate_cache("get_relations_for_event", (redacts,))
            self._attempt_to_invalidate_cache(
                "get_relations_for_event",
                (
                    room_id,
                    redacts,
                ),
            )
            self._attempt_to_invalidate_cache("get_applicable_edit", (redacts,))
            self._attempt_to_invalidate_cache("get_thread_id", (redacts,))
            self._attempt_to_invalidate_cache("get_thread_id_for_receipts", (redacts,))

@@ -345,7 +351,13 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
        )

        if relates_to:
            self._attempt_to_invalidate_cache("get_relations_for_event", (relates_to,))
            self._attempt_to_invalidate_cache(
                "get_relations_for_event",
                (
                    room_id,
                    relates_to,
                ),
            )
            self._attempt_to_invalidate_cache("get_references_for_event", (relates_to,))
            self._attempt_to_invalidate_cache("get_applicable_edit", (relates_to,))
            self._attempt_to_invalidate_cache("get_thread_summary", (relates_to,))

@@ -380,9 +392,9 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
        self._attempt_to_invalidate_cache(
            "get_unread_event_push_actions_by_room_for_user", (room_id,)
        )
        self._attempt_to_invalidate_cache("get_relations_for_event", (room_id,))

        self._attempt_to_invalidate_cache("_get_membership_from_event_id", None)
        self._attempt_to_invalidate_cache("get_relations_for_event", None)
        self._attempt_to_invalidate_cache("get_applicable_edit", None)
        self._attempt_to_invalidate_cache("get_thread_id", None)
        self._attempt_to_invalidate_cache("get_thread_id_for_receipts", None)

@@ -385,7 +385,6 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
            WITH all_receipts AS (
                SELECT room_id, thread_id, MAX(event_stream_ordering) AS max_receipt_stream_ordering
                FROM receipts_linearized
                LEFT JOIN events USING (room_id, event_id)
                WHERE
                    {receipt_types_clause}
                    AND user_id = ?

@@ -621,13 +620,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
            SELECT notif_count, COALESCE(unread_count, 0), thread_id
            FROM event_push_summary
            LEFT JOIN (
                SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
                SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
                FROM receipts_linearized
                LEFT JOIN events USING (room_id, event_id)
                WHERE
                    user_id = ?
                    AND room_id = ?
                    AND stream_ordering > ?
                    AND event_stream_ordering > ?
                    AND {receipt_types_clause}
                GROUP BY thread_id
            ) AS receipts USING (thread_id)

@@ -659,13 +657,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
        sql = f"""
            SELECT COUNT(*), thread_id FROM event_push_actions
            LEFT JOIN (
                SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
                SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
                FROM receipts_linearized
                LEFT JOIN events USING (room_id, event_id)
                WHERE
                    user_id = ?
                    AND room_id = ?
                    AND stream_ordering > ?
                    AND event_stream_ordering > ?
                    AND {receipt_types_clause}
                GROUP BY thread_id
            ) AS receipts USING (thread_id)

@@ -738,13 +735,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
                thread_id
            FROM event_push_actions
            LEFT JOIN (
                SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
                SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
                FROM receipts_linearized
                LEFT JOIN events USING (room_id, event_id)
                WHERE
                    user_id = ?
                    AND room_id = ?
                    AND stream_ordering > ?
                    AND event_stream_ordering > ?
                    AND {receipt_types_clause}
                GROUP BY thread_id
            ) AS receipts USING (thread_id)

@@ -910,9 +906,8 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
        # given this function generally gets called with only one room and
        # thread ID.
        sql = f"""
            SELECT room_id, thread_id, MAX(stream_ordering)
            SELECT room_id, thread_id, MAX(event_stream_ordering)
            FROM receipts_linearized
            INNER JOIN events USING (room_id, event_id)
            WHERE {receipt_types_clause}
            AND {thread_ids_clause}
            AND {room_ids_clause}

@@ -1442,9 +1437,8 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
        )

        sql = """
            SELECT r.stream_id, r.room_id, r.user_id, r.thread_id, e.stream_ordering
            SELECT r.stream_id, r.room_id, r.user_id, r.thread_id, r.event_stream_ordering
            FROM receipts_linearized AS r
            INNER JOIN events AS e USING (event_id)
            WHERE ? < r.stream_id AND r.stream_id <= ? AND user_id LIKE ?
            ORDER BY r.stream_id ASC
            LIMIT ?
@@ -19,6 +19,7 @@
# [This file includes modifications made by New Vector Limited]
#
#
import collections
import itertools
import logging
from collections import OrderedDict

@@ -53,6 +54,7 @@ from synapse.storage.database import (
    LoggingDatabaseConnection,
    LoggingTransaction,
)
from synapse.storage.databases.main.event_federation import EventFederationStore
from synapse.storage.databases.main.events_worker import EventCacheEntry
from synapse.storage.databases.main.search import SearchEntry
from synapse.storage.engines import PostgresEngine

@@ -768,40 +770,26 @@ class PersistEventsStore:
        #    that have the same chain ID as the event.
        # 2. For each retained auth event we:
        #    a. Add a link from the event's to the auth event's chain
        #       ID/sequence number; and
        #    b. Add a link from the event to every chain reachable by the
        #       auth event.
        #       ID/sequence number

        # Step 1, fetch all existing links from all the chains we've seen
        # referenced.
        chain_links = _LinkMap()
        auth_chain_rows = cast(
            List[Tuple[int, int, int, int]],
            db_pool.simple_select_many_txn(
                txn,
                table="event_auth_chain_links",
                column="origin_chain_id",
                iterable={chain_id for chain_id, _ in chain_map.values()},
                keyvalues={},
                retcols=(
                    "origin_chain_id",
                    "origin_sequence_number",
                    "target_chain_id",
                    "target_sequence_number",
                ),
            ),
        )
        for (
            origin_chain_id,
            origin_sequence_number,
            target_chain_id,
            target_sequence_number,
        ) in auth_chain_rows:
            chain_links.add_link(
                (origin_chain_id, origin_sequence_number),
                (target_chain_id, target_sequence_number),
                new=False,
            )

        for links in EventFederationStore._get_chain_links(
            txn, {chain_id for chain_id, _ in chain_map.values()}
        ):
            for origin_chain_id, inner_links in links.items():
                for (
                    origin_sequence_number,
                    target_chain_id,
                    target_sequence_number,
                ) in inner_links:
                    chain_links.add_link(
                        (origin_chain_id, origin_sequence_number),
                        (target_chain_id, target_sequence_number),
                        new=False,
                    )

        # We do this in topological order to avoid adding redundant links.
        for event_id in sorted_topologically(

@@ -836,18 +824,6 @@ class PersistEventsStore:
                    (chain_id, sequence_number), (auth_chain_id, auth_sequence_number)
                )

                # Step 2b, add a link to chains reachable from the auth
                # event.
                for target_id, target_seq in chain_links.get_links_from(
                    (auth_chain_id, auth_sequence_number)
                ):
                    if target_id == chain_id:
                        continue

                    chain_links.add_link(
                        (chain_id, sequence_number), (target_id, target_seq)
                    )

        db_pool.simple_insert_many_txn(
            txn,
            table="event_auth_chain_links",

@@ -1947,7 +1923,12 @@ class PersistEventsStore:

        # Any relation information for the related event must be cleared.
        self.store._invalidate_cache_and_stream(
            txn, self.store.get_relations_for_event, (redacted_relates_to,)
            txn,
            self.store.get_relations_for_event,
            (
                room_id,
                redacted_relates_to,
            ),
        )
        if rel_type == RelationTypes.REFERENCE:
            self.store._invalidate_cache_and_stream(

@@ -2451,31 +2432,6 @@ class _LinkMap:
            current_links[src_seq] = target_seq
        return True

    def get_links_from(
        self, src_tuple: Tuple[int, int]
    ) -> Generator[Tuple[int, int], None, None]:
        """Gets the chains reachable from the given chain/sequence number.

        Yields:
            The chain ID and sequence number the link points to.
        """
        src_chain, src_seq = src_tuple
        for target_id, sequence_numbers in self.maps.get(src_chain, {}).items():
            for link_src_seq, target_seq in sequence_numbers.items():
                if link_src_seq <= src_seq:
                    yield target_id, target_seq

    def get_links_between(
        self, source_chain: int, target_chain: int
    ) -> Generator[Tuple[int, int], None, None]:
        """Gets the links between two chains.

        Yields:
            The source and target sequence numbers.
        """

        yield from self.maps.get(source_chain, {}).get(target_chain, {}).items()

    def get_additions(self) -> Generator[Tuple[int, int, int, int], None, None]:
        """Gets any newly added links.

@@ -2502,9 +2458,24 @@ class _LinkMap:
        if src_chain == target_chain:
            return target_seq <= src_seq

        links = self.get_links_between(src_chain, target_chain)
        for link_start_seq, link_end_seq in links:
            if link_start_seq <= src_seq and target_seq <= link_end_seq:
                return True
        # We have to graph traverse the links to check for indirect paths.
        visited_chains: Dict[int, int] = collections.Counter()
        search = [(src_chain, src_seq)]
        while search:
            chain, seq = search.pop()
            visited_chains[chain] = max(seq, visited_chains[chain])
            for tc, links in self.maps.get(chain, {}).items():
                for ss, ts in links.items():
                    # Don't revisit chains we've already seen, unless the target
                    # sequence number is higher than last time.
                    if ts <= visited_chains.get(tc, 0):
                        continue

                    if ss <= seq:
                        if tc == target_chain:
                            if target_seq <= ts:
                                return True
                        else:
                            search.append((tc, ts))

        return False
|
||||
|
||||
@@ -1181,7 +1181,7 @@ class EventsBackgroundUpdatesStore(SQLBaseStore):
|
||||
|
||||
results = list(txn)
|
||||
# (event_id, parent_id, rel_type) for each relation
|
||||
relations_to_insert: List[Tuple[str, str, str]] = []
|
||||
relations_to_insert: List[Tuple[str, str, str, str]] = []
|
||||
for event_id, event_json_raw in results:
|
||||
try:
|
||||
event_json = db_to_json(event_json_raw)
|
||||
@@ -1214,7 +1214,8 @@ class EventsBackgroundUpdatesStore(SQLBaseStore):
|
||||
if not isinstance(parent_id, str):
|
||||
continue
|
||||
|
||||
relations_to_insert.append((event_id, parent_id, rel_type))
|
||||
room_id = event_json["room_id"]
|
||||
relations_to_insert.append((room_id, event_id, parent_id, rel_type))
|
||||
|
||||
# Insert the missing data, note that we upsert here in case the event
|
||||
# has already been processed.
|
||||
@@ -1223,18 +1224,27 @@ class EventsBackgroundUpdatesStore(SQLBaseStore):
|
||||
txn=txn,
|
||||
table="event_relations",
|
||||
key_names=("event_id",),
|
||||
key_values=[(r[0],) for r in relations_to_insert],
|
||||
key_values=[(r[1],) for r in relations_to_insert],
|
||||
value_names=("relates_to_id", "relation_type"),
|
||||
value_values=[r[1:] for r in relations_to_insert],
|
||||
value_values=[r[2:] for r in relations_to_insert],
|
||||
)
|
||||
|
||||
# Iterate the parent IDs and invalidate caches.
|
||||
cache_tuples = {(r[1],) for r in relations_to_insert}
|
||||
self._invalidate_cache_and_stream_bulk( # type: ignore[attr-defined]
|
||||
txn, self.get_relations_for_event, cache_tuples # type: ignore[attr-defined]
|
||||
txn,
|
||||
self.get_relations_for_event, # type: ignore[attr-defined]
|
||||
{
|
||||
(
|
||||
r[0], # room_id
|
||||
r[2], # parent_id
|
||||
)
|
||||
for r in relations_to_insert
|
||||
},
|
||||
)
|
||||
self._invalidate_cache_and_stream_bulk( # type: ignore[attr-defined]
|
||||
txn, self.get_thread_summary, cache_tuples # type: ignore[attr-defined]
|
||||
txn,
|
||||
self.get_thread_summary, # type: ignore[attr-defined]
|
||||
{(r[1],) for r in relations_to_insert},
|
||||
)
|
||||
|
||||
if results:
|
||||
|
||||
@@ -178,14 +178,13 @@ class ReceiptsWorkerStore(SQLBaseStore):
|
||||
)
|
||||
|
||||
sql = f"""
|
||||
SELECT event_id, stream_ordering
|
||||
SELECT event_id, event_stream_ordering
|
||||
FROM receipts_linearized
|
||||
INNER JOIN events USING (room_id, event_id)
|
||||
WHERE {clause}
|
||||
AND user_id = ?
|
||||
AND room_id = ?
|
||||
AND thread_id IS NULL
|
||||
ORDER BY stream_ordering DESC
|
||||
ORDER BY event_stream_ordering DESC
|
||||
LIMIT 1
|
||||
"""
|
||||
|
||||
@@ -735,10 +734,13 @@ class ReceiptsWorkerStore(SQLBaseStore):
|
||||
thread_clause = "r.thread_id = ?"
|
||||
thread_args = (thread_id,)
|
||||
|
||||
# If the receipt doesn't have a stream ordering it is because we
|
||||
# don't have the associated event, and so must be a remote receipt.
|
||||
# Hence it's safe to just allow new receipts to clobber it.
|
||||
sql = f"""
|
||||
SELECT stream_ordering, event_id FROM events
|
||||
INNER JOIN receipts_linearized AS r USING (event_id, room_id)
|
||||
WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ? AND {thread_clause}
|
||||
SELECT r.event_stream_ordering, r.event_id FROM receipts_linearized AS r
|
||||
WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ?
|
||||
AND r.event_stream_ordering IS NOT NULL AND {thread_clause}
|
||||
"""
|
||||
txn.execute(
|
||||
sql,
|
||||
|
||||
@@ -2108,6 +2108,13 @@ class RegistrationBackgroundUpdateStore(RegistrationWorkerStore):
|
||||
unique=False,
|
||||
)
|
||||
|
||||
self.db_pool.updates.register_background_index_update(
|
||||
update_name="access_tokens_refresh_token_id_idx",
|
||||
index_name="access_tokens_refresh_token_id_idx",
|
||||
table="access_tokens",
|
||||
columns=("refresh_token_id",),
|
||||
)
|
||||
|
||||
async def _background_update_set_deactivated_flag(
|
||||
self, progress: JsonDict, batch_size: int
|
||||
) -> int:
|
||||
@@ -2266,13 +2273,6 @@ class RegistrationStore(StatsStore, RegistrationBackgroundUpdateStore):
|
||||
):
|
||||
super().__init__(database, db_conn, hs)
|
||||
|
||||
self.db_pool.updates.register_background_index_update(
|
||||
update_name="access_tokens_refresh_token_id_idx",
|
||||
index_name="access_tokens_refresh_token_id_idx",
|
||||
table="access_tokens",
|
||||
columns=("refresh_token_id",),
|
||||
)
|
||||
|
||||
self._ignore_unknown_session_error = (
|
||||
hs.config.server.request_token_inhibit_3pid_errors
|
||||
)
|
||||
|
||||
@@ -169,9 +169,9 @@ class RelationsWorkerStore(SQLBaseStore):
|
||||
@cached(uncached_args=("event",), tree=True)
|
||||
async def get_relations_for_event(
|
||||
self,
|
||||
room_id: str,
|
||||
event_id: str,
|
||||
event: EventBase,
|
||||
room_id: str,
|
||||
relation_type: Optional[str] = None,
|
||||
event_type: Optional[str] = None,
|
||||
limit: int = 5,
|
||||
|
||||
@@ -1234,6 +1234,28 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
|
||||
|
||||
return set(room_ids)
|
||||
|
||||
async def get_membership_event_ids_for_user(
|
||||
self, user_id: str, room_id: str
|
||||
) -> Set[str]:
|
||||
"""Get all event_ids for the given user and room.
|
||||
|
||||
Args:
|
||||
user_id: The user ID to get the event IDs for.
|
||||
room_id: The room ID to look up events for.
|
||||
|
||||
Returns:
|
||||
Set of event IDs
|
||||
"""
|
||||
|
||||
event_ids = await self.db_pool.simple_select_onecol(
|
||||
table="room_memberships",
|
||||
keyvalues={"user_id": user_id, "room_id": room_id},
|
||||
retcol="event_id",
|
||||
desc="get_membership_event_ids_for_user",
|
||||
)
|
||||
|
||||
return set(event_ids)
|
||||
|
||||
@cached(max_entries=5000)
|
||||
async def _get_membership_from_event_id(
|
||||
self, member_event_id: str
|
||||
|
||||
@@ -470,6 +470,8 @@ class SearchStore(SearchBackgroundUpdateStore):
|
||||
count_args = args
|
||||
count_clauses = clauses
|
||||
|
||||
sqlite_highlights: List[str] = []
|
||||
|
||||
if isinstance(self.database_engine, PostgresEngine):
|
||||
search_query = search_term
|
||||
sql = """
|
||||
@@ -486,7 +488,7 @@ class SearchStore(SearchBackgroundUpdateStore):
|
||||
"""
|
||||
count_args = [search_query] + count_args
|
||||
elif isinstance(self.database_engine, Sqlite3Engine):
|
||||
search_query = _parse_query_for_sqlite(search_term)
|
||||
search_query, sqlite_highlights = _parse_query_for_sqlite(search_term)
|
||||
|
||||
sql = """
|
||||
SELECT rank(matchinfo(event_search)) as rank, room_id, event_id
|
||||
@@ -531,9 +533,11 @@ class SearchStore(SearchBackgroundUpdateStore):
|
||||
|
||||
event_map = {ev.event_id: ev for ev in events}
|
||||
|
||||
highlights = None
|
||||
highlights: Collection[str] = []
|
||||
if isinstance(self.database_engine, PostgresEngine):
|
||||
highlights = await self._find_highlights_in_postgres(search_query, events)
|
||||
else:
|
||||
highlights = sqlite_highlights
|
||||
|
||||
count_sql += " GROUP BY room_id"
|
||||
|
||||
@@ -597,6 +601,8 @@ class SearchStore(SearchBackgroundUpdateStore):
|
||||
count_args = list(args)
|
||||
count_clauses = list(clauses)
|
||||
|
||||
sqlite_highlights: List[str] = []
|
||||
|
||||
if pagination_token:
|
||||
try:
|
||||
origin_server_ts_str, stream_str = pagination_token.split(",")
|
||||
@@ -647,7 +653,7 @@ class SearchStore(SearchBackgroundUpdateStore):
|
||||
CROSS JOIN events USING (event_id)
|
||||
WHERE
|
||||
"""
|
||||
search_query = _parse_query_for_sqlite(search_term)
|
||||
search_query, sqlite_highlights = _parse_query_for_sqlite(search_term)
|
||||
args = [search_query] + args
|
||||
|
||||
count_sql = """
|
||||
@@ -694,9 +700,11 @@ class SearchStore(SearchBackgroundUpdateStore):
|
||||
|
||||
event_map = {ev.event_id: ev for ev in events}
|
||||
|
||||
highlights = None
|
||||
highlights: Collection[str] = []
|
||||
if isinstance(self.database_engine, PostgresEngine):
|
||||
highlights = await self._find_highlights_in_postgres(search_query, events)
|
||||
else:
|
||||
highlights = sqlite_highlights
|
||||
|
||||
count_sql += " GROUP BY room_id"
|
||||
|
||||
@@ -892,19 +900,25 @@ def _tokenize_query(query: str) -> TokenList:
|
||||
return tokens
|
||||
|
||||
|
||||
def _tokens_to_sqlite_match_query(tokens: TokenList) -> str:
|
||||
def _tokens_to_sqlite_match_query(tokens: TokenList) -> Tuple[str, List[str]]:
|
||||
"""
|
||||
Convert the list of tokens to a string suitable for passing to sqlite's MATCH.
|
||||
Assume sqlite was compiled with enhanced query syntax.
|
||||
|
||||
Returns the sqlite-formatted query string and the tokenized search terms
|
||||
that can be used as highlights.
|
||||
|
||||
Ref: https://www.sqlite.org/fts3.html#full_text_index_queries
|
||||
"""
|
||||
match_query = []
|
||||
highlights = []
|
||||
for token in tokens:
|
||||
if isinstance(token, str):
|
||||
match_query.append(token)
|
||||
highlights.append(token)
|
||||
elif isinstance(token, Phrase):
|
||||
match_query.append('"' + " ".join(token.phrase) + '"')
|
||||
highlights.append(" ".join(token.phrase))
|
||||
elif token == SearchToken.Not:
|
||||
# TODO: SQLite treats NOT as a *binary* operator. Hopefully a search
|
||||
# term has already been added before this.
|
||||
@@ -916,11 +930,14 @@ def _tokens_to_sqlite_match_query(tokens: TokenList) -> str:
|
||||
else:
|
||||
raise ValueError(f"unknown token {token}")
|
||||
|
||||
return "".join(match_query)
|
||||
return "".join(match_query), highlights
|
||||
|
||||
|
||||
def _parse_query_for_sqlite(search_term: str) -> str:
|
||||
def _parse_query_for_sqlite(search_term: str) -> Tuple[str, List[str]]:
|
||||
"""Takes a plain unicode string from the user and converts it into a form
|
||||
that can be passed to sqllite's matchinfo().
|
||||
|
||||
Returns the converted query string and the tokenized search terms
|
||||
that can be used as highlights.
|
||||
"""
|
||||
return _tokens_to_sqlite_match_query(_tokenize_query(search_term))
|
||||
|
||||
@@ -132,12 +132,16 @@ Changes in SCHEMA_VERSION = 82
|
||||
|
||||
Changes in SCHEMA_VERSION = 83
|
||||
- The event_txn_id is no longer used.
|
||||
|
||||
Changes in SCHEMA_VERSION = 84
|
||||
- No longer assumes that `event_auth_chain_links` holds transitive links, and
|
||||
so read operations must do graph traversal.
|
||||
"""
|
||||
|
||||
|
||||
SCHEMA_COMPAT_VERSION = (
|
||||
# The event_txn_id table and tables from MSC2716 no longer exist.
|
||||
83
|
||||
# Transitive links are no longer written to `event_auth_chain_links`
|
||||
84
|
||||
)
|
||||
"""Limit on how far the synapse codebase can be rolled back without breaking db compat
|
||||
|
||||
|
||||
@@ -0,0 +1,15 @@
|
||||
--
|
||||
-- This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||
--
|
||||
-- Copyright (C) 2023 New Vector, Ltd
|
||||
--
|
||||
-- This program is free software: you can redistribute it and/or modify
|
||||
-- it under the terms of the GNU Affero General Public License as
|
||||
-- published by the Free Software Foundation, either version 3 of the
|
||||
-- License, or (at your option) any later version.
|
||||
--
|
||||
-- See the GNU Affero General Public License for more details:
|
||||
-- <https://www.gnu.org/licenses/agpl-3.0.html>.
|
||||
|
||||
INSERT INTO background_updates (ordering, update_name, progress_json) VALUES
|
||||
(8404, 'access_tokens_refresh_token_id_idx', '{}');
|
||||
30
synapse/synapse_rust/rendezvous.pyi
Normal file
30
synapse/synapse_rust/rendezvous.pyi
Normal file
@@ -0,0 +1,30 @@
|
||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||
#
|
||||
# Copyright (C) 2024 New Vector, Ltd
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# See the GNU Affero General Public License for more details:
|
||||
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
||||
|
||||
from twisted.web.iweb import IRequest
|
||||
|
||||
from synapse.server import HomeServer
|
||||
|
||||
class RendezvousHandler:
|
||||
def __init__(
|
||||
self,
|
||||
homeserver: HomeServer,
|
||||
/,
|
||||
capacity: int = 100,
|
||||
max_content_length: int = 4 * 1024, # MSC4108 specifies 4KB
|
||||
eviction_interval: int = 60 * 1000,
|
||||
ttl: int = 60 * 1000,
|
||||
) -> None: ...
|
||||
def handle_post(self, request: IRequest) -> None: ...
|
||||
def handle_get(self, request: IRequest, session_id: str) -> None: ...
|
||||
def handle_put(self, request: IRequest, session_id: str) -> None: ...
|
||||
def handle_delete(self, request: IRequest, session_id: str) -> None: ...
|
||||
@@ -424,3 +424,40 @@ class DeactivateAccountTestCase(HomeserverTestCase):
|
||||
self._store.get_knocked_at_rooms_for_local_user(self.user)
|
||||
)
|
||||
self.assertEqual(len(after_deactivate_knocks), 0)
|
||||
|
||||
def test_membership_is_redacted_upon_deactivation(self) -> None:
|
||||
"""
|
||||
Tests that room membership events are redacted if erasure is requested.
|
||||
"""
|
||||
# Create a room
|
||||
room_id = self.helper.create_room_as(
|
||||
self.user,
|
||||
is_public=True,
|
||||
tok=self.token,
|
||||
)
|
||||
|
||||
# Change the displayname
|
||||
membership_event, _ = self.get_success(
|
||||
self.handler.update_membership(
|
||||
requester=create_requester(self.user),
|
||||
target=UserID.from_string(self.user),
|
||||
room_id=room_id,
|
||||
action=Membership.JOIN,
|
||||
content={"displayname": "Hello World!"},
|
||||
)
|
||||
)
|
||||
|
||||
# Deactivate the account
|
||||
self._deactivate_my_account()
|
||||
|
||||
# Get the all membership event IDs
|
||||
membership_event_ids = self.get_success(
|
||||
self._store.get_membership_event_ids_for_user(self.user, room_id=room_id)
|
||||
)
|
||||
|
||||
# Get the events incl. JSON
|
||||
events = self.get_success(self._store.get_events_as_list(membership_event_ids))
|
||||
|
||||
# Validate that there is no displayname in any of the events
|
||||
for event in events:
|
||||
self.assertTrue("displayname" not in event.content)
|
||||
|
||||
@@ -1101,6 +1101,56 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase):
|
||||
},
|
||||
)
|
||||
|
||||
def test_has_different_keys(self) -> None:
|
||||
"""check that has_different_keys returns True when the keys provided are different to what
|
||||
is in the database."""
|
||||
local_user = "@boris:" + self.hs.hostname
|
||||
keys1 = {
|
||||
"master_key": {
|
||||
# private key: 2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0
|
||||
"user_id": local_user,
|
||||
"usage": ["master"],
|
||||
"keys": {
|
||||
"ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk"
|
||||
},
|
||||
}
|
||||
}
|
||||
self.get_success(self.handler.upload_signing_keys_for_user(local_user, keys1))
|
||||
is_different = self.get_success(
|
||||
self.handler.has_different_keys(
|
||||
local_user,
|
||||
{
|
||||
"master_key": keys1["master_key"],
|
||||
},
|
||||
)
|
||||
)
|
||||
self.assertEqual(is_different, False)
|
||||
# change the usage => different keys
|
||||
keys1["master_key"]["usage"] = ["develop"]
|
||||
is_different = self.get_success(
|
||||
self.handler.has_different_keys(
|
||||
local_user,
|
||||
{
|
||||
"master_key": keys1["master_key"],
|
||||
},
|
||||
)
|
||||
)
|
||||
self.assertEqual(is_different, True)
|
||||
keys1["master_key"]["usage"] = ["master"] # reset
|
||||
# change the key => different keys
|
||||
keys1["master_key"]["keys"] = {
|
||||
"ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unIc0rncs": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unIc0rncs"
|
||||
}
|
||||
is_different = self.get_success(
|
||||
self.handler.has_different_keys(
|
||||
local_user,
|
||||
{
|
||||
"master_key": keys1["master_key"],
|
||||
},
|
||||
)
|
||||
)
|
||||
self.assertEqual(is_different, True)
|
||||
|
||||
def test_query_devices_remote_sync(self) -> None:
|
||||
"""Tests that querying keys for a remote user that we share a room with,
|
||||
but haven't yet fetched the keys for, returns the cross signing keys
|
||||
|
||||
@@ -435,6 +435,101 @@ class SyncTestCase(tests.unittest.HomeserverTestCase):
|
||||
[s2_event],
|
||||
)
|
||||
|
||||
def test_state_includes_changes_on_long_lived_forks(self) -> None:
|
||||
"""State changes that happen on a fork of the DAG must be included in `state`
|
||||
|
||||
Given the following DAG:
|
||||
|
||||
E1
|
||||
↗ ↖
|
||||
| S2
|
||||
| ↑
|
||||
--|------|----
|
||||
E3 |
|
||||
--|------|----
|
||||
| E4
|
||||
| |
|
||||
|
||||
... and a filter that means we only return 1 event, represented by the dashed
|
||||
horizontal lines: `S2` must be included in the `state` section on the second sync.
|
||||
"""
|
||||
alice = self.register_user("alice", "password")
|
||||
alice_tok = self.login(alice, "password")
|
||||
alice_requester = create_requester(alice)
|
||||
room_id = self.helper.create_room_as(alice, is_public=True, tok=alice_tok)
|
||||
|
||||
# Do an initial sync as Alice to get a known starting point.
|
||||
initial_sync_result = self.get_success(
|
||||
self.sync_handler.wait_for_sync_for_user(
|
||||
alice_requester, generate_sync_config(alice)
|
||||
)
|
||||
)
|
||||
last_room_creation_event_id = (
|
||||
initial_sync_result.joined[0].timeline.events[-1].event_id
|
||||
)
|
||||
|
||||
# Send a state event, and a regular event, both using the same prev ID
|
||||
with self._patch_get_latest_events([last_room_creation_event_id]):
|
||||
s2_event = self.helper.send_state(room_id, "s2", {}, tok=alice_tok)[
|
||||
"event_id"
|
||||
]
|
||||
e3_event = self.helper.send(room_id, "e3", tok=alice_tok)["event_id"]
|
||||
|
||||
# Do an incremental sync, this will return E3 but *not* S2 at this
|
||||
# point.
|
||||
incremental_sync = self.get_success(
|
||||
self.sync_handler.wait_for_sync_for_user(
|
||||
alice_requester,
|
||||
generate_sync_config(
|
||||
alice,
|
||||
filter_collection=FilterCollection(
|
||||
self.hs, {"room": {"timeline": {"limit": 1}}}
|
||||
),
|
||||
),
|
||||
since_token=initial_sync_result.next_batch,
|
||||
)
|
||||
)
|
||||
room_sync = incremental_sync.joined[0]
|
||||
self.assertEqual(room_sync.room_id, room_id)
|
||||
self.assertTrue(room_sync.timeline.limited)
|
||||
self.assertEqual(
|
||||
[e.event_id for e in room_sync.timeline.events],
|
||||
[e3_event],
|
||||
)
|
||||
self.assertEqual(
|
||||
[e.event_id for e in room_sync.state.values()],
|
||||
[],
|
||||
)
|
||||
|
||||
# Now send another event that points to S2, but not E3.
|
||||
with self._patch_get_latest_events([s2_event]):
|
||||
e4_event = self.helper.send(room_id, "e4", tok=alice_tok)["event_id"]
|
||||
|
||||
# Doing an incremental sync should return S2 in state.
|
||||
incremental_sync = self.get_success(
|
||||
self.sync_handler.wait_for_sync_for_user(
|
||||
alice_requester,
|
||||
generate_sync_config(
|
||||
alice,
|
||||
filter_collection=FilterCollection(
|
||||
self.hs, {"room": {"timeline": {"limit": 1}}}
|
||||
),
|
||||
),
|
||||
since_token=incremental_sync.next_batch,
|
||||
)
|
||||
)
|
||||
room_sync = incremental_sync.joined[0]
|
||||
self.assertEqual(room_sync.room_id, room_id)
|
||||
self.assertFalse(room_sync.timeline.limited)
|
||||
self.assertEqual(
|
||||
[e.event_id for e in room_sync.timeline.events],
|
||||
[e4_event],
|
||||
)
|
||||
self.assertEqual(
|
||||
[e.event_id for e in room_sync.state.values()],
|
||||
[s2_event],
|
||||
)
|
||||
|
||||
def test_state_includes_changes_on_ungappy_syncs(self) -> None:
|
||||
"""Test `state` where the sync is not gappy.
|
||||
|
||||
|
||||
@@ -277,7 +277,8 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests):
|
||||
self.assertEqual(400, channel.code, msg=channel.json_body)
|
||||
self.assertEqual(Codes.MISSING_PARAM, channel.json_body["errcode"])
|
||||
self.assertEqual(
|
||||
"Missing integer query parameter 'before_ts'", channel.json_body["error"]
|
||||
"Missing required integer query parameter before_ts",
|
||||
channel.json_body["error"],
|
||||
)
|
||||
|
||||
def test_invalid_parameter(self) -> None:
|
||||
@@ -320,7 +321,7 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests):
|
||||
self.assertEqual(400, channel.code, msg=channel.json_body)
|
||||
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
|
||||
self.assertEqual(
|
||||
"Query parameter size_gt must be a string representing a positive integer.",
|
||||
"Query parameter size_gt must be a positive integer.",
|
||||
channel.json_body["error"],
|
||||
)
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@
|
||||
import json
|
||||
import time
|
||||
import urllib.parse
|
||||
from http import HTTPStatus
|
||||
from typing import List, Optional
|
||||
from unittest.mock import AsyncMock, Mock
|
||||
|
||||
@@ -2190,6 +2191,33 @@ class RoomMessagesTestCase(unittest.HomeserverTestCase):
|
||||
chunk = channel.json_body["chunk"]
|
||||
self.assertEqual(len(chunk), 0, [event["content"] for event in chunk])
|
||||
|
||||
def test_room_message_filter_query_validation(self) -> None:
|
||||
# Test json validation in (filter) query parameter.
|
||||
# Does not test the validity of the filter, only the json validation.
|
||||
|
||||
# Check Get with valid json filter parameter, expect 200.
|
||||
valid_filter_str = '{"types": ["m.room.message"]}'
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
f"/_synapse/admin/v1/rooms/{self.room_id}/messages?dir=b&filter={valid_filter_str}",
|
||||
access_token=self.admin_user_tok,
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
|
||||
|
||||
# Check Get with invalid json filter parameter, expect 400 NOT_JSON.
|
||||
invalid_filter_str = "}}}{}"
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
f"/_synapse/admin/v1/rooms/{self.room_id}/messages?dir=b&filter={invalid_filter_str}",
|
||||
access_token=self.admin_user_tok,
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
|
||||
self.assertEqual(
|
||||
channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
|
||||
)
|
||||
|
||||
|
||||
class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
|
||||
servlets = [
|
||||
@@ -2522,6 +2550,39 @@ class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
|
||||
else:
|
||||
self.fail("Event %s from events_after not found" % j)
|
||||
|
||||
def test_room_event_context_filter_query_validation(self) -> None:
|
||||
# Test json validation in (filter) query parameter.
|
||||
# Does not test the validity of the filter, only the json validation.
|
||||
|
||||
# Create a user with room and event_id.
|
||||
user_id = self.register_user("test", "test")
|
||||
user_tok = self.login("test", "test")
|
||||
room_id = self.helper.create_room_as(user_id, tok=user_tok)
|
||||
event_id = self.helper.send(room_id, "message 1", tok=user_tok)["event_id"]
|
||||
|
||||
# Check Get with valid json filter parameter, expect 200.
|
||||
valid_filter_str = '{"types": ["m.room.message"]}'
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
f"/_synapse/admin/v1/rooms/{room_id}/context/{event_id}?filter={valid_filter_str}",
|
||||
access_token=self.admin_user_tok,
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
|
||||
|
||||
# Check Get with invalid json filter parameter, expect 400 NOT_JSON.
|
||||
invalid_filter_str = "}}}{}"
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
f"/_synapse/admin/v1/rooms/{room_id}/context/{event_id}?filter={invalid_filter_str}",
|
||||
access_token=self.admin_user_tok,
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
|
||||
self.assertEqual(
|
||||
channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
|
||||
)
|
||||
|
||||
|
||||
class MakeRoomAdminTestCase(unittest.HomeserverTestCase):
|
||||
servlets = [
|
||||
|
||||
@@ -22,6 +22,7 @@
|
||||
import datetime
|
||||
import os
|
||||
from typing import Any, Dict, List, Tuple
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
import pkg_resources
|
||||
|
||||
@@ -42,6 +43,7 @@ from synapse.types import JsonDict
|
||||
from synapse.util import Clock
|
||||
|
||||
from tests import unittest
|
||||
from tests.server import ThreadedMemoryReactorClock
|
||||
from tests.unittest import override_config
|
||||
|
||||
|
||||
@@ -58,6 +60,13 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase):
|
||||
config["allow_guest_access"] = True
|
||||
return config
|
||||
|
||||
def make_homeserver(
|
||||
self, reactor: ThreadedMemoryReactorClock, clock: Clock
|
||||
) -> HomeServer:
|
||||
hs = super().make_homeserver(reactor, clock)
|
||||
hs.get_send_email_handler()._sendmail = AsyncMock()
|
||||
return hs
|
||||
|
||||
def test_POST_appservice_registration_valid(self) -> None:
|
||||
user_id = "@as_user_kermit:test"
|
||||
as_token = "i_am_an_app_service"
|
||||
|
||||
@@ -35,7 +35,6 @@ from synapse.util import Clock
|
||||
from tests import unittest
|
||||
from tests.server import FakeChannel
|
||||
from tests.test_utils.event_injection import inject_event
|
||||
from tests.unittest import override_config
|
||||
|
||||
|
||||
class BaseRelationsTestCase(unittest.HomeserverTestCase):
|
||||
@@ -957,7 +956,6 @@ class RelationPaginationTestCase(BaseRelationsTestCase):
|
||||
|
||||
|
||||
class RecursiveRelationTestCase(BaseRelationsTestCase):
|
||||
@override_config({"experimental_features": {"msc3981_recurse_relations": True}})
|
||||
def test_recursive_relations(self) -> None:
|
||||
"""Generate a complex, multi-level relationship tree and query it."""
|
||||
# Create a thread with a few messages in it.
|
||||
@@ -1003,7 +1001,7 @@ class RecursiveRelationTestCase(BaseRelationsTestCase):
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}"
|
||||
"?dir=f&limit=20&org.matrix.msc3981.recurse=true",
|
||||
"?dir=f&limit=20&recurse=true",
|
||||
access_token=self.user_token,
|
||||
)
|
||||
self.assertEqual(200, channel.code, channel.json_body)
|
||||
@@ -1024,7 +1022,6 @@ class RecursiveRelationTestCase(BaseRelationsTestCase):
|
||||
],
|
||||
)
|
||||
|
||||
@override_config({"experimental_features": {"msc3981_recurse_relations": True}})
|
||||
def test_recursive_relations_with_filter(self) -> None:
|
||||
"""The event_type and rel_type still apply."""
|
||||
# Create a thread with a few messages in it.
|
||||
@@ -1052,7 +1049,7 @@ class RecursiveRelationTestCase(BaseRelationsTestCase):
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}/{RelationTypes.ANNOTATION}"
|
||||
"?dir=f&limit=20&org.matrix.msc3981.recurse=true",
|
||||
"?dir=f&limit=20&recurse=true",
|
||||
access_token=self.user_token,
|
||||
)
|
||||
self.assertEqual(200, channel.code, channel.json_body)
|
||||
@@ -1065,7 +1062,7 @@ class RecursiveRelationTestCase(BaseRelationsTestCase):
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}/{RelationTypes.ANNOTATION}/m.reaction"
|
||||
"?dir=f&limit=20&org.matrix.msc3981.recurse=true",
|
||||
"?dir=f&limit=20&recurse=true",
|
||||
access_token=self.user_token,
|
||||
)
|
||||
self.assertEqual(200, channel.code, channel.json_body)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||
#
|
||||
# Copyright 2022 The Matrix.org Foundation C.I.C.
|
||||
# Copyright (C) 2023 New Vector, Ltd
|
||||
# Copyright (C) 2023-2024 New Vector, Ltd
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
@@ -19,16 +19,23 @@
|
||||
#
|
||||
#
|
||||
|
||||
from typing import Dict
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from twisted.test.proto_helpers import MemoryReactor
|
||||
from twisted.web.resource import Resource
|
||||
|
||||
from synapse.rest.client import rendezvous
|
||||
from synapse.rest.synapse.client.rendezvous import MSC4108RendezvousSessionResource
|
||||
from synapse.server import HomeServer
|
||||
from synapse.util import Clock
|
||||
|
||||
from tests import unittest
|
||||
from tests.unittest import override_config
|
||||
from tests.utils import HAS_AUTHLIB
|
||||
|
||||
endpoint = "/_matrix/client/unstable/org.matrix.msc3886/rendezvous"
|
||||
msc3886_endpoint = "/_matrix/client/unstable/org.matrix.msc3886/rendezvous"
|
||||
msc4108_endpoint = "/_matrix/client/unstable/org.matrix.msc4108/rendezvous"
|
||||
|
||||
|
||||
class RendezvousServletTestCase(unittest.HomeserverTestCase):
|
||||
@@ -40,12 +47,430 @@ class RendezvousServletTestCase(unittest.HomeserverTestCase):
|
||||
self.hs = self.setup_test_homeserver()
|
||||
return self.hs
|
||||
|
||||
def create_resource_dict(self) -> Dict[str, Resource]:
|
||||
return {
|
||||
**super().create_resource_dict(),
|
||||
"/_synapse/client/rendezvous": MSC4108RendezvousSessionResource(self.hs),
|
||||
}
|
||||
|
||||
def test_disabled(self) -> None:
|
||||
channel = self.make_request("POST", endpoint, {}, access_token=None)
|
||||
channel = self.make_request("POST", msc3886_endpoint, {}, access_token=None)
|
||||
self.assertEqual(channel.code, 404)
|
||||
channel = self.make_request("POST", msc4108_endpoint, {}, access_token=None)
|
||||
self.assertEqual(channel.code, 404)
|
||||
|
||||
@override_config({"experimental_features": {"msc3886_endpoint": "/asd"}})
|
||||
def test_redirect(self) -> None:
|
||||
channel = self.make_request("POST", endpoint, {}, access_token=None)
|
||||
def test_msc3886_redirect(self) -> None:
|
||||
channel = self.make_request("POST", msc3886_endpoint, {}, access_token=None)
|
||||
self.assertEqual(channel.code, 307)
|
||||
self.assertEqual(channel.headers.getRawHeaders("Location"), ["/asd"])
|
||||
|
||||
@unittest.skip_unless(HAS_AUTHLIB, "requires authlib")
|
||||
@override_config(
|
||||
{
|
||||
"disable_registration": True,
|
||||
"experimental_features": {
|
||||
"msc4108_delegation_endpoint": "https://asd",
|
||||
"msc3861": {
|
||||
"enabled": True,
|
||||
"issuer": "https://issuer",
|
||||
"client_id": "client_id",
|
||||
"client_auth_method": "client_secret_post",
|
||||
"client_secret": "client_secret",
|
||||
"admin_token": "admin_token_value",
|
||||
},
|
||||
},
|
||||
}
|
||||
)
|
||||
def test_msc4108_delegation(self) -> None:
|
||||
channel = self.make_request("POST", msc4108_endpoint, {}, access_token=None)
|
||||
self.assertEqual(channel.code, 307)
|
||||
self.assertEqual(channel.headers.getRawHeaders("Location"), ["https://asd"])
|
||||
|
||||
@unittest.skip_unless(HAS_AUTHLIB, "requires authlib")
|
||||
@override_config(
|
||||
{
|
||||
"disable_registration": True,
|
||||
"experimental_features": {
|
||||
"msc4108_enabled": True,
|
||||
"msc3861": {
|
||||
"enabled": True,
|
||||
"issuer": "https://issuer",
|
||||
"client_id": "client_id",
|
||||
"client_auth_method": "client_secret_post",
|
||||
"client_secret": "client_secret",
|
||||
"admin_token": "admin_token_value",
|
||||
},
|
||||
},
|
||||
}
|
||||
)
|
||||
def test_msc4108(self) -> None:
|
||||
"""
|
||||
Test the MSC4108 rendezvous endpoint, including:
|
||||
- Creating a session
|
||||
- Getting the data back
|
||||
- Updating the data
|
||||
- Deleting the data
|
||||
- ETag handling
|
||||
"""
|
||||
# We can post arbitrary data to the endpoint
|
||||
channel = self.make_request(
|
||||
"POST",
|
||||
msc4108_endpoint,
|
||||
"foo=bar",
|
||||
content_type=b"text/plain",
|
||||
access_token=None,
|
||||
)
|
||||
self.assertEqual(channel.code, 201)
|
||||
self.assertSubstring("/_synapse/client/rendezvous/", channel.json_body["url"])
|
||||
headers = dict(channel.headers.getAllRawHeaders())
|
||||
self.assertIn(b"ETag", headers)
|
||||
self.assertIn(b"Expires", headers)
|
||||
self.assertEqual(headers[b"Content-Type"], [b"application/json"])
|
||||
self.assertEqual(headers[b"Access-Control-Allow-Origin"], [b"*"])
|
||||
self.assertEqual(headers[b"Access-Control-Expose-Headers"], [b"etag"])
|
||||
self.assertEqual(headers[b"Cache-Control"], [b"no-store"])
|
||||
self.assertEqual(headers[b"Pragma"], [b"no-cache"])
|
||||
self.assertIn("url", channel.json_body)
|
||||
self.assertTrue(channel.json_body["url"].startswith("https://"))
|
||||
|
||||
url = urlparse(channel.json_body["url"])
|
||||
session_endpoint = url.path
|
||||
etag = headers[b"ETag"][0]
|
||||
|
||||
# We can get the data back
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
session_endpoint,
|
||||
access_token=None,
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, 200)
|
||||
headers = dict(channel.headers.getAllRawHeaders())
|
||||
self.assertEqual(headers[b"ETag"], [etag])
|
||||
self.assertIn(b"Expires", headers)
|
||||
self.assertEqual(headers[b"Content-Type"], [b"text/plain"])
|
||||
self.assertEqual(headers[b"Access-Control-Allow-Origin"], [b"*"])
|
||||
self.assertEqual(headers[b"Access-Control-Expose-Headers"], [b"etag"])
|
||||
self.assertEqual(headers[b"Cache-Control"], [b"no-store"])
|
||||
self.assertEqual(headers[b"Pragma"], [b"no-cache"])
|
||||
self.assertEqual(channel.text_body, "foo=bar")
|
||||
|
||||
# We can make sure the data hasn't changed
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
session_endpoint,
|
||||
access_token=None,
|
||||
custom_headers=[("If-None-Match", etag)],
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, 304)
|
||||
|
||||
# We can update the data
|
||||
channel = self.make_request(
|
||||
"PUT",
|
||||
session_endpoint,
|
||||
"foo=baz",
|
||||
content_type=b"text/plain",
|
||||
access_token=None,
|
||||
custom_headers=[("If-Match", etag)],
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, 202)
|
||||
headers = dict(channel.headers.getAllRawHeaders())
|
||||
old_etag = etag
|
||||
new_etag = headers[b"ETag"][0]
|
||||
|
||||
# If we try to update it again with the old etag, it should fail
|
||||
channel = self.make_request(
|
||||
"PUT",
|
||||
session_endpoint,
|
||||
"bar=baz",
|
||||
content_type=b"text/plain",
|
||||
access_token=None,
|
||||
custom_headers=[("If-Match", old_etag)],
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, 412)
|
||||
self.assertEqual(channel.json_body["errcode"], "M_UNKNOWN")
|
||||
self.assertEqual(
|
||||
channel.json_body["org.matrix.msc4108.errcode"], "M_CONCURRENT_WRITE"
|
||||
)
|
||||
|
||||
# If we try to get with the old etag, we should get the updated data
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
session_endpoint,
|
||||
access_token=None,
|
||||
custom_headers=[("If-None-Match", old_etag)],
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, 200)
|
||||
headers = dict(channel.headers.getAllRawHeaders())
|
||||
self.assertEqual(headers[b"ETag"], [new_etag])
|
||||
self.assertEqual(channel.text_body, "foo=baz")
|
||||
|
||||
# We can delete the data
|
||||
channel = self.make_request(
|
||||
"DELETE",
|
||||
session_endpoint,
|
||||
access_token=None,
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, 204)
|
||||
|
||||
# If we try to get the data again, it should fail
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
session_endpoint,
|
||||
access_token=None,
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, 404)
|
||||
self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND")
|
||||
|
||||
@unittest.skip_unless(HAS_AUTHLIB, "requires authlib")
|
||||
@override_config(
|
||||
{
|
||||
"disable_registration": True,
|
||||
"experimental_features": {
|
||||
"msc4108_enabled": True,
|
||||
"msc3861": {
|
||||
"enabled": True,
|
||||
"issuer": "https://issuer",
|
||||
"client_id": "client_id",
|
||||
"client_auth_method": "client_secret_post",
|
||||
"client_secret": "client_secret",
|
||||
"admin_token": "admin_token_value",
|
||||
},
|
||||
},
|
||||
}
|
||||
)
|
||||
def test_msc4108_expiration(self) -> None:
|
||||
"""
|
||||
Test that entries are evicted after a TTL.
|
||||
"""
|
||||
# Start a new session
|
||||
channel = self.make_request(
|
||||
"POST",
|
||||
msc4108_endpoint,
|
||||
"foo=bar",
|
||||
content_type=b"text/plain",
|
||||
access_token=None,
|
||||
)
|
||||
self.assertEqual(channel.code, 201)
|
||||
session_endpoint = urlparse(channel.json_body["url"]).path
|
||||
|
||||
# Sanity check that we can get the data back
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
session_endpoint,
|
||||
access_token=None,
|
||||
)
|
||||
self.assertEqual(channel.code, 200)
|
||||
self.assertEqual(channel.text_body, "foo=bar")
|
||||
|
||||
# Advance the clock, TTL of entries is 1 minute
|
||||
self.reactor.advance(60)
|
||||
|
||||
# Get the data back, it should be gone
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
session_endpoint,
|
||||
access_token=None,
|
||||
)
|
||||
self.assertEqual(channel.code, 404)
|
||||
|
||||
@unittest.skip_unless(HAS_AUTHLIB, "requires authlib")
|
||||
@override_config(
|
||||
{
|
||||
"disable_registration": True,
|
||||
"experimental_features": {
|
||||
"msc4108_enabled": True,
|
||||
"msc3861": {
|
||||
"enabled": True,
|
||||
"issuer": "https://issuer",
|
||||
"client_id": "client_id",
|
||||
"client_auth_method": "client_secret_post",
|
||||
"client_secret": "client_secret",
|
||||
"admin_token": "admin_token_value",
|
||||
},
|
||||
},
|
||||
}
|
||||
)
|
||||
def test_msc4108_capacity(self) -> None:
|
||||
"""
|
||||
Test that a capacity limit is enforced on the rendezvous sessions, as old
|
||||
entries are evicted at an interval when the limit is reached.
|
||||
"""
|
||||
# Start a new session
|
||||
channel = self.make_request(
|
||||
"POST",
|
||||
msc4108_endpoint,
|
||||
"foo=bar",
|
||||
content_type=b"text/plain",
|
||||
access_token=None,
|
||||
)
|
||||
self.assertEqual(channel.code, 201)
|
||||
session_endpoint = urlparse(channel.json_body["url"]).path
|
||||
|
||||
# Sanity check that we can get the data back
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
session_endpoint,
|
||||
access_token=None,
|
||||
)
|
||||
self.assertEqual(channel.code, 200)
|
||||
self.assertEqual(channel.text_body, "foo=bar")
|
||||
|
||||
# Start a lot of new sessions
|
||||
for _ in range(100):
|
||||
channel = self.make_request(
|
||||
"POST",
|
||||
msc4108_endpoint,
|
||||
"foo=bar",
|
||||
content_type=b"text/plain",
|
||||
access_token=None,
|
||||
)
|
||||
self.assertEqual(channel.code, 201)
|
||||
|
||||
# Get the data back, it should still be there, as the eviction hasn't run yet
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
session_endpoint,
|
||||
access_token=None,
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, 200)
|
||||
|
||||
# Advance the clock, as it will trigger the eviction
|
||||
self.reactor.advance(1)
|
||||
|
||||
# Get the data back, it should be gone
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
session_endpoint,
|
||||
access_token=None,
|
||||
)
|
||||
|
||||
@unittest.skip_unless(HAS_AUTHLIB, "requires authlib")
|
||||
@override_config(
|
||||
{
|
||||
"disable_registration": True,
|
||||
"experimental_features": {
|
||||
"msc4108_enabled": True,
|
||||
"msc3861": {
|
||||
"enabled": True,
|
||||
"issuer": "https://issuer",
|
||||
"client_id": "client_id",
|
||||
"client_auth_method": "client_secret_post",
|
||||
"client_secret": "client_secret",
|
||||
"admin_token": "admin_token_value",
|
||||
},
|
||||
},
|
||||
}
|
||||
)
|
||||
def test_msc4108_hard_capacity(self) -> None:
|
||||
"""
|
||||
Test that a hard capacity limit is enforced on the rendezvous sessions, as old
|
||||
entries are evicted immediately when the limit is reached.
|
||||
"""
|
||||
# Start a new session
|
||||
channel = self.make_request(
|
||||
"POST",
|
||||
msc4108_endpoint,
|
||||
"foo=bar",
|
||||
content_type=b"text/plain",
|
||||
access_token=None,
|
||||
)
|
||||
self.assertEqual(channel.code, 201)
|
||||
session_endpoint = urlparse(channel.json_body["url"]).path
|
||||
# We advance the clock to make sure that this entry is the "lowest" in the session list
|
||||
self.reactor.advance(1)
|
||||
|
||||
# Sanity check that we can get the data back
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
session_endpoint,
|
||||
access_token=None,
|
||||
)
|
||||
self.assertEqual(channel.code, 200)
|
||||
self.assertEqual(channel.text_body, "foo=bar")
|
||||
|
||||
# Start a lot of new sessions
|
||||
for _ in range(200):
|
||||
channel = self.make_request(
|
||||
"POST",
|
||||
msc4108_endpoint,
|
||||
"foo=bar",
|
||||
content_type=b"text/plain",
|
||||
access_token=None,
|
||||
)
|
||||
self.assertEqual(channel.code, 201)
|
||||
|
||||
# Get the data back, it should already be gone as we hit the hard limit
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
session_endpoint,
|
||||
access_token=None,
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, 404)
|
||||
|
||||
@unittest.skip_unless(HAS_AUTHLIB, "requires authlib")
|
||||
@override_config(
|
||||
{
|
||||
"disable_registration": True,
|
||||
"experimental_features": {
|
||||
"msc4108_enabled": True,
|
||||
"msc3861": {
|
||||
"enabled": True,
|
||||
"issuer": "https://issuer",
|
||||
"client_id": "client_id",
|
||||
"client_auth_method": "client_secret_post",
|
||||
"client_secret": "client_secret",
|
||||
"admin_token": "admin_token_value",
|
||||
},
|
||||
},
|
||||
}
|
||||
)
|
||||
def test_msc4108_content_type(self) -> None:
|
||||
"""
|
||||
Test that the content-type is restricted to text/plain.
|
||||
"""
|
||||
# We cannot post invalid content-type arbitrary data to the endpoint
|
||||
channel = self.make_request(
|
||||
"POST",
|
||||
msc4108_endpoint,
|
||||
"foo=bar",
|
||||
content_is_form=True,
|
||||
access_token=None,
|
||||
)
|
||||
self.assertEqual(channel.code, 400)
|
||||
self.assertEqual(channel.json_body["errcode"], "M_INVALID_PARAM")
|
||||
|
||||
# Make a valid request
|
||||
channel = self.make_request(
|
||||
"POST",
|
||||
msc4108_endpoint,
|
||||
"foo=bar",
|
||||
content_type=b"text/plain",
|
||||
access_token=None,
|
||||
)
|
||||
self.assertEqual(channel.code, 201)
|
||||
url = urlparse(channel.json_body["url"])
|
||||
session_endpoint = url.path
|
||||
headers = dict(channel.headers.getAllRawHeaders())
|
||||
etag = headers[b"ETag"][0]
|
||||
|
||||
# We can't update the data with invalid content-type
|
||||
channel = self.make_request(
|
||||
"PUT",
|
||||
session_endpoint,
|
||||
"foo=baz",
|
||||
content_is_form=True,
|
||||
access_token=None,
|
||||
custom_headers=[("If-Match", etag)],
|
||||
)
|
||||
self.assertEqual(channel.code, 400)
|
||||
self.assertEqual(channel.json_body["errcode"], "M_INVALID_PARAM")
|
||||
|
||||
@@ -2175,6 +2175,31 @@ class RoomMessageListTestCase(RoomBase):
|
||||
chunk = channel.json_body["chunk"]
|
||||
self.assertEqual(len(chunk), 0, [event["content"] for event in chunk])
|
||||
|
||||
def test_room_message_filter_query_validation(self) -> None:
|
||||
# Test json validation in (filter) query parameter.
|
||||
# Does not test the validity of the filter, only the json validation.
|
||||
|
||||
# Check Get with valid json filter parameter, expect 200.
|
||||
valid_filter_str = '{"types": ["m.room.message"]}'
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
f"/rooms/{self.room_id}/messages?access_token=x&dir=b&filter={valid_filter_str}",
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
|
||||
|
||||
# Check Get with invalid json filter parameter, expect 400 NOT_JSON.
|
||||
invalid_filter_str = "}}}{}"
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
f"/rooms/{self.room_id}/messages?access_token=x&dir=b&filter={invalid_filter_str}",
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
|
||||
self.assertEqual(
|
||||
channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
|
||||
)
|
||||
|
||||
|
||||
class RoomMessageFilterTestCase(RoomBase):
|
||||
"""Tests /rooms/$room_id/messages REST events."""
|
||||
@@ -3213,6 +3238,33 @@ class ContextTestCase(unittest.HomeserverTestCase):
|
||||
self.assertDictEqual(events_after[0].get("content"), {}, events_after[0])
|
||||
self.assertEqual(events_after[1].get("content"), {}, events_after[1])
|
||||
|
||||
def test_room_event_context_filter_query_validation(self) -> None:
|
||||
# Test json validation in (filter) query parameter.
|
||||
# Does not test the validity of the filter, only the json validation.
|
||||
event_id = self.helper.send(self.room_id, "message 7", tok=self.tok)["event_id"]
|
||||
|
||||
# Check Get with valid json filter parameter, expect 200.
|
||||
valid_filter_str = '{"types": ["m.room.message"]}'
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
f"/rooms/{self.room_id}/context/{event_id}?filter={valid_filter_str}",
|
||||
access_token=self.tok,
|
||||
)
|
||||
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
|
||||
|
||||
# Check Get with invalid json filter parameter, expect 400 NOT_JSON.
|
||||
invalid_filter_str = "}}}{}"
|
||||
channel = self.make_request(
|
||||
"GET",
|
||||
f"/rooms/{self.room_id}/context/{event_id}?filter={invalid_filter_str}",
|
||||
access_token=self.tok,
|
||||
)
|
||||
|
||||
self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
|
||||
self.assertEqual(
|
||||
channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
|
||||
)
|
||||
|
||||
|
||||
class RoomAliasListTestCase(unittest.HomeserverTestCase):
|
||||
servlets = [
|
||||
|
||||
@@ -351,6 +351,7 @@ def make_request(
|
||||
request: Type[Request] = SynapseRequest,
|
||||
shorthand: bool = True,
|
||||
federation_auth_origin: Optional[bytes] = None,
|
||||
content_type: Optional[bytes] = None,
|
||||
content_is_form: bool = False,
|
||||
await_result: bool = True,
|
||||
custom_headers: Optional[Iterable[CustomHeaderType]] = None,
|
||||
@@ -373,6 +374,8 @@ def make_request(
|
||||
with the usual REST API path, if it doesn't contain it.
|
||||
federation_auth_origin: if set to not-None, we will add a fake
|
||||
Authorization header pretenting to be the given server name.
|
||||
content_type: The content-type to use for the request. If not set then will default to
|
||||
application/json unless content_is_form is true.
|
||||
content_is_form: Whether the content is URL encoded form data. Adds the
|
||||
'Content-Type': 'application/x-www-form-urlencoded' header.
|
||||
await_result: whether to wait for the request to complete rendering. If true,
|
||||
@@ -436,7 +439,9 @@ def make_request(
|
||||
)
|
||||
|
||||
if content:
|
||||
if content_is_form:
|
||||
if content_type is not None:
|
||||
req.requestHeaders.addRawHeader(b"Content-Type", content_type)
|
||||
elif content_is_form:
|
||||
req.requestHeaders.addRawHeader(
|
||||
b"Content-Type", b"application/x-www-form-urlencoded"
|
||||
)
|
||||
|
||||
@@ -21,6 +21,8 @@
|
||||
|
||||
from typing import Dict, List, Set, Tuple, cast
|
||||
|
||||
from parameterized import parameterized
|
||||
|
||||
from twisted.test.proto_helpers import MemoryReactor
|
||||
from twisted.trial import unittest
|
||||
|
||||
@@ -45,7 +47,8 @@ class EventChainStoreTestCase(HomeserverTestCase):
|
||||
self.store = hs.get_datastores().main
|
||||
self._next_stream_ordering = 1
|
||||
|
||||
def test_simple(self) -> None:
|
||||
@parameterized.expand([(False,), (True,)])
|
||||
def test_simple(self, batched: bool) -> None:
|
||||
"""Test that the example in `docs/auth_chain_difference_algorithm.md`
|
||||
works.
|
||||
"""
|
||||
@@ -53,6 +56,7 @@ class EventChainStoreTestCase(HomeserverTestCase):
|
||||
event_factory = self.hs.get_event_builder_factory()
|
||||
bob = "@creator:test"
|
||||
alice = "@alice:test"
|
||||
charlie = "@charlie:test"
|
||||
room_id = "!room:test"
|
||||
|
||||
# Ensure that we have a rooms entry so that we generate the chain index.
|
||||
@@ -191,6 +195,26 @@ class EventChainStoreTestCase(HomeserverTestCase):
|
||||
)
|
||||
)
|
||||
|
||||
charlie_invite = self.get_success(
|
||||
event_factory.for_room_version(
|
||||
RoomVersions.V6,
|
||||
{
|
||||
"type": EventTypes.Member,
|
||||
"state_key": charlie,
|
||||
"sender": alice,
|
||||
"room_id": room_id,
|
||||
"content": {"tag": "charlie_invite"},
|
||||
},
|
||||
).build(
|
||||
prev_event_ids=[],
|
||||
auth_event_ids=[
|
||||
create.event_id,
|
||||
alice_join2.event_id,
|
||||
power_2.event_id,
|
||||
],
|
||||
)
|
||||
)
|
||||
|
||||
events = [
|
||||
create,
|
||||
bob_join,
|
||||
@@ -200,33 +224,41 @@ class EventChainStoreTestCase(HomeserverTestCase):
|
||||
bob_join_2,
|
||||
power_2,
|
||||
alice_join2,
|
||||
charlie_invite,
|
||||
]
|
||||
|
||||
expected_links = [
|
||||
(bob_join, create),
|
||||
(power, create),
|
||||
(power, bob_join),
|
||||
(alice_invite, create),
|
||||
(alice_invite, power),
|
||||
(alice_invite, bob_join),
|
||||
(bob_join_2, power),
|
||||
(alice_join2, power_2),
|
||||
(charlie_invite, alice_join2),
|
||||
]
|
||||
|
||||
self.persist(events)
|
||||
# We either persist as a batch or one-by-one depending on test
|
||||
# parameter.
|
||||
if batched:
|
||||
self.persist(events)
|
||||
else:
|
||||
for event in events:
|
||||
self.persist([event])
|
||||
|
||||
chain_map, link_map = self.fetch_chains(events)
|
||||
|
||||
# Check that the expected links and only the expected links have been
|
||||
# added.
|
||||
self.assertEqual(len(expected_links), len(list(link_map.get_additions())))
|
||||
event_map = {e.event_id: e for e in events}
|
||||
reverse_chain_map = {v: event_map[k] for k, v in chain_map.items()}
|
||||
|
||||
for start, end in expected_links:
|
||||
start_id, start_seq = chain_map[start.event_id]
|
||||
end_id, end_seq = chain_map[end.event_id]
|
||||
|
||||
self.assertIn(
|
||||
(start_seq, end_seq), list(link_map.get_links_between(start_id, end_id))
|
||||
)
|
||||
self.maxDiff = None
|
||||
self.assertCountEqual(
|
||||
expected_links,
|
||||
[
|
||||
(reverse_chain_map[(s1, s2)], reverse_chain_map[(t1, t2)])
|
||||
for s1, s2, t1, t2 in link_map.get_additions()
|
||||
],
|
||||
)
|
||||
|
||||
# Test that everything can reach the create event, but the create event
|
||||
# can't reach anything.
|
||||
@@ -368,24 +400,23 @@ class EventChainStoreTestCase(HomeserverTestCase):
|
||||
|
||||
expected_links = [
|
||||
(bob_join, create),
|
||||
(power, create),
|
||||
(power, bob_join),
|
||||
(alice_invite, create),
|
||||
(alice_invite, power),
|
||||
(alice_invite, bob_join),
|
||||
]
|
||||
|
||||
# Check that the expected links and only the expected links have been
|
||||
# added.
|
||||
self.assertEqual(len(expected_links), len(list(link_map.get_additions())))
|
||||
event_map = {e.event_id: e for e in events}
|
||||
reverse_chain_map = {v: event_map[k] for k, v in chain_map.items()}
|
||||
|
||||
for start, end in expected_links:
|
||||
start_id, start_seq = chain_map[start.event_id]
|
||||
end_id, end_seq = chain_map[end.event_id]
|
||||
|
||||
self.assertIn(
|
||||
(start_seq, end_seq), list(link_map.get_links_between(start_id, end_id))
|
||||
)
|
||||
self.maxDiff = None
|
||||
self.assertCountEqual(
|
||||
expected_links,
|
||||
[
|
||||
(reverse_chain_map[(s1, s2)], reverse_chain_map[(t1, t2)])
|
||||
for s1, s2, t1, t2 in link_map.get_additions()
|
||||
],
|
||||
)
|
||||
|
||||
def persist(
|
||||
self,
|
||||
@@ -489,8 +520,6 @@ class LinkMapTestCase(unittest.TestCase):
|
||||
link_map = _LinkMap()
|
||||
|
||||
link_map.add_link((1, 1), (2, 1), new=False)
|
||||
self.assertCountEqual(link_map.get_links_between(1, 2), [(1, 1)])
|
||||
self.assertCountEqual(link_map.get_links_from((1, 1)), [(2, 1)])
|
||||
self.assertCountEqual(link_map.get_additions(), [])
|
||||
self.assertTrue(link_map.exists_path_from((1, 5), (2, 1)))
|
||||
self.assertFalse(link_map.exists_path_from((1, 5), (2, 2)))
|
||||
@@ -499,18 +528,31 @@ class LinkMapTestCase(unittest.TestCase):
|
||||
|
||||
# Attempting to add a redundant link is ignored.
|
||||
self.assertFalse(link_map.add_link((1, 4), (2, 1)))
|
||||
self.assertCountEqual(link_map.get_links_between(1, 2), [(1, 1)])
|
||||
self.assertCountEqual(link_map.get_additions(), [])
|
||||
|
||||
# Adding new non-redundant links works
|
||||
self.assertTrue(link_map.add_link((1, 3), (2, 3)))
|
||||
self.assertCountEqual(link_map.get_links_between(1, 2), [(1, 1), (3, 3)])
|
||||
self.assertCountEqual(link_map.get_additions(), [(1, 3, 2, 3)])
|
||||
|
||||
self.assertTrue(link_map.add_link((2, 5), (1, 3)))
|
||||
self.assertCountEqual(link_map.get_links_between(2, 1), [(5, 3)])
|
||||
self.assertCountEqual(link_map.get_links_between(1, 2), [(1, 1), (3, 3)])
|
||||
|
||||
self.assertCountEqual(link_map.get_additions(), [(1, 3, 2, 3), (2, 5, 1, 3)])
|
||||
|
||||
def test_exists_path_from(self) -> None:
|
||||
"Check that `exists_path_from` can handle non-direct links"
|
||||
link_map = _LinkMap()
|
||||
|
||||
link_map.add_link((1, 1), (2, 1), new=False)
|
||||
link_map.add_link((2, 1), (3, 1), new=False)
|
||||
|
||||
self.assertTrue(link_map.exists_path_from((1, 4), (3, 1)))
|
||||
self.assertFalse(link_map.exists_path_from((1, 4), (3, 2)))
|
||||
|
||||
link_map.add_link((1, 5), (2, 3), new=False)
|
||||
link_map.add_link((2, 2), (3, 3), new=False)
|
||||
|
||||
self.assertTrue(link_map.exists_path_from((1, 6), (3, 2)))
|
||||
self.assertFalse(link_map.exists_path_from((1, 4), (3, 2)))
|
||||
|
||||
|
||||
class EventChainBackgroundUpdateTestCase(HomeserverTestCase):
|
||||
servlets = [
|
||||
|
||||
@@ -71,17 +71,16 @@ class EventSearchInsertionTest(HomeserverTestCase):
|
||||
store.search_msgs([room_id], "hi bob", ["content.body"])
|
||||
)
|
||||
self.assertEqual(result.get("count"), 1)
|
||||
if isinstance(store.database_engine, PostgresEngine):
|
||||
self.assertIn("hi", result.get("highlights"))
|
||||
self.assertIn("bob", result.get("highlights"))
|
||||
self.assertIn("hi", result.get("highlights"))
|
||||
self.assertIn("bob", result.get("highlights"))
|
||||
|
||||
# Check that search works for an unrelated message
|
||||
result = self.get_success(
|
||||
store.search_msgs([room_id], "another", ["content.body"])
|
||||
)
|
||||
self.assertEqual(result.get("count"), 1)
|
||||
if isinstance(store.database_engine, PostgresEngine):
|
||||
self.assertIn("another", result.get("highlights"))
|
||||
|
||||
self.assertIn("another", result.get("highlights"))
|
||||
|
||||
# Check that search works for a search term that overlaps with the message
|
||||
# containing a null byte and an unrelated message.
|
||||
@@ -90,8 +89,8 @@ class EventSearchInsertionTest(HomeserverTestCase):
|
||||
result = self.get_success(
|
||||
store.search_msgs([room_id], "hi alice", ["content.body"])
|
||||
)
|
||||
if isinstance(store.database_engine, PostgresEngine):
|
||||
self.assertIn("alice", result.get("highlights"))
|
||||
|
||||
self.assertIn("alice", result.get("highlights"))
|
||||
|
||||
def test_non_string(self) -> None:
|
||||
"""Test that non-string `value`s are not inserted into `event_search`.
|
||||
|
||||
@@ -523,6 +523,7 @@ class HomeserverTestCase(TestCase):
|
||||
request: Type[Request] = SynapseRequest,
|
||||
shorthand: bool = True,
|
||||
federation_auth_origin: Optional[bytes] = None,
|
||||
content_type: Optional[bytes] = None,
|
||||
content_is_form: bool = False,
|
||||
await_result: bool = True,
|
||||
custom_headers: Optional[Iterable[CustomHeaderType]] = None,
|
||||
@@ -541,6 +542,9 @@ class HomeserverTestCase(TestCase):
|
||||
with the usual REST API path, if it doesn't contain it.
|
||||
federation_auth_origin: if set to not-None, we will add a fake
|
||||
Authorization header pretenting to be the given server name.
|
||||
|
||||
content_type: The content-type to use for the request. If not set then will default to
|
||||
application/json unless content_is_form is true.
|
||||
content_is_form: Whether the content is URL encoded form data. Adds the
|
||||
'Content-Type': 'application/x-www-form-urlencoded' header.
|
||||
|
||||
@@ -566,6 +570,7 @@ class HomeserverTestCase(TestCase):
|
||||
request,
|
||||
shorthand,
|
||||
federation_auth_origin,
|
||||
content_type,
|
||||
content_is_form,
|
||||
await_result,
|
||||
custom_headers,
|
||||
|
||||
Reference in New Issue
Block a user