Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-09 01:30:18 +00:00)

## Compare commits: `erikj/rele...` → `erikj/acco...` (146 commits)
Commit SHA1s:

```
2b2e37e580 7d5484ea0d 65dc3aa5b8 48ab85f276 68c5cd8c0b aae9e912de
03937a1cae 285de43e48 4900438712 cf982d2e32 7589565edd 7ed23e072e
4ac783549c 1cb84aaab5 9b83fb7c16 c5b4be6d07 4c66a7cbed ebad618bf0
16af80b8fb e4a1f271b9 6b131a99fe 76f7c91e44 b732d13d4c 596b96411b
f6c2b0ec2e 1c1eaf7b5f 20be70dae4 119b7527fb a7fcac5648 e06e3c4004
60441059a3 1b197752b6 598a83d005 be603de2cb 62523571ae 5562a89168
59bcbcec0a d8b926d323 2efed1d4fb cd24bc2f36 a193d4a1b5 b3047f3f17
9689ac3294 588e5b521d 515c1cc0a1 e1ed959a68 5c229415c4 a3c49565ff
5389374ef8 e5d07bb083 a708e1afd0 786de8570b d5accec2e5 de3363ef58
6b770d8bfc f73c844403 b09bcf16d9 b054690c8c dce38f3faf fc10d38849
4255c03599 c24cce73a1 1c5d2a4197 391c4f870b 5eec67b6ef 6722adf04e
ac27c9e46a f729ef08c9 7d52ce7d4b 709b7363fe 560b43ac02 8b6ff1dba5
b4d0356e48 d52c17ce01 966a50bb63 d6125c583d da58e55a0b a5a454fc35
1caff75526 7b75922020 26c1330764 48303fcbcc 53a3783750 b913aaa788
dab88a7b1f ca69d0f571 02ebcf7725 cdd5979129 89801e04ca 7098d47f29
26f81fb5be d844afdc29 bb80894391 e43c2b023e 2999a14aed 1a6b718f8c
594cd5f9fd b21134de3b a8f29c9913 9eed8cd878 8678516e79 573c6d7e69
689641b903 e75a23a63d e563e4bdf3 f4032d3e71 8da16e55fe d9cc0faf4b
cca77af68f 48742da536 940b932405 a2b2f6d09b defd4aca67 b4d95409fb
f1a1c7fc53 cb9fa062b7 74b75cfd54 87d13fd143 ad2cd9aefd ad0ee53993
92b38c1afd a8e313836d 7c9684b5dc f1e8d2d15a 10428046e4 6eb98a4f1c
950ba844f7 8b8d74d12f 261e746281 993644ded0 a5d25bb623 f162c92f2a
9ce489be5e fae75b0376 f77bfbfa30 1892ba5f67 a51daffba5 b05b2e14bb
a308d99f30 a9fc1fd112 6a11bdf01d 8fea190a1f 81c19c4cd2 aaa3c36420
3e7eb45eb1 bab37dfc6f
```
**`.github/workflows/fix_lint.yaml`** (vendored, 14 changed lines)

```diff
@@ -29,17 +29,13 @@ jobs:
         with:
           install-project: "false"
 
-      - name: Import order (isort)
+      - name: Run ruff check
         continue-on-error: true
-        run: poetry run isort .
+        run: poetry run ruff check --fix .
 
-      - name: Code style (black)
+      - name: Run ruff format
         continue-on-error: true
-        run: poetry run black .
-
-      - name: Semantic checks (ruff)
-        continue-on-error: true
-        run: poetry run ruff --fix .
+        run: poetry run ruff format --quiet .
 
       - run: cargo clippy --all-features --fix -- -D warnings
         continue-on-error: true
@@ -49,4 +45,4 @@ jobs:
 
       - uses: stefanzweifel/git-auto-commit-action@v5
         with:
-          commit_message: "Attempt to fix linting"
+          commit_message: "Attempt to fix linting"
```
**`.github/workflows/tests.yml`** (vendored, 12 changed lines)

```diff
@@ -131,15 +131,11 @@ jobs:
         with:
           install-project: "false"
 
-      - name: Import order (isort)
-        run: poetry run isort --check --diff .
+      - name: Run ruff check
+        run: poetry run ruff check --output-format=github .
 
-      - name: Code style (black)
-        run: poetry run black --check --diff .
-
-      - name: Semantic checks (ruff)
-        # --quiet suppresses the update check.
-        run: poetry run ruff check --quiet .
+      - name: Run ruff format
+        run: poetry run ruff format --check .
 
   lint-mypy:
     runs-on: ubuntu-latest
```
**`CHANGES.md`** (187 changed lines; `@@ -1,3 +1,190 @@`, all additions)

# Synapse 1.115.0rc2 (2024-09-12)

### Internal Changes

- Pre-populate room data used in experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint for quick filtering/sorting. ([\#17652](https://github.com/element-hq/synapse/issues/17652))
- Speed up sliding sync by reducing amount of data pulled out of the database for large rooms. ([\#17683](https://github.com/element-hq/synapse/issues/17683))


# Synapse 1.115.0rc1 (2024-09-10)

### Features

- Improve cross-signing upload when using [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) to use a custom UIA flow stage, with web fallback support. ([\#17509](https://github.com/element-hq/synapse/issues/17509))

### Bugfixes

- Return `400 M_BAD_JSON` upon attempting to complete various room actions with a non-local user ID and unknown room ID, rather than an internal server error. ([\#17607](https://github.com/element-hq/synapse/issues/17607))
- Fix authenticated media responses using a wrong limit when following redirects over federation. ([\#17626](https://github.com/element-hq/synapse/issues/17626))
- Fix bug where we returned the wrong `bump_stamp` for invites in sliding sync response, causing incorrect ordering of invites in the room list. ([\#17674](https://github.com/element-hq/synapse/issues/17674))

### Improved Documentation

- Clarify that the admin api resource is only loaded on the main process and not workers. ([\#17590](https://github.com/element-hq/synapse/issues/17590))
- Fixed typo in `saml2_config` config [example](https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#saml2_config). ([\#17594](https://github.com/element-hq/synapse/issues/17594))

### Deprecations and Removals

- Stabilise [MSC4156](https://github.com/matrix-org/matrix-spec-proposals/pull/4156) by removing the `msc4156_enabled` config setting and defaulting it to `true`. ([\#17650](https://github.com/element-hq/synapse/issues/17650))

### Internal Changes

- Update [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) implementation: load the issuer and account management URLs from OIDC discovery. ([\#17407](https://github.com/element-hq/synapse/issues/17407))
- Pre-populate room data used in experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint for quick filtering/sorting. ([\#17512](https://github.com/element-hq/synapse/issues/17512), [\#17632](https://github.com/element-hq/synapse/issues/17632), [\#17633](https://github.com/element-hq/synapse/issues/17633), [\#17634](https://github.com/element-hq/synapse/issues/17634), [\#17635](https://github.com/element-hq/synapse/issues/17635), [\#17636](https://github.com/element-hq/synapse/issues/17636), [\#17641](https://github.com/element-hq/synapse/issues/17641), [\#17654](https://github.com/element-hq/synapse/issues/17654), [\#17673](https://github.com/element-hq/synapse/issues/17673))
- Store sliding sync per-connection state in the database. ([\#17599](https://github.com/element-hq/synapse/issues/17599), [\#17631](https://github.com/element-hq/synapse/issues/17631))
- Make the sliding sync `PerConnectionState` class immutable. ([\#17600](https://github.com/element-hq/synapse/issues/17600))
- Replace `isort` and `black` with `ruff`. ([\#17620](https://github.com/element-hq/synapse/issues/17620), [\#17643](https://github.com/element-hq/synapse/issues/17643))
- Sliding Sync: Split up `get_room_membership_for_user_at_to_token`. ([\#17629](https://github.com/element-hq/synapse/issues/17629))
- Use new database tables for sliding sync. ([\#17630](https://github.com/element-hq/synapse/issues/17630), [\#17649](https://github.com/element-hq/synapse/issues/17649))
- Prevent duplicate tags being added to Sliding Sync traces. ([\#17655](https://github.com/element-hq/synapse/issues/17655))
- Get `bump_stamp` from [new sliding sync tables](https://github.com/element-hq/synapse/pull/17512) which should be faster. ([\#17658](https://github.com/element-hq/synapse/issues/17658))
- Speed up incremental Sliding Sync requests by avoiding extra work. ([\#17665](https://github.com/element-hq/synapse/issues/17665))
- Small performance improvement in speeding up sliding sync. ([\#17666](https://github.com/element-hq/synapse/issues/17666), [\#17670](https://github.com/element-hq/synapse/issues/17670), [\#17672](https://github.com/element-hq/synapse/issues/17672))
- Speed up sliding sync by reducing number of database calls. ([\#17684](https://github.com/element-hq/synapse/issues/17684))
- Speed up sync by pulling out fewer events from the database. ([\#17688](https://github.com/element-hq/synapse/issues/17688))

### Updates to locked dependencies

* Bump authlib from 1.3.1 to 1.3.2. ([\#17679](https://github.com/element-hq/synapse/issues/17679))
* Bump idna from 3.7 to 3.8. ([\#17682](https://github.com/element-hq/synapse/issues/17682))
* Bump ruff from 0.6.2 to 0.6.4. ([\#17680](https://github.com/element-hq/synapse/issues/17680))
* Bump towncrier from 24.7.1 to 24.8.0. ([\#17645](https://github.com/element-hq/synapse/issues/17645))
* Bump twisted from 24.7.0rc1 to 24.7.0. ([\#17647](https://github.com/element-hq/synapse/issues/17647))
* Bump types-pillow from 10.2.0.20240520 to 10.2.0.20240822. ([\#17644](https://github.com/element-hq/synapse/issues/17644))
* Bump types-psycopg2 from 2.9.21.20240417 to 2.9.21.20240819. ([\#17646](https://github.com/element-hq/synapse/issues/17646))
* Bump types-setuptools from 71.1.0.20240818 to 74.1.0.20240907. ([\#17681](https://github.com/element-hq/synapse/issues/17681))


# Synapse 1.114.0 (2024-09-02)

This release enables support for
[MSC4186](https://github.com/matrix-org/matrix-spec-proposals/pull/4186) —
Simplified Sliding Sync. This allows using the upcoming releases of the Element
X mobile apps without having to run a Sliding Sync Proxy.

### Features

- Enable native sliding sync support ([MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) and [MSC4186](https://github.com/matrix-org/matrix-spec-proposals/pull/4186)) by default. ([\#17648](https://github.com/element-hq/synapse/issues/17648))


# Synapse 1.114.0rc3 (2024-08-30)

### Bugfixes

- Fix regression in v1.114.0rc2 that caused workers to fail to start. ([\#17626](https://github.com/element-hq/synapse/issues/17626))


# Synapse 1.114.0rc2 (2024-08-30)

### Features

- Improve cross-signing upload when using [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) to use a custom UIA flow stage, with web fallback support. ([\#17509](https://github.com/element-hq/synapse/issues/17509))
- Make `hash_password` script accept password input from stdin. ([\#17608](https://github.com/element-hq/synapse/issues/17608))

### Bugfixes

- Fix hierarchy returning 403 when room is accessible through federation. Contributed by Krishan (@kfiven). ([\#17194](https://github.com/element-hq/synapse/issues/17194))
- Fix content-length on federation `/thumbnail` responses. ([\#17532](https://github.com/element-hq/synapse/issues/17532))
- Fix authenticated media responses using a wrong limit when following redirects over federation. ([\#17543](https://github.com/element-hq/synapse/issues/17543))

### Internal Changes

- MSC3861: load the issuer and account management URLs from OIDC discovery. ([\#17407](https://github.com/element-hq/synapse/issues/17407))
- Refactor sliding sync class into multiple files. ([\#17595](https://github.com/element-hq/synapse/issues/17595))
- Store sliding sync per-connection state in the database. ([\#17599](https://github.com/element-hq/synapse/issues/17599))
- Make the sliding sync `PerConnectionState` class immutable. ([\#17600](https://github.com/element-hq/synapse/issues/17600))
- Add support to `@tag_args` for standalone functions. ([\#17604](https://github.com/element-hq/synapse/issues/17604))
- Speed up incremental syncs in sliding sync by adding some more caching. ([\#17606](https://github.com/element-hq/synapse/issues/17606))
- Always return the user's own read receipts in sliding sync. ([\#17617](https://github.com/element-hq/synapse/issues/17617))
- Replace `isort` and `black` with `ruff`. ([\#17620](https://github.com/element-hq/synapse/issues/17620))
- Refactor sliding sync code to move room list logic out into a separate class. ([\#17622](https://github.com/element-hq/synapse/issues/17622))

### Updates to locked dependencies

* Bump attrs from 23.2.0 to 24.2.0. ([\#17609](https://github.com/element-hq/synapse/issues/17609))
* Bump cryptography from 42.0.8 to 43.0.0. ([\#17584](https://github.com/element-hq/synapse/issues/17584))
* Bump phonenumbers from 8.13.43 to 8.13.44. ([\#17610](https://github.com/element-hq/synapse/issues/17610))
* Bump pygithub from 2.3.0 to 2.4.0. ([\#17612](https://github.com/element-hq/synapse/issues/17612))
* Bump pyyaml from 6.0.1 to 6.0.2. ([\#17611](https://github.com/element-hq/synapse/issues/17611))
* Bump sentry-sdk from 2.12.0 to 2.13.0. ([\#17585](https://github.com/element-hq/synapse/issues/17585))
* Bump serde from 1.0.206 to 1.0.208. ([\#17581](https://github.com/element-hq/synapse/issues/17581))
* Bump serde from 1.0.208 to 1.0.209. ([\#17613](https://github.com/element-hq/synapse/issues/17613))
* Bump serde_json from 1.0.124 to 1.0.125. ([\#17582](https://github.com/element-hq/synapse/issues/17582))
* Bump serde_json from 1.0.125 to 1.0.127. ([\#17614](https://github.com/element-hq/synapse/issues/17614))
* Bump types-jsonschema from 4.23.0.20240712 to 4.23.0.20240813. ([\#17583](https://github.com/element-hq/synapse/issues/17583))
* Bump types-setuptools from 71.1.0.20240726 to 71.1.0.20240818. ([\#17586](https://github.com/element-hq/synapse/issues/17586))


# Synapse 1.114.0rc1 (2024-08-20)

### Features

- Add a flag to `/versions`, `org.matrix.simplified_msc3575`, to indicate whether experimental sliding sync support has been enabled. ([\#17571](https://github.com/element-hq/synapse/issues/17571))
- Handle changes in `timeline_limit` in experimental sliding sync. ([\#17579](https://github.com/element-hq/synapse/issues/17579))
- Correctly track read receipts that should be sent down in experimental sliding sync. ([\#17575](https://github.com/element-hq/synapse/issues/17575), [\#17589](https://github.com/element-hq/synapse/issues/17589), [\#17592](https://github.com/element-hq/synapse/issues/17592))

### Bugfixes

- Start handlers for new media endpoints when media resource configured. ([\#17483](https://github.com/element-hq/synapse/issues/17483))
- Fix timeline ordering (using `stream_ordering` instead of topological ordering) in experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint. ([\#17510](https://github.com/element-hq/synapse/issues/17510))
- Fix experimental sliding sync implementation to remember any updates in rooms that were not sent down immediately. ([\#17535](https://github.com/element-hq/synapse/issues/17535))
- Better exclude partially stated rooms if we must await full state in experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint. ([\#17538](https://github.com/element-hq/synapse/issues/17538))
- Handle lower-case http headers in `_Mulitpart_Parser_Protocol`. ([\#17545](https://github.com/element-hq/synapse/issues/17545))
- Fix fetching federation signing keys from servers that omit `old_verify_keys`. Contributed by @tulir @ Beeper. ([\#17568](https://github.com/element-hq/synapse/issues/17568))
- Fix bug where we would respond with an error when a remote server asked for media that had a length of 0, using the new multipart federation media endpoint. ([\#17570](https://github.com/element-hq/synapse/issues/17570))

### Improved Documentation

- Clarify default behaviour of the
  [`auto_accept_invites.worker_to_run_on`](https://element-hq.github.io/synapse/develop/usage/configuration/config_documentation.html#auto-accept-invites)
  option. ([\#17515](https://github.com/element-hq/synapse/issues/17515))
- Improve docstrings for profile methods. ([\#17559](https://github.com/element-hq/synapse/issues/17559))

### Internal Changes

- Add more tracing to experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint. ([\#17514](https://github.com/element-hq/synapse/issues/17514))
- Fixup comment in sliding sync implementation. ([\#17531](https://github.com/element-hq/synapse/issues/17531))
- Replace override of deprecated method `HTTPAdapter.get_connection` with `get_connection_with_tls_context`. ([\#17536](https://github.com/element-hq/synapse/issues/17536))
- Fix performance of device lists in `/key/changes` and sliding sync. ([\#17537](https://github.com/element-hq/synapse/issues/17537), [\#17548](https://github.com/element-hq/synapse/issues/17548))
- Bump setuptools from 67.6.0 to 72.1.0. ([\#17542](https://github.com/element-hq/synapse/issues/17542))
- Add a utility function for generating random event IDs. ([\#17557](https://github.com/element-hq/synapse/issues/17557))
- Speed up responding to media requests. ([\#17558](https://github.com/element-hq/synapse/issues/17558), [\#17561](https://github.com/element-hq/synapse/issues/17561), [\#17564](https://github.com/element-hq/synapse/issues/17564), [\#17566](https://github.com/element-hq/synapse/issues/17566), [\#17567](https://github.com/element-hq/synapse/issues/17567), [\#17569](https://github.com/element-hq/synapse/issues/17569))
- Test github token before running release script steps. ([\#17562](https://github.com/element-hq/synapse/issues/17562))
- Reduce log spam of multipart files. ([\#17563](https://github.com/element-hq/synapse/issues/17563))
- Refactor per-connection state in experimental sliding sync handler. ([\#17574](https://github.com/element-hq/synapse/issues/17574))
- Add histogram metrics for sliding sync processing time. ([\#17593](https://github.com/element-hq/synapse/issues/17593))

### Updates to locked dependencies

* Bump bytes from 1.6.1 to 1.7.1. ([\#17526](https://github.com/element-hq/synapse/issues/17526))
* Bump lxml from 5.2.2 to 5.3.0. ([\#17550](https://github.com/element-hq/synapse/issues/17550))
* Bump phonenumbers from 8.13.42 to 8.13.43. ([\#17551](https://github.com/element-hq/synapse/issues/17551))
* Bump regex from 1.10.5 to 1.10.6. ([\#17527](https://github.com/element-hq/synapse/issues/17527))
* Bump sentry-sdk from 2.10.0 to 2.12.0. ([\#17553](https://github.com/element-hq/synapse/issues/17553))
* Bump serde from 1.0.204 to 1.0.206. ([\#17556](https://github.com/element-hq/synapse/issues/17556))
* Bump serde_json from 1.0.122 to 1.0.124. ([\#17555](https://github.com/element-hq/synapse/issues/17555))
* Bump sigstore/cosign-installer from 3.5.0 to 3.6.0. ([\#17549](https://github.com/element-hq/synapse/issues/17549))
* Bump types-pyyaml from 6.0.12.20240311 to 6.0.12.20240808. ([\#17552](https://github.com/element-hq/synapse/issues/17552))
* Bump types-requests from 2.31.0.20240406 to 2.32.0.20240712. ([\#17524](https://github.com/element-hq/synapse/issues/17524))


# Synapse 1.113.0 (2024-08-13)

No significant changes since 1.113.0rc1.


# Synapse 1.113.0rc1 (2024-08-06)

### Features
**`Cargo.lock`** (generated, 16 changed lines)

```diff
@@ -13,9 +13,9 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.86"
+version = "1.0.89"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
+checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6"
 
 [[package]]
 name = "arc-swap"
@@ -485,18 +485,18 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
 
 [[package]]
 name = "serde"
-version = "1.0.206"
+version = "1.0.210"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b3e4cd94123dd520a128bcd11e34d9e9e423e7e3e50425cb1b4b1e3549d0284"
+checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.206"
+version = "1.0.210"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fabfb6138d2383ea8208cf98ccf69cdfb1aff4088460681d84189aa259762f97"
+checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -505,9 +505,9 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.124"
+version = "1.0.128"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d"
+checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8"
 dependencies = [
  "itoa",
  "memchr",
```
```diff
@@ -158,7 +158,7 @@ it:
 
 We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to
 the public internet. Without it, anyone can freely register accounts on your homeserver.
-This can be exploited by attackers to create spambots targetting the rest of the Matrix
+This can be exploited by attackers to create spambots targeting the rest of the Matrix
 federation.
 
 Your new user name will be formed partly from the ``server_name``, and partly
```
Deleted `changelog.d` entries (their text is folded into the CHANGES.md additions above):

```diff
@@ -1 +0,0 @@
-Start handlers for new media endpoints when media resource configured.
@@ -1 +0,0 @@
-Fix timeline ordering (using `stream_ordering` instead of topological ordering) in experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint.
@@ -1 +0,0 @@
-Add more tracing to experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint.
@@ -1,3 +0,0 @@
-Clarify default behaviour of the
-[`auto_accept_invites.worker_to_run_on`](https://element-hq.github.io/synapse/develop/usage/configuration/config_documentation.html#auto-accept-invites)
-option.
@@ -1 +0,0 @@
-Fixup comment in sliding sync implementation.
@@ -1 +0,0 @@
-Fix experimental sliding sync implementation to remember any updates in rooms that were not sent down immediately.
@@ -1 +0,0 @@
-Replace override of deprecated method `HTTPAdapter.get_connection` with `get_connection_with_tls_context`.
@@ -1 +0,0 @@
-Fix performance of device lists in `/key/changes` and sliding sync.
@@ -1 +0,0 @@
-Bump setuptools from 67.6.0 to 72.1.0.
@@ -1 +0,0 @@
-Speed up responding to media requests.
```
**`changelog.d/17662.feature`** (new file)

Add support for the `tags` and `not_tags` filters for simplified sliding sync.
**`changelog.d/17667.misc`** (new file)

Import pydantic objects from the `_pydantic_compat` module.

This allows `check_pydantic_models.py` to mock those pydantic objects
only in the synapse module, and not interfere with pydantic objects in
external dependencies.
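The compat-module pattern described here gives the codebase a single import seam to patch in tests. A rough sketch of how such a module might look (illustrative only; the real `_pydantic_compat` may resolve the names differently):

```python
# _pydantic_compat.py (sketch): resolve pydantic names in one place so that
# tooling can mock this module without touching pydantic in dependencies.
try:
    # pydantic v2 re-exports the v1 API under `pydantic.v1`
    from pydantic.v1 import BaseModel, Extra, StrictInt, StrictStr
except ImportError:
    # plain pydantic v1
    from pydantic import BaseModel, Extra, StrictInt, StrictStr

__all__ = ["BaseModel", "Extra", "StrictInt", "StrictStr"]
```

Call sites then use `from synapse._pydantic_compat import BaseModel`, so a checker such as `check_pydantic_models.py` can patch `synapse._pydantic_compat` alone, leaving pydantic usage inside external dependencies untouched.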
**`changelog.d/17675.feature`** (new file)

Guests can use the new media endpoints to download media, as described by [MSC4189](https://github.com/matrix-org/matrix-spec-proposals/pull/4189).

**`changelog.d/17690.feature`** (new file)

Add config option `turn_shared_secret_path`.

**`changelog.d/17692.bugfix`** (new file)

Make sure we get up-to-date state information when using the new Sliding Sync tables to derive room membership.

**`changelog.d/17693.misc`** (new file)

Use Sliding Sync tables as a bulk shortcut for getting the max `event_stream_ordering` of rooms.

**`changelog.d/17695.bugfix`** (new file)

Fix bug where room account data would not correctly be sent down sliding sync for old rooms.

**`changelog.d/17696.misc`** (new file)

Speed up sliding sync requests a bit where there are many room changes.

**`changelog.d/17703.misc`** (new file)

Refactor sliding sync filter unit tests so the sliding sync API has better test coverage.

**`changelog.d/17707.feature`** (new file)

Return room tags in Sliding Sync account data extension.
```diff
@@ -21,7 +21,8 @@
 #
 #
 
-""" Starts a synapse client console. """
+"""Starts a synapse client console."""
 
 import argparse
 import binascii
 import cmd
```
**`debian/changelog`** (vendored, 42 changed lines; `@@ -1,3 +1,45 @@`, all additions)

```
matrix-synapse-py3 (1.115.0~rc2) stable; urgency=medium

  * New Synapse release 1.115.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Thu, 12 Sep 2024 11:10:15 +0100

matrix-synapse-py3 (1.115.0~rc1) stable; urgency=medium

  * New Synapse release 1.115.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 10 Sep 2024 08:39:09 -0600

matrix-synapse-py3 (1.114.0) stable; urgency=medium

  * New Synapse release 1.114.0.

 -- Synapse Packaging team <packages@matrix.org>  Mon, 02 Sep 2024 15:14:53 +0100

matrix-synapse-py3 (1.114.0~rc3) stable; urgency=medium

  * New Synapse release 1.114.0rc3.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 30 Aug 2024 16:38:05 +0100

matrix-synapse-py3 (1.114.0~rc2) stable; urgency=medium

  * New Synapse release 1.114.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 30 Aug 2024 15:35:13 +0100

matrix-synapse-py3 (1.114.0~rc1) stable; urgency=medium

  * New synapse release 1.114.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 20 Aug 2024 12:55:28 +0000

matrix-synapse-py3 (1.113.0) stable; urgency=medium

  * New Synapse release 1.113.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 13 Aug 2024 14:36:56 +0100

matrix-synapse-py3 (1.113.0~rc1) stable; urgency=medium

  * New Synapse release 1.113.0rc1.
```
**`debian/hash_password.1`** (vendored, 27 changed lines)

```diff
@@ -1,10 +1,13 @@
-.\" generated with Ronn-NG/v0.8.0
-.\" http://github.com/apjanke/ronn-ng/tree/0.8.0
-.TH "HASH_PASSWORD" "1" "July 2021" "" ""
+.\" generated with Ronn-NG/v0.10.1
+.\" http://github.com/apjanke/ronn-ng/tree/0.10.1
+.TH "HASH_PASSWORD" "1" "August 2024" ""
 .SH "NAME"
 \fBhash_password\fR \- Calculate the hash of a new password, so that passwords can be reset
 .SH "SYNOPSIS"
-\fBhash_password\fR [\fB\-p\fR|\fB\-\-password\fR [password]] [\fB\-c\fR|\fB\-\-config\fR \fIfile\fR]
+.TS
+allbox;
+\fBhash_password\fR [\fB\-p\fR \fB\-\-password\fR [password]] [\fB\-c\fR \fB\-\-config\fR \fIfile\fR]
+.TE
 .SH "DESCRIPTION"
 \fBhash_password\fR calculates the hash of a supplied password using bcrypt\.
 .P
@@ -20,7 +23,7 @@ bcrypt_rounds: 17 password_config: pepper: "random hashing pepper"
 .SH "OPTIONS"
 .TP
 \fB\-p\fR, \fB\-\-password\fR
-Read the password form the command line if [password] is supplied\. If not, prompt the user and read the password form the \fBSTDIN\fR\. It is not recommended to type the password on the command line directly\. Use the STDIN instead\.
+Read the password form the command line if [password] is supplied, or from \fBSTDIN\fR\. If not, prompt the user and read the password from the tty prompt\. It is not recommended to type the password on the command line directly\. Use the STDIN instead\.
 .TP
 \fB\-c\fR, \fB\-\-config\fR
 Read the supplied YAML \fIfile\fR containing the options \fBbcrypt_rounds\fR and the \fBpassword_config\fR section containing the \fBpepper\fR value\.
@@ -33,7 +36,17 @@ $2b$12$VJNqWQYfsWTEwcELfoSi4Oa8eA17movHqqi8\.X8fWFpum7SxZ9MFe
 .fi
 .IP "" 0
 .P
-Hash from the STDIN:
+Hash from the stdin:
 .IP "" 4
 .nf
+$ cat password_file | hash_password
+Password:
+Confirm password:
+$2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX\.rcuAbM8ErLoUhybG
+.fi
+.IP "" 0
+.P
+Hash from the prompt:
+.IP "" 4
+.nf
 $ hash_password
@@ -53,6 +66,6 @@ $2b$12$CwI\.wBNr\.w3kmiUlV3T5s\.GT2wH7uebDCovDrCOh18dFedlANK99O
 .fi
 .IP "" 0
 .SH "COPYRIGHT"
-This man page was written by Rahul De <\fI\%mailto:rahulde@swecha\.net\fR> for Debian GNU/Linux distribution\.
+This man page was written by Rahul De «rahulde@swecha\.net» for Debian GNU/Linux distribution\.
 .SH "SEE ALSO"
 synctl(1), synapse_port_db(1), register_new_matrix_user(1), synapse_review_recent_signups(1)
```
**`debian/hash_password.1.html`** (vendored, new file, 182 lines; `@@ -0,0 +1,182 @@`)

```html
<!DOCTYPE html>
<html>
<head>
  <meta http-equiv='content-type' content='text/html;charset=utf-8'>
  <meta name='generator' content='Ronn-NG/v0.10.1 (http://github.com/apjanke/ronn-ng/tree/0.10.1)'>
  <title>hash_password(1) - Calculate the hash of a new password, so that passwords can be reset</title>
  <style type='text/css' media='all'>
  /* style: man */
  body#manpage {margin:0}
  .mp {max-width:100ex;padding:0 9ex 1ex 4ex}
  .mp p,.mp pre,.mp ul,.mp ol,.mp dl {margin:0 0 20px 0}
  .mp h2 {margin:10px 0 0 0}
  .mp > p,.mp > pre,.mp > ul,.mp > ol,.mp > dl {margin-left:8ex}
  .mp h3 {margin:0 0 0 4ex}
  .mp dt {margin:0;clear:left}
  .mp dt.flush {float:left;width:8ex}
  .mp dd {margin:0 0 0 9ex}
  .mp h1,.mp h2,.mp h3,.mp h4 {clear:left}
  .mp pre {margin-bottom:20px}
  .mp pre+h2,.mp pre+h3 {margin-top:22px}
  .mp h2+pre,.mp h3+pre {margin-top:5px}
  .mp img {display:block;margin:auto}
  .mp h1.man-title {display:none}
  .mp,.mp code,.mp pre,.mp tt,.mp kbd,.mp samp,.mp h3,.mp h4 {font-family:monospace;font-size:14px;line-height:1.42857142857143}
  .mp h2 {font-size:16px;line-height:1.25}
  .mp h1 {font-size:20px;line-height:2}
  .mp {text-align:justify;background:#fff}
  .mp,.mp code,.mp pre,.mp pre code,.mp tt,.mp kbd,.mp samp {color:#131211}
  .mp h1,.mp h2,.mp h3,.mp h4 {color:#030201}
  .mp u {text-decoration:underline}
  .mp code,.mp strong,.mp b {font-weight:bold;color:#131211}
  .mp em,.mp var {font-style:italic;color:#232221;text-decoration:none}
  .mp a,.mp a:link,.mp a:hover,.mp a code,.mp a pre,.mp a tt,.mp a kbd,.mp a samp {color:#0000ff}
  .mp b.man-ref {font-weight:normal;color:#434241}
  .mp pre {padding:0 4ex}
  .mp pre code {font-weight:normal;color:#434241}
  .mp h2+pre,h3+pre {padding-left:0}
  ol.man-decor,ol.man-decor li {margin:3px 0 10px 0;padding:0;float:left;width:33%;list-style-type:none;text-transform:uppercase;color:#999;letter-spacing:1px}
  ol.man-decor {width:100%}
  ol.man-decor li.tl {text-align:left}
  ol.man-decor li.tc {text-align:center;letter-spacing:4px}
  ol.man-decor li.tr {text-align:right;float:right}
  </style>
</head>
<!--
  The following styles are deprecated and will be removed at some point:
  div#man, div#man ol.man, div#man ol.head, div#man ol.man.

  The .man-page, .man-decor, .man-head, .man-foot, .man-title, and
  .man-navigation should be used instead.
-->
<body id='manpage'>
<div class='mp' id='man'>

<div class='man-navigation' style='display:none'>
  <a href="#NAME">NAME</a>
  <a href="#SYNOPSIS">SYNOPSIS</a>
  <a href="#DESCRIPTION">DESCRIPTION</a>
  <a href="#FILES">FILES</a>
  <a href="#OPTIONS">OPTIONS</a>
  <a href="#EXAMPLES">EXAMPLES</a>
  <a href="#COPYRIGHT">COPYRIGHT</a>
  <a href="#SEE-ALSO">SEE ALSO</a>
</div>

<ol class='man-decor man-head man head'>
  <li class='tl'>hash_password(1)</li>
  <li class='tc'></li>
  <li class='tr'>hash_password(1)</li>
</ol>

<h2 id="NAME">NAME</h2>
<p class="man-name">
  <code>hash_password</code> - <span class="man-whatis">Calculate the hash of a new password, so that passwords can be reset</span>
</p>
<h2 id="SYNOPSIS">SYNOPSIS</h2>

<table>
  <tbody>
    <tr>
      <td>
  <code>hash_password</code> [<code>-p</code>
      </td>
      <td>
  <code>--password</code> [password]] [<code>-c</code>
      </td>
      <td>
  <code>--config</code> <var>file</var>]</td>
    </tr>
  </tbody>
</table>

<h2 id="DESCRIPTION">DESCRIPTION</h2>

<p><strong>hash_password</strong> calculates the hash of a supplied password using bcrypt.</p>

<p><code>hash_password</code> takes a password as an parameter either on the command line
or the <code>STDIN</code> if not supplied.</p>

<p>It accepts an YAML file which can be used to specify parameters like the
number of rounds for bcrypt and password_config section having the pepper
value used for the hashing. By default <code>bcrypt_rounds</code> is set to <strong>12</strong>.</p>

<p>The hashed password is written on the <code>STDOUT</code>.</p>

<h2 id="FILES">FILES</h2>

<p>A sample YAML file accepted by <code>hash_password</code> is described below:</p>

<p>bcrypt_rounds: 17
password_config:
pepper: "random hashing pepper"</p>

<h2 id="OPTIONS">OPTIONS</h2>

<dl>
<dt>
<code>-p</code>, <code>--password</code>
</dt>
<dd>Read the password form the command line if [password] is supplied, or from <code>STDIN</code>.
If not, prompt the user and read the password from the tty prompt.
It is not recommended to type the password on the command line
directly. Use the STDIN instead.</dd>
<dt>
<code>-c</code>, <code>--config</code>
</dt>
<dd>Read the supplied YAML <var>file</var> containing the options <code>bcrypt_rounds</code>
and the <code>password_config</code> section containing the <code>pepper</code> value.</dd>
</dl>

<h2 id="EXAMPLES">EXAMPLES</h2>

<p>Hash from the command line:</p>

<pre><code>$ hash_password -p "p@ssw0rd"
$2b$12$VJNqWQYfsWTEwcELfoSi4Oa8eA17movHqqi8.X8fWFpum7SxZ9MFe
</code></pre>

<p>Hash from the stdin:</p>

<pre><code>$ cat password_file | hash_password
Password:
Confirm password:
$2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX.rcuAbM8ErLoUhybG
</code></pre>

<p>Hash from the prompt:</p>

<pre><code>$ hash_password
Password:
Confirm password:
$2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX.rcuAbM8ErLoUhybG
</code></pre>

<p>Using a config file:</p>

<pre><code>$ hash_password -c config.yml
Password:
Confirm password:
$2b$12$CwI.wBNr.w3kmiUlV3T5s.GT2wH7uebDCovDrCOh18dFedlANK99O
</code></pre>

<h2 id="COPYRIGHT">COPYRIGHT</h2>

<p>This man page was written by Rahul De «rahulde@swecha.net»
for Debian GNU/Linux distribution.</p>

<h2 id="SEE-ALSO">SEE ALSO</h2>

<p><span class="man-ref">synctl<span class="s">(1)</span></span>, <span class="man-ref">synapse_port_db<span class="s">(1)</span></span>, <span class="man-ref">register_new_matrix_user<span class="s">(1)</span></span>, <span class="man-ref">synapse_review_recent_signups<span class="s">(1)</span></span></p>

<ol class='man-decor man-foot man foot'>
  <li class='tl'></li>
  <li class='tc'>August 2024</li>
  <li class='tr'>hash_password(1)</li>
</ol>

</div>
</body>
</html>
```
**`debian/hash_password.ronn`** (vendored, 13 changed lines)

```diff
@@ -29,8 +29,8 @@ A sample YAML file accepted by `hash_password` is described below:
 ## OPTIONS
 
 * `-p`, `--password`:
-  Read the password form the command line if [password] is supplied.
-  If not, prompt the user and read the password form the `STDIN`.
+  Read the password form the command line if [password] is supplied, or from `STDIN`.
+  If not, prompt the user and read the password from the tty prompt.
   It is not recommended to type the password on the command line
   directly. Use the STDIN instead.
 
@@ -45,7 +45,14 @@ Hash from the command line:
     $ hash_password -p "p@ssw0rd"
     $2b$12$VJNqWQYfsWTEwcELfoSi4Oa8eA17movHqqi8.X8fWFpum7SxZ9MFe
 
-Hash from the STDIN:
+Hash from the stdin:
+
+    $ cat password_file | hash_password
+    Password:
+    Confirm password:
+    $2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX.rcuAbM8ErLoUhybG
+
+Hash from the prompt:
 
     $ hash_password
     Password:
```
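The operation the man page describes is bcrypt over the supplied password combined with the configured pepper. A rough equivalent in Python (assumes the `bcrypt` package; the exact concatenation and encoding in the real script may differ):

```python
import getpass

import bcrypt  # third-party: pip install bcrypt

bcrypt_rounds = 12  # documented default; overridable via `bcrypt_rounds`
pepper = "random hashing pepper"  # `password_config.pepper` from the YAML file

password = getpass.getpass("Password: ")
# Append the pepper before hashing, then print the hash to stdout.
hashed = bcrypt.hashpw(
    (password + pepper).encode("utf8"),
    bcrypt.gensalt(rounds=bcrypt_rounds),
)
print(hashed.decode("ascii"))  # e.g. $2b$12$...
```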
```diff
@@ -8,9 +8,7 @@ errors in code.
 
 The necessary tools are:
 
-- [black](https://black.readthedocs.io/en/stable/), a source code formatter;
-- [isort](https://pycqa.github.io/isort/), which organises each file's imports;
-- [ruff](https://github.com/charliermarsh/ruff), which can spot common errors; and
+- [ruff](https://github.com/charliermarsh/ruff), which can spot common errors and enforce a consistent style; and
 - [mypy](https://mypy.readthedocs.io/en/stable/), a type checker.
 
 See [the contributing guide](development/contributing_guide.md#run-the-linters) for instructions
```
```diff
@@ -509,7 +509,8 @@ Unix socket support (_Added in Synapse 1.89.0_):
 
 Valid resource names are:
 
-* `client`: the client-server API (/_matrix/client), and the synapse admin API (/_synapse/admin). Also implies `media` and `static`.
+* `client`: the client-server API (/_matrix/client). Also implies `media` and `static`.
+  If configuring the main process, the Synapse Admin API (/_synapse/admin) is also implied.
 
 * `consent`: user consent forms (/_matrix/consent). See [here](../../consent_tracking.md) for more.
```
```diff
@@ -1765,7 +1766,7 @@ rc_3pid_validation:
 
 This option sets ratelimiting how often invites can be sent in a room or to a
 specific user. `per_room` defaults to `per_second: 0.3`, `burst_count: 10`,
-`per_user` defaults to `per_second: 0.003`, `burst_count: 5`, and `per_issuer`
+`per_user` defaults to `per_second: 0.003`, `burst_count: 5`, and `per_issuer`
 defaults to `per_second: 0.3`, `burst_count: 10`.
 
 Client requests that invite user(s) when [creating a
```
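Read literally, the `per_user` default of `per_second: 0.003` with `burst_count: 5` lets a client send up to five invites to a given user in quick succession, after which the allowance refills at one invite roughly every 333 seconds (1 / 0.003).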
````diff
@@ -1966,7 +1967,7 @@ max_image_pixels: 35M
 ---
 ### `remote_media_download_burst_count`
 
-Remote media downloads are ratelimited using a [leaky bucket algorithm](https://en.wikipedia.org/wiki/Leaky_bucket), where a given "bucket" is keyed to the IP address of the requester when requesting remote media downloads. This configuration option sets the size of the bucket against which the size in bytes of downloads are penalized - if the bucket is full, ie a given number of bytes have already been downloaded, further downloads will be denied until the bucket drains. Defaults to 500MiB. See also `remote_media_download_per_second` which determines the rate at which the "bucket" is emptied and thus has available space to authorize new requests.
+Remote media downloads are ratelimited using a [leaky bucket algorithm](https://en.wikipedia.org/wiki/Leaky_bucket), where a given "bucket" is keyed to the IP address of the requester when requesting remote media downloads. This configuration option sets the size of the bucket against which the size in bytes of downloads are penalized - if the bucket is full, ie a given number of bytes have already been downloaded, further downloads will be denied until the bucket drains. Defaults to 500MiB. See also `remote_media_download_per_second` which determines the rate at which the "bucket" is emptied and thus has available space to authorize new requests.
 
 Example configuration:
 ```yaml
````
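The leaky-bucket behaviour described above is straightforward to model: each download adds its byte size to a per-IP bucket, the bucket drains at a constant rate, and a full bucket means the request is denied. A minimal sketch of the idea (not Synapse's implementation; the drain rate below is illustrative):

```python
import time


class LeakyBucket:
    """Byte-based leaky bucket keyed by requester IP (sketch)."""

    def __init__(self, per_second: float, burst_count: float) -> None:
        self.per_second = per_second    # drain rate, bytes/second
        self.burst_count = burst_count  # bucket capacity, bytes
        self.level: dict[str, float] = {}
        self.last_seen: dict[str, float] = {}

    def allow(self, ip: str, nbytes: int) -> bool:
        now = time.monotonic()
        # Drain the bucket for the time elapsed since this IP's last request.
        elapsed = now - self.last_seen.get(ip, now)
        level = max(0.0, self.level.get(ip, 0.0) - elapsed * self.per_second)
        self.last_seen[ip] = now
        if level + nbytes > self.burst_count:
            self.level[ip] = level
            return False  # bucket full: deny until it drains
        self.level[ip] = level + nbytes
        return True


# 500 MiB capacity (the documented default) with an illustrative drain rate:
limiter = LeakyBucket(per_second=87 * 1024, burst_count=500 * 1024 * 1024)
```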
````diff
@@ -2314,6 +2315,22 @@ Example configuration:
 ```yaml
 turn_shared_secret: "YOUR_SHARED_SECRET"
 ```
+---
+### `turn_shared_secret_path`
+
+An alternative to [`turn_shared_secret`](#turn_shared_secret):
+allows the shared secret to be specified in an external file.
+
+The file should be a plain text file, containing only the shared secret.
+Synapse reads the shared secret from the given file once at startup.
+
+Example configuration:
+```yaml
+turn_shared_secret_path: /path/to/secrets/file
+```
+
+_Added in Synapse 1.116.0._
+
 ---
 ### `turn_username` and `turn_password`
````
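Whether given inline or via `turn_shared_secret_path`, the shared secret is conventionally used to mint short-lived TURN credentials following the TURN REST API scheme: the username embeds an expiry timestamp and the password is a base64-encoded HMAC-SHA1 of that username under the secret. A hedged sketch of that scheme (not Synapse's code; `read_turn_secret` is a hypothetical helper):

```python
import base64
import hashlib
import hmac
import time


def read_turn_secret(path: str) -> str:
    # Matches the documented behaviour: a plain text file containing only
    # the shared secret, read once at startup.
    with open(path) as f:
        return f.read().strip()


def turn_credentials(secret: str, user_id: str, ttl: int = 86400) -> tuple[str, str]:
    # TURN REST API scheme: username = "<expiry>:<user>",
    # password = base64(HMAC-SHA1(secret, username)).
    username = f"{int(time.time()) + ttl}:{user_id}"
    mac = hmac.new(secret.encode("ascii"), username.encode("utf8"), hashlib.sha1)
    return username, base64.b64encode(mac.digest()).decode("ascii")


secret = read_turn_secret("/path/to/secrets/file")
username, password = turn_credentials(secret, "@alice:example.com")
```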
```diff
@@ -3302,8 +3319,8 @@ saml2_config:
     contact_person:
       - given_name: Bob
         sur_name: "the Sysadmin"
-        email_address": ["admin@example.com"]
-        contact_type": technical
+        email_address: ["admin@example.com"]
+        contact_type: technical
 
   saml_session_lifetime: 5m
```
387
poetry.lock
generated
387
poetry.lock
generated
@@ -16,32 +16,32 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}
|
||||
|
||||
[[package]]
|
||||
name = "attrs"
|
||||
version = "23.2.0"
|
||||
version = "24.2.0"
|
||||
description = "Classes Without Boilerplate"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
|
||||
{file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
|
||||
{file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"},
|
||||
{file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
|
||||
dev = ["attrs[tests]", "pre-commit"]
|
||||
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
|
||||
tests = ["attrs[tests-no-zope]", "zope-interface"]
|
||||
tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
|
||||
tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
|
||||
benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
|
||||
cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
|
||||
dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
|
||||
docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
|
||||
tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
|
||||
tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]
|
||||
|
||||
[[package]]
|
||||
name = "authlib"
|
||||
version = "1.3.1"
|
||||
version = "1.3.2"
|
||||
description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
|
||||
optional = true
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "Authlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:d35800b973099bbadc49b42b256ecb80041ad56b7fe1216a362c7943c088f377"},
|
||||
{file = "authlib-1.3.1.tar.gz", hash = "sha256:7ae843f03c06c5c0debd63c9db91f9fda64fa62a42a77419fa15fbb7e7a58917"},
|
||||
{file = "Authlib-1.3.2-py2.py3-none-any.whl", hash = "sha256:ede026a95e9f5cdc2d4364a52103f5405e75aa156357e831ef2bfd0bc5094dfc"},
|
||||
{file = "authlib-1.3.2.tar.gz", hash = "sha256:4b16130117f9eb82aa6eec97f6dd4673c3f960ac0283ccdae2897ee4bc030ba2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -105,52 +105,6 @@ files = [
|
||||
tests = ["pytest (>=3.2.1,!=3.3.0)"]
|
||||
typecheck = ["mypy"]
|
||||
|
||||
[[package]]
|
||||
name = "black"
|
||||
version = "24.8.0"
|
||||
description = "The uncompromising code formatter."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"},
|
||||
{file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"},
|
||||
{file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"},
|
||||
{file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"},
|
||||
{file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"},
|
||||
{file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"},
|
||||
{file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"},
|
||||
{file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"},
|
||||
{file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"},
|
||||
{file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"},
|
||||
{file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"},
|
||||
{file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"},
|
||||
{file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"},
|
||||
{file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"},
|
||||
{file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"},
|
||||
{file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"},
|
||||
{file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"},
|
||||
{file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"},
|
||||
{file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"},
|
||||
{file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"},
|
||||
{file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"},
|
||||
{file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
click = ">=8.0.0"
|
||||
mypy-extensions = ">=0.4.3"
|
||||
packaging = ">=22.0"
|
||||
pathspec = ">=0.9.0"
|
||||
platformdirs = ">=2"
|
||||
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
|
||||
typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}
|
||||
|
||||
[package.extras]
|
||||
colorama = ["colorama (>=0.4.3)"]
|
||||
d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
|
||||
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
|
||||
uvloop = ["uvloop (>=0.15.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "bleach"
|
||||
version = "6.1.0"
|
||||
@@ -403,43 +357,38 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "42.0.8"
|
||||
version = "43.0.1"
|
||||
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"},
|
||||
{file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"},
|
||||
{file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"},
|
||||
{file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"},
|
||||
{file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"},
|
||||
{file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"},
|
||||
{file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"},
|
||||
{file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"},
|
||||
{file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"},
|
||||
{file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"},
|
||||
{file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"},
|
||||
{file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"},
|
||||
{file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"},
|
||||
{file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"},
|
||||
{file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"},
|
||||
{file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"},
|
||||
{file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"},
|
||||
{file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"},
|
||||
{file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"},
|
||||
{file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"},
|
||||
{file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"},
|
||||
{file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"},
|
||||
{file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"},
|
||||
{file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"},
|
||||
{file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"},
|
||||
{file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"},
|
||||
{file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"},
|
||||
{file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"},
|
||||
{file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"},
{file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"},
{file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"},
{file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"},
{file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"},
{file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"},
{file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"},
{file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"},
{file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"},
{file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"},
{file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"},
{file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"},
{file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"},
{file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"},
{file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"},
{file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"},
{file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"},
{file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"},
{file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"},
{file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"},
{file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"},
{file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"},
{file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"},
{file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"},
{file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"},
{file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"},
{file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"},
{file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"},
{file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"},
{file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"},
{file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"},
]
[package.dependencies]
@@ -452,7 +401,7 @@ nox = ["nox"]
pep8test = ["check-sdist", "click", "mypy", "ruff"]
sdist = ["build"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test-randomorder = ["pytest-randomly"]
[[package]]
@@ -659,13 +608,13 @@ idna = ">=2.5"
[[package]]
name = "idna"
version = "3.7"
version = "3.8"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
python-versions = ">=3.6"
files = [
{file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
{file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
{file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"},
{file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"},
]
[[package]]
@@ -837,20 +786,6 @@ tomli = {version = "*", markers = "python_version < \"3.11\""}
[package.extras]
scripts = ["click (>=6.0)"]
[[package]]
name = "isort"
version = "5.13.2"
description = "A Python utility / library to sort Python imports."
optional = false
python-versions = ">=3.8.0"
files = [
{file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"},
{file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"},
]
[package.extras]
colors = ["colorama (>=0.4.6)"]
[[package]]
name = "jaeger-client"
version = "4.8.0"
@@ -1499,26 +1434,15 @@ files = [
[package.extras]
dev = ["jinja2"]
[[package]]
name = "pathspec"
version = "0.11.1"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
python-versions = ">=3.7"
files = [
{file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"},
{file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"},
]
[[package]]
name = "phonenumbers"
version = "8.13.43"
version = "8.13.44"
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
optional = false
python-versions = "*"
files = [
{file = "phonenumbers-8.13.43-py2.py3-none-any.whl", hash = "sha256:339e521403fe4dd9c664dbbeb2fe434f9ea5c81e54c0fdfadbaeb53b26a76c27"},
{file = "phonenumbers-8.13.43.tar.gz", hash = "sha256:35b904e4a79226eee027fbb467a9aa6f1ab9ffc3c09c91bf14b885c154936726"},
{file = "phonenumbers-8.13.44-py2.py3-none-any.whl", hash = "sha256:52cd02865dab1428ca9e89d442629b61d407c7dc687cfb80a3e8d068a584513c"},
{file = "phonenumbers-8.13.44.tar.gz", hash = "sha256:2175021e84ee4e41b43c890f2d0af51f18c6ca9ad525886d6d6e4ea882e46fac"},
]
[[package]]
@@ -1643,21 +1567,6 @@ files = [
{file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"},
]
[[package]]
name = "platformdirs"
version = "3.1.1"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
optional = false
python-versions = ">=3.7"
files = [
{file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"},
{file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"},
]
[package.extras]
docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
[[package]]
name = "prometheus-client"
version = "0.20.0"
@@ -1723,13 +1632,13 @@ psycopg2 = "*"
[[package]]
name = "pyasn1"
version = "0.6.0"
version = "0.6.1"
description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
optional = false
python-versions = ">=3.8"
files = [
{file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"},
{file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"},
{file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"},
{file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"},
]
[[package]]
@@ -1882,13 +1791,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pygithub"
version = "2.3.0"
version = "2.4.0"
description = "Use the full Github API v3"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
{file = "PyGithub-2.3.0-py3-none-any.whl", hash = "sha256:65b499728be3ce7b0cd2cd760da3b32f0f4d7bc55e5e0677617f90f6564e793e"},
{file = "PyGithub-2.3.0.tar.gz", hash = "sha256:0148d7347a1cdeed99af905077010aef81a4dad988b0ba51d4108bf66b443f7e"},
{file = "PyGithub-2.4.0-py3-none-any.whl", hash = "sha256:81935aa4bdc939fba98fee1cb47422c09157c56a27966476ff92775602b9ee24"},
{file = "pygithub-2.4.0.tar.gz", hash = "sha256:6601e22627e87bac192f1e2e39c6e6f69a43152cfb8f307cee575879320b3051"},
]
[package.dependencies]
@@ -2089,51 +1998,64 @@ files = [
[[package]]
name = "pyyaml"
version = "6.0.1"
version = "6.0.2"
description = "YAML parser and emitter for Python"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.8"
files = [
{file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
{file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
{file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
{file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
{file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
{file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
{file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
{file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
{file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
{file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
{file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
{file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
{file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
{file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
{file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
{file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
{file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
{file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
{file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
{file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
{file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
{file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
{file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
{file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
{file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
{file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
{file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
{file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
{file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
{file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
{file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
{file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
{file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
{file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
{file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
{file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
{file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
{file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
{file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
{file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
{file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
{file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
{file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
{file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
{file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
{file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
{file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
{file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
{file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
]
[[package]]
@@ -2346,29 +2268,29 @@ files = [
[[package]]
name = "ruff"
version = "0.5.5"
version = "0.6.5"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.5.5-py3-none-linux_armv6l.whl", hash = "sha256:605d589ec35d1da9213a9d4d7e7a9c761d90bba78fc8790d1c5e65026c1b9eaf"},
{file = "ruff-0.5.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00817603822a3e42b80f7c3298c8269e09f889ee94640cd1fc7f9329788d7bf8"},
{file = "ruff-0.5.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:187a60f555e9f865a2ff2c6984b9afeffa7158ba6e1eab56cb830404c942b0f3"},
{file = "ruff-0.5.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe26fc46fa8c6e0ae3f47ddccfbb136253c831c3289bba044befe68f467bfb16"},
{file = "ruff-0.5.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ad25dd9c5faac95c8e9efb13e15803cd8bbf7f4600645a60ffe17c73f60779b"},
{file = "ruff-0.5.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f70737c157d7edf749bcb952d13854e8f745cec695a01bdc6e29c29c288fc36e"},
{file = "ruff-0.5.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:cfd7de17cef6ab559e9f5ab859f0d3296393bc78f69030967ca4d87a541b97a0"},
{file = "ruff-0.5.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a09b43e02f76ac0145f86a08e045e2ea452066f7ba064fd6b0cdccb486f7c3e7"},
{file = "ruff-0.5.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0b856cb19c60cd40198be5d8d4b556228e3dcd545b4f423d1ad812bfdca5884"},
{file = "ruff-0.5.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3687d002f911e8a5faf977e619a034d159a8373514a587249cc00f211c67a091"},
{file = "ruff-0.5.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ac9dc814e510436e30d0ba535f435a7f3dc97f895f844f5b3f347ec8c228a523"},
{file = "ruff-0.5.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:af9bdf6c389b5add40d89b201425b531e0a5cceb3cfdcc69f04d3d531c6be74f"},
{file = "ruff-0.5.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d40a8533ed545390ef8315b8e25c4bb85739b90bd0f3fe1280a29ae364cc55d8"},
{file = "ruff-0.5.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cab904683bf9e2ecbbe9ff235bfe056f0eba754d0168ad5407832928d579e7ab"},
{file = "ruff-0.5.5-py3-none-win32.whl", hash = "sha256:696f18463b47a94575db635ebb4c178188645636f05e934fdf361b74edf1bb2d"},
{file = "ruff-0.5.5-py3-none-win_amd64.whl", hash = "sha256:50f36d77f52d4c9c2f1361ccbfbd09099a1b2ea5d2b2222c586ab08885cf3445"},
{file = "ruff-0.5.5-py3-none-win_arm64.whl", hash = "sha256:3191317d967af701f1b73a31ed5788795936e423b7acce82a2b63e26eb3e89d6"},
{file = "ruff-0.5.5.tar.gz", hash = "sha256:cc5516bdb4858d972fbc31d246bdb390eab8df1a26e2353be2dbc0c2d7f5421a"},
{file = "ruff-0.6.5-py3-none-linux_armv6l.whl", hash = "sha256:7e4e308f16e07c95fc7753fc1aaac690a323b2bb9f4ec5e844a97bb7fbebd748"},
{file = "ruff-0.6.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:932cd69eefe4daf8c7d92bd6689f7e8182571cb934ea720af218929da7bd7d69"},
{file = "ruff-0.6.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3a8d42d11fff8d3143ff4da41742a98f8f233bf8890e9fe23077826818f8d680"},
{file = "ruff-0.6.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a50af6e828ee692fb10ff2dfe53f05caecf077f4210fae9677e06a808275754f"},
{file = "ruff-0.6.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:794ada3400a0d0b89e3015f1a7e01f4c97320ac665b7bc3ade24b50b54cb2972"},
{file = "ruff-0.6.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:381413ec47f71ce1d1c614f7779d88886f406f1fd53d289c77e4e533dc6ea200"},
{file = "ruff-0.6.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:52e75a82bbc9b42e63c08d22ad0ac525117e72aee9729a069d7c4f235fc4d276"},
{file = "ruff-0.6.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09c72a833fd3551135ceddcba5ebdb68ff89225d30758027280968c9acdc7810"},
{file = "ruff-0.6.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:800c50371bdcb99b3c1551d5691e14d16d6f07063a518770254227f7f6e8c178"},
{file = "ruff-0.6.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e25ddd9cd63ba1f3bd51c1f09903904a6adf8429df34f17d728a8fa11174253"},
{file = "ruff-0.6.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7291e64d7129f24d1b0c947ec3ec4c0076e958d1475c61202497c6aced35dd19"},
{file = "ruff-0.6.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9ad7dfbd138d09d9a7e6931e6a7e797651ce29becd688be8a0d4d5f8177b4b0c"},
{file = "ruff-0.6.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:005256d977021790cc52aa23d78f06bb5090dc0bfbd42de46d49c201533982ae"},
{file = "ruff-0.6.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:482c1e6bfeb615eafc5899127b805d28e387bd87db38b2c0c41d271f5e58d8cc"},
{file = "ruff-0.6.5-py3-none-win32.whl", hash = "sha256:cf4d3fa53644137f6a4a27a2b397381d16454a1566ae5335855c187fbf67e4f5"},
{file = "ruff-0.6.5-py3-none-win_amd64.whl", hash = "sha256:3e42a57b58e3612051a636bc1ac4e6b838679530235520e8f095f7c44f706ff9"},
{file = "ruff-0.6.5-py3-none-win_arm64.whl", hash = "sha256:51935067740773afdf97493ba9b8231279e9beef0f2a8079188c4776c25688e0"},
{file = "ruff-0.6.5.tar.gz", hash = "sha256:4d32d87fab433c0cf285c3683dd4dae63be05fd7a1d65b3f5bf7cdd05a6b96fb"},
]
[[package]]
@@ -2403,13 +2325,13 @@ doc = ["Sphinx", "sphinx-rtd-theme"]
[[package]]
name = "sentry-sdk"
version = "2.12.0"
version = "2.14.0"
description = "Python client for Sentry (https://sentry.io)"
optional = true
python-versions = ">=3.6"
files = [
{file = "sentry_sdk-2.12.0-py2.py3-none-any.whl", hash = "sha256:7a8d5163d2ba5c5f4464628c6b68f85e86972f7c636acc78aed45c61b98b7a5e"},
{file = "sentry_sdk-2.12.0.tar.gz", hash = "sha256:8763840497b817d44c49b3fe3f5f7388d083f2337ffedf008b2cdb63b5c86dc6"},
{file = "sentry_sdk-2.14.0-py2.py3-none-any.whl", hash = "sha256:b8bc3dc51d06590df1291b7519b85c75e2ced4f28d9ea655b6d54033503b5bf4"},
{file = "sentry_sdk-2.14.0.tar.gz", hash = "sha256:1e0e2eaf6dad918c7d1e0edac868a7bf20017b177f242cefe2a6bcd47955961d"},
]
[package.dependencies]
@@ -2436,6 +2358,7 @@ httpx = ["httpx (>=0.16.0)"]
huey = ["huey (>=2)"]
huggingface-hub = ["huggingface-hub (>=0.22)"]
langchain = ["langchain (>=0.0.210)"]
litestar = ["litestar (>=2.0.0)"]
loguru = ["loguru (>=0.5)"]
openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"]
opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
@@ -2634,13 +2557,13 @@ files = [
[[package]]
name = "towncrier"
version = "24.7.1"
version = "24.8.0"
description = "Building newsfiles for your project."
optional = false
python-versions = ">=3.8"
files = [
{file = "towncrier-24.7.1-py3-none-any.whl", hash = "sha256:685e2a94335b5dc47537b4d3b449a25b18571ea85b07dcf6e8df31ba40f692dd"},
{file = "towncrier-24.7.1.tar.gz", hash = "sha256:57a057faedabcadf1a62f6f9bad726ae566c1f31a411338ddb8316993f583b3d"},
{file = "towncrier-24.8.0-py3-none-any.whl", hash = "sha256:9343209592b839209cdf28c339ba45792fbfe9775b5f9c177462fd693e127d8d"},
{file = "towncrier-24.8.0.tar.gz", hash = "sha256:013423ee7eed102b2f393c287d22d95f66f1a3ea10a4baa82d298001a7f18af3"},
]
[package.dependencies]
@@ -2699,13 +2622,13 @@ urllib3 = ">=1.26.0"
[[package]]
name = "twisted"
version = "24.7.0rc1"
version = "24.7.0"
description = "An asynchronous networking framework written in Python"
optional = false
python-versions = ">=3.8.0"
files = [
{file = "twisted-24.7.0rc1-py3-none-any.whl", hash = "sha256:f37d6656fe4e2871fab29d8952ae90bd6ca8b48a9e4dfa1b348f4cd62e6ba0bb"},
{file = "twisted-24.7.0rc1.tar.gz", hash = "sha256:bbc4a2193ca34cfa32f626300746698a6d70fcd77d9c0b79a664c347e39634fc"},
{file = "twisted-24.7.0-py3-none-any.whl", hash = "sha256:734832ef98108136e222b5230075b1079dad8a3fc5637319615619a7725b0c81"},
{file = "twisted-24.7.0.tar.gz", hash = "sha256:5a60147f044187a127ec7da96d170d49bcce50c6fd36f594e60f4587eff4d394"},
]
[package.dependencies]
@@ -2802,13 +2725,13 @@ files = [
[[package]]
name = "types-jsonschema"
version = "4.23.0.20240712"
version = "4.23.0.20240813"
description = "Typing stubs for jsonschema"
optional = false
python-versions = ">=3.8"
files = [
{file = "types-jsonschema-4.23.0.20240712.tar.gz", hash = "sha256:b20db728dcf7ea3e80e9bdeb55e8b8420c6c040cda14e8cf284465adee71d217"},
{file = "types_jsonschema-4.23.0.20240712-py3-none-any.whl", hash = "sha256:8c33177ce95336241c1d61ccb56a9964d4361b99d5f1cd81a1ab4909b0dd7cf4"},
{file = "types-jsonschema-4.23.0.20240813.tar.gz", hash = "sha256:c93f48206f209a5bc4608d295ac39f172fb98b9e24159ce577dbd25ddb79a1c0"},
{file = "types_jsonschema-4.23.0.20240813-py3-none-any.whl", hash = "sha256:be283e23f0b87547316c2ee6b0fd36d95ea30e921db06478029e10b5b6aa6ac3"},
]
[package.dependencies]
@@ -2838,24 +2761,24 @@ files = [
[[package]]
name = "types-pillow"
version = "10.2.0.20240520"
version = "10.2.0.20240822"
description = "Typing stubs for Pillow"
optional = false
python-versions = ">=3.8"
files = [
{file = "types-Pillow-10.2.0.20240520.tar.gz", hash = "sha256:130b979195465fa1e1676d8e81c9c7c30319e8e95b12fae945e8f0d525213107"},
{file = "types_Pillow-10.2.0.20240520-py3-none-any.whl", hash = "sha256:33c36494b380e2a269bb742181bea5d9b00820367822dbd3760f07210a1da23d"},
{file = "types-Pillow-10.2.0.20240822.tar.gz", hash = "sha256:559fb52a2ef991c326e4a0d20accb3bb63a7ba8d40eb493e0ecb0310ba52f0d3"},
{file = "types_Pillow-10.2.0.20240822-py3-none-any.whl", hash = "sha256:d9dab025aba07aeb12fd50a6799d4eac52a9603488eca09d7662543983f16c5d"},
]
[[package]]
name = "types-psycopg2"
version = "2.9.21.20240417"
version = "2.9.21.20240819"
description = "Typing stubs for psycopg2"
optional = false
python-versions = ">=3.8"
files = [
{file = "types-psycopg2-2.9.21.20240417.tar.gz", hash = "sha256:05db256f4a459fb21a426b8e7fca0656c3539105ff0208eaf6bdaf406a387087"},
{file = "types_psycopg2-2.9.21.20240417-py3-none-any.whl", hash = "sha256:644d6644d64ebbe37203229b00771012fb3b3bddd507a129a2e136485990e4f8"},
{file = "types-psycopg2-2.9.21.20240819.tar.gz", hash = "sha256:4ed6b47464d6374fa64e5e3b234cea0f710e72123a4596d67ab50b7415a84666"},
{file = "types_psycopg2-2.9.21.20240819-py3-none-any.whl", hash = "sha256:c9192311c27d7ad561eef705f1b2df1074f2cdcf445a98a6a2fcaaaad43278cf"},
]
[[package]]
@@ -2886,13 +2809,13 @@ files = [
[[package]]
name = "types-requests"
version = "2.32.0.20240712"
version = "2.32.0.20240914"
description = "Typing stubs for requests"
optional = false
python-versions = ">=3.8"
files = [
{file = "types-requests-2.32.0.20240712.tar.gz", hash = "sha256:90c079ff05e549f6bf50e02e910210b98b8ff1ebdd18e19c873cd237737c1358"},
{file = "types_requests-2.32.0.20240712-py3-none-any.whl", hash = "sha256:f754283e152c752e46e70942fa2a146b5bc70393522257bb85bd1ef7e019dcc3"},
{file = "types-requests-2.32.0.20240914.tar.gz", hash = "sha256:2850e178db3919d9bf809e434eef65ba49d0e7e33ac92d588f4a5e295fffd405"},
{file = "types_requests-2.32.0.20240914-py3-none-any.whl", hash = "sha256:59c2f673eb55f32a99b2894faf6020e1a9f4a402ad0f192bfee0b64469054310"},
]
[package.dependencies]
@@ -2900,13 +2823,13 @@ urllib3 = ">=2"
[[package]]
name = "types-setuptools"
version = "71.1.0.20240726"
version = "74.1.0.20240907"
description = "Typing stubs for setuptools"
optional = false
python-versions = ">=3.8"
files = [
{file = "types-setuptools-71.1.0.20240726.tar.gz", hash = "sha256:85ba28e9461bb1be86ebba4db0f1c2408f2b11115b1966334ea9dc464e29303e"},
{file = "types_setuptools-71.1.0.20240726-py3-none-any.whl", hash = "sha256:a7775376f36e0ff09bcad236bf265777590a66b11623e48c20bfc30f1444ea36"},
{file = "types-setuptools-74.1.0.20240907.tar.gz", hash = "sha256:0abdb082552ca966c1e5fc244e4853adc62971f6cd724fb1d8a3713b580e5a65"},
{file = "types_setuptools-74.1.0.20240907-py3-none-any.whl", hash = "sha256:15b38c8e63ca34f42f6063ff4b1dd662ea20086166d5ad6a102e670a52574120"},
]
[[package]]
@@ -3181,4 +3104,4 @@ user-search = ["pyicu"]
[metadata]
lock-version = "2.0"
python-versions = "^3.8.0"
content-hash = "c165cdc1f6612c9f1b5bfd8063c23e2d595d717dd8ac1a468519e902be2cdf93"
content-hash = "0c833ab57d2082e1ebe2627aef122ce4f93c1abe1f9d8739d5ea3fe52c79fa3f"
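The [metadata] hunk above records the recomputed content-hash for the updated lockfile. To inspect the value locally, the lockfile parses as ordinary TOML; a quick standard-library sketch (Python 3.11+ for tomllib, run from the repository root):

import tomllib

# poetry.lock is plain TOML: [[package]] entries plus a [metadata] table.
with open("poetry.lock", "rb") as f:
    lock = tomllib.load(f)
print(lock["metadata"]["content-hash"])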
280
pylint.cfg
@@ -1,280 +0,0 @@
[MASTER]
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Profiled execution.
profile=no
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Pickle collected data for later comparisons.
persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
[MESSAGES CONTROL]
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time. See also the "--disable" option for examples.
#enable=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=missing-docstring
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]".
files-output=no
# Tells whether to display a full report or only the messages
reports=yes
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Add a comment according to your evaluation note. This is used by the global
# evaluation report (RP0004).
comment=no
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
ignored-classes=SQLObject
# When zope mode is activated, add a predefined set of Zope acquired attributes
# to generated-members.
zope=no
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E0201 when accessed. Python regular
# expressions are accepted.
generated-members=REQUEST,acl_users,aq_parent
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=4
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the beginning of the name of dummy variables
# (i.e. not used).
dummy-variables-rgx=_$|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
[BASIC]
# Required attributes for module, separated by a comma
required-attributes=
# List of builtins function names that should not be used, separated by a comma
bad-functions=map,filter,apply,input
# Regular expression which should only match correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression which should only match correct module level names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression which should only match correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Regular expression which should only match correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct instance attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct variable names
variable-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct attribute names in class
# bodies
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression which should only match correct list comprehension /
# generator expression variable names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=__.*__
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=80
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
# List of optional constructs for which whitespace checking is disabled
no-space-check=trailing-comma,dict-separator
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,TERMIOS,Bastion,rexec
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
[CLASSES]
# List of interface methods to ignore, separated by a comma. This is used for
# instance to not check methods defines in Zope's Interface base class.
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
@@ -34,14 +34,9 @@
name = "Internal Changes"
showcontent = true
[tool.black]
target-version = ['py38', 'py39', 'py310', 'py311']
# black ignores everything in .gitignore by default, see
# https://black.readthedocs.io/en/stable/usage_and_configuration/file_collection_and_discovery.html#gitignore
# Use `extend-exclude` if you want to exclude something in addition to this.
[tool.ruff]
line-length = 88
target-version = "py38"
[tool.ruff.lint]
# See https://beta.ruff.rs/docs/rules/#error-e
@@ -63,6 +58,8 @@ select = [
"W",
# pyflakes
"F",
# isort
"I001",
# flake8-bugbear
"B0",
# flake8-comprehensions
@@ -79,17 +76,20 @@ select = [
"EXE",
]
[tool.isort]
line_length = 88
sections = ["FUTURE", "STDLIB", "THIRDPARTY", "TWISTED", "FIRSTPARTY", "TESTS", "LOCALFOLDER"]
default_section = "THIRDPARTY"
known_first_party = ["synapse"]
known_tests = ["tests"]
known_twisted = ["twisted", "OpenSSL"]
multi_line_output = 3
include_trailing_comma = true
combine_as_imports = true
skip_gitignore = true
[tool.ruff.lint.isort]
combine-as-imports = true
section-order = ["future", "standard-library", "third-party", "twisted", "first-party", "testing", "local-folder"]
known-first-party = ["synapse"]
[tool.ruff.lint.isort.sections]
twisted = ["twisted", "OpenSSL"]
testing = ["tests"]
[tool.ruff.format]
quote-style = "double"
indent-style = "space"
skip-magic-trailing-comma = false
line-ending = "auto"
[tool.maturin]
manifest-path = "rust/Cargo.toml"
@@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust"
[tool.poetry]
name = "matrix-synapse"
version = "1.113.0rc1"
version = "1.115.0rc2"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later"
@@ -320,9 +320,7 @@ all = [
# failing on new releases. Keeping lower bounds loose here means that dependabot
# can bump versions without having to update the content-hash in the lockfile.
# This helps prevent merge conflicts when running a batch of dependabot updates.
isort = ">=5.10.1"
black = ">=22.7.0"
ruff = "0.5.5"
ruff = "0.6.5"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"
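The pyproject.toml hunks above fold the old isort and black configuration into ruff: [tool.ruff.lint.isort] reproduces isort's custom section order, including the Synapse-specific "twisted" and "testing" sections. As a rough sketch (module names chosen only for illustration), an import block that rule I001 accepts under this configuration looks like:

import logging  # standard library

import attr  # third party

from OpenSSL import SSL  # the "twisted" section also covers OpenSSL
from twisted.internet import defer

from synapse.types import StrCollection  # first party

from tests import unittest  # "testing" section

Each group is separated by a blank line and sorted internally, which is what `ruff check --fix` now enforces in place of isort.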
@@ -31,6 +31,7 @@ Pydantic does not yet offer a strict mode, but it is planned for pydantic v2. Se
until then, this script is a best effort to stop us from introducing type coercion bugs
(like the infamous stringy power levels fixed in room version 10).
"""
import argparse
import contextlib
import functools
@@ -44,7 +45,6 @@ import traceback
import unittest.mock
from contextlib import contextmanager
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
@@ -56,30 +56,17 @@ from typing import (
)
from parameterized import parameterized
from synapse._pydantic_compat import HAS_PYDANTIC_V2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import (
BaseModel as PydanticBaseModel,
conbytes,
confloat,
conint,
constr,
)
from pydantic.v1.typing import get_args
else:
from pydantic import (
BaseModel as PydanticBaseModel,
conbytes,
confloat,
conint,
constr,
)
from pydantic.typing import get_args
from typing_extensions import ParamSpec
from synapse._pydantic_compat import (
BaseModel as PydanticBaseModel,
conbytes,
confloat,
conint,
constr,
get_args,
)
logger = logging.getLogger(__name__)
CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: List[Callable] = [
@@ -182,22 +169,16 @@ def monkeypatch_pydantic() -> Generator[None, None, None]:
# Most Synapse code ought to import the patched objects directly from
# `pydantic`. But we also patch their containing modules `pydantic.main` and
# `pydantic.types` for completeness.
patch_basemodel1 = unittest.mock.patch(
"pydantic.BaseModel", new=PatchedBaseModel
patch_basemodel = unittest.mock.patch(
"synapse._pydantic_compat.BaseModel", new=PatchedBaseModel
)
patch_basemodel2 = unittest.mock.patch(
"pydantic.main.BaseModel", new=PatchedBaseModel
)
patches.enter_context(patch_basemodel1)
patches.enter_context(patch_basemodel2)
patches.enter_context(patch_basemodel)
for factory in CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG:
wrapper: Callable = make_wrapper(factory)
patch1 = unittest.mock.patch(f"pydantic.{factory.__name__}", new=wrapper)
patch2 = unittest.mock.patch(
f"pydantic.types.{factory.__name__}", new=wrapper
patch = unittest.mock.patch(
f"synapse._pydantic_compat.{factory.__name__}", new=wrapper
)
patches.enter_context(patch1)
patches.enter_context(patch2)
patches.enter_context(patch)
yield
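The check_pydantic_models.py hunks above replace two-target patching (pydantic.X plus pydantic.types.X) with a single patch of synapse._pydantic_compat, the one module through which Synapse now imports pydantic names. A minimal sketch of the same ExitStack-based pattern, assuming a Synapse checkout is importable (the name list is abbreviated for illustration):

import contextlib
import unittest.mock
from typing import Iterator

@contextlib.contextmanager
def patch_compat_module() -> Iterator[None]:
    # Patch each re-exported name on the one compat module; ExitStack
    # unwinds every patch on exit, even if the body raises.
    with contextlib.ExitStack() as patches:
        for name in ("BaseModel", "conbytes", "confloat", "conint", "constr"):
            patches.enter_context(
                unittest.mock.patch(f"synapse._pydantic_compat.{name}")
            )
        yield

Patching one module instead of three keeps the test double in sync with however the compat module chooses between pydantic v1 and the pydantic.v1 shim.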
@@ -1,8 +1,9 @@
#!/usr/bin/env bash
#
# Runs linting scripts over the local Synapse checkout
# black - opinionated code formatter
# ruff - lints and finds mistakes
# mypy - typechecks python code
# cargo clippy - lints rust code
set -e
@@ -101,12 +102,6 @@ echo
# Print out the commands being run
set -x
# Ensure the sort order of imports.
isort "${files[@]}"
# Ensure Python code conforms to an opinionated style.
python3 -m black "${files[@]}"
# Ensure the sample configuration file conforms to style checks.
./scripts-dev/config-lint.sh
@@ -114,6 +109,9 @@ python3 -m black "${files[@]}"
# --quiet suppresses the update check.
ruff check --quiet --fix "${files[@]}"
# Reformat Python code.
ruff format --quiet "${files[@]}"
# Catch any common programming mistakes in Rust code.
#
# --bins, --examples, --lib, --tests combined explicitly disable checking
@@ -38,6 +38,7 @@ from mypy.types import (
NoneType,
TupleType,
TypeAliasType,
TypeVarType,
UninhabitedType,
|
||||
UnionType,
|
||||
)
|
||||
@@ -233,6 +234,7 @@ IMMUTABLE_CUSTOM_TYPES = {
|
||||
"synapse.synapse_rust.push.FilteredPushRules",
|
||||
# This is technically not immutable, but close enough.
|
||||
"signedjson.types.VerifyKey",
|
||||
"synapse.types.StrCollection",
|
||||
}
|
||||
|
||||
# Immutable containers only if the values are also immutable.
|
||||
@@ -298,7 +300,7 @@ def is_cacheable(
|
||||
|
||||
elif rt.type.fullname in MUTABLE_CONTAINER_TYPES:
|
||||
# Mutable containers are mutable regardless of their underlying type.
|
||||
return False, None
|
||||
return False, f"container {rt.type.fullname} is mutable"
|
||||
|
||||
elif "attrs" in rt.type.metadata:
|
||||
# attrs classes are only cachable iff it is frozen (immutable itself)
|
||||
@@ -318,6 +320,9 @@ def is_cacheable(
|
||||
else:
|
||||
return False, "non-frozen attrs class"
|
||||
|
||||
elif rt.type.is_enum:
|
||||
# We assume Enum values are immutable
|
||||
return True, None
|
||||
else:
|
||||
# Ensure we fail for unknown types, these generally means that the
|
||||
# above code is not complete.
|
||||
@@ -326,6 +331,18 @@ def is_cacheable(
|
||||
f"Don't know how to handle {rt.type.fullname} return type instance",
|
||||
)
|
||||
|
||||
elif isinstance(rt, TypeVarType):
|
||||
# We consider TypeVars immutable if they are bound to a set of immutable
|
||||
# types.
|
||||
if rt.values:
|
||||
for value in rt.values:
|
||||
ok, note = is_cacheable(value, signature, verbose)
|
||||
if not ok:
|
||||
return False, f"TypeVar bound not cacheable {value}"
|
||||
return True, None
|
||||
|
||||
return False, "TypeVar is unbound"
|
||||
|
||||
elif isinstance(rt, NoneType):
|
||||
# None is cachable.
|
||||
return True, None
|
||||
|
||||
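The new `TypeVarType` branch treats a value-restricted TypeVar as cacheable only when every allowed value type is itself cacheable, and rejects unbound TypeVars outright. A quick illustration of the two TypeVar shapes the plugin distinguishes (plain Python, not mypy plugin code):

from typing import TypeVar

# Value-restricted: T may only ever be str or int, so a checker can reason
# about each allowed value type separately (rt.values in mypy's model).
T = TypeVar("T", str, int)

# Unbound: U could be anything, including a mutable type, so the plugin
# conservatively reports "TypeVar is unbound".
U = TypeVar("U")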
@@ -20,8 +20,7 @@
 #
 #

-"""An interactive script for doing a release. See `cli()` below.
-"""
+"""An interactive script for doing a release. See `cli()` below."""

 import glob
 import json
@@ -324,6 +323,11 @@ def tag(gh_token: Optional[str]) -> None:
 def _tag(gh_token: Optional[str]) -> None:
     """Tags the release and generates a draft GitHub release"""

+    if gh_token:
+        # Test that the GH Token is valid before continuing.
+        gh = Github(gh_token)
+        gh.get_user()
+
     # Make sure we're in a git repo.
     repo = get_repo_and_check_clean_checkout()

@@ -418,6 +422,11 @@ def publish(gh_token: str) -> None:
 def _publish(gh_token: str) -> None:
     """Publish release on GitHub."""

+    if gh_token:
+        # Test that the GH Token is valid before continuing.
+        gh = Github(gh_token)
+        gh.get_user()
+
     # Make sure we're in a git repo.
     get_repo_and_check_clean_checkout()

@@ -460,6 +469,11 @@ def upload(gh_token: Optional[str]) -> None:
 def _upload(gh_token: Optional[str]) -> None:
     """Upload release to pypi."""

+    if gh_token:
+        # Test that the GH Token is valid before continuing.
+        gh = Github(gh_token)
+        gh.get_user()
+
     current_version = get_package_version()
     tag_name = f"v{current_version}"

@@ -555,6 +569,11 @@ def wait_for_actions(gh_token: Optional[str]) -> None:


 def _wait_for_actions(gh_token: Optional[str]) -> None:
+    if gh_token:
+        # Test that the GH Token is valid before continuing.
+        gh = Github(gh_token)
+        gh.get_user()
+
     # Find out the version and tag name.
     current_version = get_package_version()
     tag_name = f"v{current_version}"
@@ -711,6 +730,11 @@ Ask the designated people to do the blog and tweets."""
 @cli.command()
 @click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True)
 def full(gh_token: str) -> None:
+    if gh_token:
+        # Test that the GH Token is valid before continuing.
+        gh = Github(gh_token)
+        gh.get_user()
+
     click.echo("1. If this is a security release, read the security wiki page.")
     click.echo("2. Check for any release blockers before proceeding.")
     click.echo("   https://github.com/element-hq/synapse/labels/X-Release-Blocker")

@@ -13,8 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-"""Contains *incomplete* type hints for txredisapi.
-"""
+"""Contains *incomplete* type hints for txredisapi."""

 from typing import Any, List, Optional, Type, Union

 from twisted.internet import protocol

@@ -20,8 +20,7 @@
 #
 #

-""" This is an implementation of a Matrix homeserver.
-"""
+"""This is an implementation of a Matrix homeserver."""

 import os
 import sys

@@ -19,6 +19,8 @@
 #
 #

+from typing import TYPE_CHECKING
+
 from packaging.version import Version

 try:
@@ -30,4 +32,64 @@ except ImportError:

 HAS_PYDANTIC_V2: bool = Version(pydantic_version).major == 2

-__all__ = ("HAS_PYDANTIC_V2",)
+if TYPE_CHECKING or HAS_PYDANTIC_V2:
+    from pydantic.v1 import (
+        BaseModel,
+        Extra,
+        Field,
+        MissingError,
+        PydanticValueError,
+        StrictBool,
+        StrictInt,
+        StrictStr,
+        ValidationError,
+        conbytes,
+        confloat,
+        conint,
+        constr,
+        parse_obj_as,
+        validator,
+    )
+    from pydantic.v1.error_wrappers import ErrorWrapper
+    from pydantic.v1.typing import get_args
+else:
+    from pydantic import (
+        BaseModel,
+        Extra,
+        Field,
+        MissingError,
+        PydanticValueError,
+        StrictBool,
+        StrictInt,
+        StrictStr,
+        ValidationError,
+        conbytes,
+        confloat,
+        conint,
+        constr,
+        parse_obj_as,
+        validator,
+    )
+    from pydantic.error_wrappers import ErrorWrapper
+    from pydantic.typing import get_args
+
+__all__ = (
+    "HAS_PYDANTIC_V2",
+    "BaseModel",
+    "constr",
+    "conbytes",
+    "conint",
+    "confloat",
+    "ErrorWrapper",
+    "Extra",
+    "Field",
+    "get_args",
+    "MissingError",
+    "parse_obj_as",
+    "PydanticValueError",
+    "StrictBool",
+    "StrictInt",
+    "StrictStr",
+    "ValidationError",
+    "validator",
+)

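With the expanded `synapse._pydantic_compat`, call sites no longer need the v1/v2 branching: they import the names directly and the shim resolves the right pydantic namespace. A sketch of what a caller looks like after this change; the model itself is a made-up example, not from Synapse:

from synapse._pydantic_compat import BaseModel, StrictStr, parse_obj_as

class ExampleUser(BaseModel):  # hypothetical model, for illustration only
    name: StrictStr

# parse_obj_as validates plain data against the model in pydantic v1 style,
# regardless of whether pydantic v1 or v2 is installed underneath.
user = parse_obj_as(ExampleUser, {"name": "alice"})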
@@ -171,7 +171,7 @@ def elide_http_methods_if_unconflicting(
     """

     def paths_to_methods_dict(
-        methods_and_paths: Iterable[Tuple[str, str]]
+        methods_and_paths: Iterable[Tuple[str, str]],
     ) -> Dict[str, Set[str]]:
         """
         Given (method, path) pairs, produces a dict from path to set of methods
@@ -201,7 +201,7 @@ def elide_http_methods_if_unconflicting(


 def simplify_path_regexes(
-    registrations: Dict[Tuple[str, str], EndpointDescription]
+    registrations: Dict[Tuple[str, str], EndpointDescription],
 ) -> Dict[Tuple[str, str], EndpointDescription]:
     """
     Simplify all the path regexes for the dict of endpoint descriptions,

@@ -56,7 +56,9 @@ def main() -> None:
         password_pepper = password_config.get("pepper", password_pepper)
     password = args.password

-    if not password:
+    if not password and not sys.stdin.isatty():
+        password = sys.stdin.readline().strip()
+    elif not password:
         password = prompt_for_pass()

     # On Python 2, make sure we decode it to Unicode before we normalise it

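The `isatty()` check lets the script accept a piped-in password non-interactively while still prompting when run from a terminal. A minimal standalone sketch of that branching:

import getpass
import sys

def read_password() -> str:
    # If stdin is a pipe (e.g. `echo secret | hash_password`), read from it;
    # otherwise fall back to an interactive prompt.
    if not sys.stdin.isatty():
        return sys.stdin.readline().strip()
    return getpass.getpass("Password: ")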
@@ -40,6 +40,7 @@ from synapse.storage.engines import create_engine


 class ReviewConfig(RootConfig):
     "A config class that just pulls out the database config"
+
     config_classes = [DatabaseConfig]


@@ -160,7 +161,11 @@ def main() -> None:

     with make_conn(database_config, engine, "review_recent_signups") as db_conn:
         # This generates a type of Cursor, not LoggingTransaction.
-        user_infos = get_recent_users(db_conn.cursor(), since_ms, exclude_users_with_appservice)  # type: ignore[arg-type]
+        user_infos = get_recent_users(
+            db_conn.cursor(),
+            since_ms,  # type: ignore[arg-type]
+            exclude_users_with_appservice,
+        )

     for user_info in user_infos:
         if exclude_users_with_email and user_info.emails:

@@ -129,6 +129,11 @@ BOOLEAN_COLUMNS = {
     "remote_media_cache": ["authenticated"],
     "room_stats_state": ["is_federatable"],
     "rooms": ["is_public", "has_auth_chain_index"],
+    "sliding_sync_joined_rooms": ["is_encrypted"],
+    "sliding_sync_membership_snapshots": [
+        "has_known_state",
+        "is_encrypted",
+    ],
     "users": ["shadow_banned", "approved", "locked", "suspended"],
     "un_partial_stated_event_stream": ["rejection_status_changed"],
     "users_who_share_rooms": ["share_private"],
@@ -712,9 +717,7 @@ class Porter:
             return

         # Check if all background updates are done, abort if not.
-        updates_complete = (
-            await self.sqlite_store.db_pool.updates.has_completed_background_updates()
-        )
+        updates_complete = await self.sqlite_store.db_pool.updates.has_completed_background_updates()
         if not updates_complete:
             end_error = (
                 "Pending background updates exist in the SQLite3 database."
@@ -1090,10 +1093,10 @@ class Porter:
         return done, remaining + done

     async def _setup_state_group_id_seq(self) -> None:
-        curr_id: Optional[int] = (
-            await self.sqlite_store.db_pool.simple_select_one_onecol(
-                table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
-            )
+        curr_id: Optional[
+            int
+        ] = await self.sqlite_store.db_pool.simple_select_one_onecol(
+            table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
         )

         if not curr_id:
@@ -1181,13 +1184,13 @@ class Porter:
         )

     async def _setup_auth_chain_sequence(self) -> None:
-        curr_chain_id: Optional[int] = (
-            await self.sqlite_store.db_pool.simple_select_one_onecol(
-                table="event_auth_chains",
-                keyvalues={},
-                retcol="MAX(chain_id)",
-                allow_none=True,
-            )
+        curr_chain_id: Optional[
+            int
+        ] = await self.sqlite_store.db_pool.simple_select_one_onecol(
+            table="event_auth_chains",
+            keyvalues={},
+            retcol="MAX(chain_id)",
+            allow_none=True,
         )

         def r(txn: LoggingTransaction) -> None:

@@ -121,7 +121,9 @@ class MSC3861DelegatedAuth(BaseAuth):
         self._hostname = hs.hostname
         self._admin_token = self._config.admin_token

-        self._issuer_metadata = RetryOnExceptionCachedCall(self._load_metadata)
+        self._issuer_metadata = RetryOnExceptionCachedCall[OpenIDProviderMetadata](
+            self._load_metadata
+        )

         if isinstance(auth_method, PrivateKeyJWTWithKid):
             # Use the JWK as the client secret when using the private_key_jwt method
@@ -145,6 +147,33 @@ class MSC3861DelegatedAuth(BaseAuth):
         # metadata.validate_introspection_endpoint()
         return metadata

+    async def issuer(self) -> str:
+        """
+        Get the configured issuer
+
+        This will use the issuer value set in the metadata,
+        falling back to the one set in the config if not set in the metadata
+        """
+        metadata = await self._issuer_metadata.get()
+        return metadata.issuer or self._config.issuer
+
+    async def account_management_url(self) -> Optional[str]:
+        """
+        Get the configured account management URL
+
+        This will discover the account management URL from the issuer if it's not set in the config
+        """
+        if self._config.account_management_url is not None:
+            return self._config.account_management_url
+
+        try:
+            metadata = await self._issuer_metadata.get()
+            return metadata.get("account_management_uri", None)
+        # We don't want to raise here if we can't load the metadata
+        except Exception:
+            logger.warning("Failed to load metadata:", exc_info=True)
+            return None
+
     async def _introspection_endpoint(self) -> str:
         """
         Returns the introspection endpoint of the issuer
@@ -154,7 +183,7 @@ class MSC3861DelegatedAuth(BaseAuth):
         if self._config.introspection_endpoint is not None:
             return self._config.introspection_endpoint

-        metadata = await self._load_metadata()
+        metadata = await self._issuer_metadata.get()
         return metadata.get("introspection_endpoint")

     async def _introspect_token(self, token: str) -> IntrospectionToken:

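Both new methods read from `self._issuer_metadata`, a cached async call that retries on failure instead of caching the exception. A rough sketch of how such a retry-on-exception cache could behave; this is an assumed reimplementation for illustration, not Synapse's actual `RetryOnExceptionCachedCall`:

from typing import Awaitable, Callable, Generic, Optional, TypeVar

T = TypeVar("T")

class CachedCall(Generic[T]):
    """Caches the result of an async call; a failed call is not cached,
    so the next .get() retries instead of replaying the old exception.
    (Simplified: assumes the call never legitimately returns None.)"""

    def __init__(self, f: Callable[[], Awaitable[T]]) -> None:
        self._f = f
        self._result: Optional[T] = None

    async def get(self) -> T:
        if self._result is None:
            # If this raises, _result stays None and we retry next time.
            self._result = await self._f()
        return self._result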
@@ -230,6 +230,8 @@ class EventContentFields:

     ROOM_NAME: Final = "name"

+    MEMBERSHIP: Final = "membership"
+
     # Used in m.room.guest_access events.
     GUEST_ACCESS: Final = "guest_access"

@@ -245,6 +247,8 @@ class EventContentFields:
     # `m.room.encryption`` algorithm field
     ENCRYPTION_ALGORITHM: Final = "algorithm"

+    TOMBSTONE_SUCCESSOR_ROOM: Final = "replacement_room"
+

 class EventUnsignedContentFields:
     """Fields found inside the 'unsigned' data on events"""

@@ -19,7 +19,8 @@
 #
 #

-"""Contains the URL paths to prefix various aspects of the server with. """
+"""Contains the URL paths to prefix various aspects of the server with."""
+
 import hmac
 from hashlib import sha256
 from urllib.parse import urlencode

@@ -98,6 +98,7 @@ from synapse.storage.databases.main.roommember import RoomMemberWorkerStore
 from synapse.storage.databases.main.search import SearchStore
 from synapse.storage.databases.main.session import SessionStore
 from synapse.storage.databases.main.signatures import SignatureWorkerStore
+from synapse.storage.databases.main.sliding_sync import SlidingSyncStore
 from synapse.storage.databases.main.state import StateGroupWorkerStore
 from synapse.storage.databases.main.stats import StatsStore
 from synapse.storage.databases.main.stream import StreamWorkerStore
@@ -159,6 +160,7 @@ class GenericWorkerStore(
     SessionStore,
     TaskSchedulerWorkerStore,
     ExperimentalFeaturesStore,
+    SlidingSyncStore,
 ):
     # Properties that multiple storage classes define. Tell mypy what the
     # expected type is.

@@ -54,6 +54,7 @@ UP & quit +---------- YES SUCCESS
 This is all tied together by the AppServiceScheduler which DIs the required
 components.
 """
+
 import logging
 from typing import (
     TYPE_CHECKING,

@@ -18,17 +18,11 @@
 # [This file includes modifications made by New Vector Limited]
 #
 #
-from typing import TYPE_CHECKING, Any, Dict, Type, TypeVar
+from typing import Any, Dict, Type, TypeVar

 import jsonschema

-from synapse._pydantic_compat import HAS_PYDANTIC_V2
-
-if TYPE_CHECKING or HAS_PYDANTIC_V2:
-    from pydantic.v1 import BaseModel, ValidationError, parse_obj_as
-else:
-    from pydantic import BaseModel, ValidationError, parse_obj_as
-
+from synapse._pydantic_compat import BaseModel, ValidationError, parse_obj_as
 from synapse.config._base import ConfigError
 from synapse.types import JsonDict, StrSequence

@@ -338,8 +338,10 @@ class ExperimentalConfig(Config):
         # MSC3391: Removing account data.
         self.msc3391_enabled = experimental.get("msc3391_enabled", False)

-        # MSC3575 (Sliding Sync API endpoints)
-        self.msc3575_enabled: bool = experimental.get("msc3575_enabled", False)
+        # MSC3575 (Sliding Sync) alternate endpoints, c.f. MSC4186.
+        #
+        # This is enabled by default as a replacement for the sliding sync proxy.
+        self.msc3575_enabled: bool = experimental.get("msc3575_enabled", True)

         # MSC3773: Thread notifications
         self.msc3773_enabled: bool = experimental.get("msc3773_enabled", False)
@@ -445,6 +447,3 @@ class ExperimentalConfig(Config):

         # MSC4151: Report room API (Client-Server API)
         self.msc4151_enabled: bool = experimental.get("msc4151_enabled", False)
-
-        # MSC4156: Migrate server_name to via
-        self.msc4156_enabled: bool = experimental.get("msc4156_enabled", False)

@@ -200,16 +200,13 @@ class KeyConfig(Config):
         )
         form_secret = 'form_secret: "%s"' % random_string_with_symbols(50)

-        return (
-            """\
+        return """\
         %(macaroon_secret_key)s
         %(form_secret)s
         signing_key_path: "%(base_key_name)s.signing.key"
         trusted_key_servers:
           - server_name: "matrix.org"
-        """
-            % locals()
-        )
+        """ % locals()

     def read_signing_keys(self, signing_key_path: str, name: str) -> List[SigningKey]:
         """Read the signing keys in the given path.
@@ -249,7 +246,9 @@ class KeyConfig(Config):
             if is_signing_algorithm_supported(key_id):
                 key_base64 = key_data["key"]
                 key_bytes = decode_base64(key_base64)
-                verify_key: "VerifyKeyWithExpiry" = decode_verify_key_bytes(key_id, key_bytes)  # type: ignore[assignment]
+                verify_key: "VerifyKeyWithExpiry" = decode_verify_key_bytes(
+                    key_id, key_bytes
+                )  # type: ignore[assignment]
                 verify_key.expired = key_data["expired_ts"]
                 keys[key_id] = verify_key
             else:

@@ -157,12 +157,9 @@ class LoggingConfig(Config):
         self, config_dir_path: str, server_name: str, **kwargs: Any
     ) -> str:
         log_config = os.path.join(config_dir_path, server_name + ".log.config")
-        return (
-            """\
+        return """\
         log_config: "%(log_config)s"
-        """
-            % locals()
-        )
+        """ % locals()

     def read_arguments(self, args: argparse.Namespace) -> None:
         if args.no_redirect_stdio is not None:

@@ -828,13 +828,10 @@ class ServerConfig(Config):
         ).lstrip()

         if not unsecure_listeners:
-            unsecure_http_bindings = (
-                """- port: %(unsecure_port)s
+            unsecure_http_bindings = """- port: %(unsecure_port)s
             tls: false
             type: http
-            x_forwarded: true"""
-                % locals()
-            )
+            x_forwarded: true""" % locals()

         if not open_private_ports:
             unsecure_http_bindings += (
@@ -853,16 +850,13 @@ class ServerConfig(Config):
         if not secure_listeners:
             secure_http_bindings = ""

-        return (
-            """\
+        return """\
         server_name: "%(server_name)s"
         pid_file: %(pid_file)s
         listeners:
         %(secure_http_bindings)s
         %(unsecure_http_bindings)s
-        """
-            % locals()
-        )
+        """ % locals()

     def read_arguments(self, args: argparse.Namespace) -> None:
         if args.manhole is not None:

@@ -23,7 +23,12 @@ from typing import Any

 from synapse.types import JsonDict

-from ._base import Config
+from ._base import Config, ConfigError, read_file
+
+CONFLICTING_SHARED_SECRET_OPTS_ERROR = """\
+You have configured both `turn_shared_secret` and `turn_shared_secret_path`.
+These are mutually incompatible.
+"""


 class VoipConfig(Config):
@@ -32,6 +37,13 @@ class VoipConfig(Config):
     def read_config(self, config: JsonDict, **kwargs: Any) -> None:
         self.turn_uris = config.get("turn_uris", [])
         self.turn_shared_secret = config.get("turn_shared_secret")
+        turn_shared_secret_path = config.get("turn_shared_secret_path")
+        if turn_shared_secret_path:
+            if self.turn_shared_secret:
+                raise ConfigError(CONFLICTING_SHARED_SECRET_OPTS_ERROR)
+            self.turn_shared_secret = read_file(
+                turn_shared_secret_path, ("turn_shared_secret_path",)
+            ).strip()
         self.turn_username = config.get("turn_username")
         self.turn_password = config.get("turn_password")
         self.turn_user_lifetime = self.parse_duration(

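The new `turn_shared_secret_path` option follows a common config pattern: an inline secret and a path-based secret are mutually exclusive, and file contents are stripped of trailing whitespace. A standalone sketch of that pattern; the helper names here are illustrative, not Synapse's:

from typing import Optional

class ConfigError(Exception):
    pass

def resolve_secret(inline: Optional[str], path: Optional[str]) -> Optional[str]:
    # Refuse ambiguous configuration rather than silently preferring one source.
    if inline and path:
        raise ConfigError("inline secret and secret path are mutually incompatible")
    if path:
        with open(path) as f:
            return f.read().strip()  # strip the trailing newline editors add
    return inline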
@@ -22,17 +22,17 @@

 import argparse
 import logging
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional, Union

 import attr

-from synapse._pydantic_compat import HAS_PYDANTIC_V2
-
-if TYPE_CHECKING or HAS_PYDANTIC_V2:
-    from pydantic.v1 import BaseModel, Extra, StrictBool, StrictInt, StrictStr
-else:
-    from pydantic import BaseModel, Extra, StrictBool, StrictInt, StrictStr
-
+from synapse._pydantic_compat import (
+    BaseModel,
+    Extra,
+    StrictBool,
+    StrictInt,
+    StrictStr,
+)
 from synapse.config._base import (
     Config,
     ConfigError,
@@ -328,10 +328,11 @@ class WorkerConfig(Config):
         )

         # type-ignore: the expression `Union[A, B]` is not a Type[Union[A, B]] currently
-        self.instance_map: Dict[
-            str, InstanceLocationConfig
-        ] = parse_and_validate_mapping(
-            instance_map, InstanceLocationConfig  # type: ignore[arg-type]
+        self.instance_map: Dict[str, InstanceLocationConfig] = (
+            parse_and_validate_mapping(
+                instance_map,
+                InstanceLocationConfig,  # type: ignore[arg-type]
+            )
         )

         # Map from type of streams to source, c.f. WriterLocations.

@@ -589,7 +589,7 @@ class BaseV2KeyFetcher(KeyFetcher):
                 % (server_name,)
             )

-        for key_id, key_data in response_json["old_verify_keys"].items():
+        for key_id, key_data in response_json.get("old_verify_keys", {}).items():
             if is_signing_algorithm_supported(key_id):
                 key_base64 = key_data["key"]
                 key_bytes = decode_base64(key_base64)

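Switching from `response_json["old_verify_keys"]` to `.get("old_verify_keys", {})` makes the fetcher tolerate server key responses that omit the field entirely. The difference in one line:

response_json = {"verify_keys": {"ed25519:abc": {"key": "..."}}}

# response_json["old_verify_keys"]          -> raises KeyError when absent
# response_json.get("old_verify_keys", {})  -> yields {} so the loop is a no-op
for key_id, key_data in response_json.get("old_verify_keys", {}).items():
    pass  # never entered for this response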
@@ -887,7 +887,8 @@ def _check_power_levels(
             raise SynapseError(400, f"{v!r} must be an integer.")
         if k in {"events", "notifications", "users"}:
             if not isinstance(v, collections.abc.Mapping) or not all(
-                type(v) is int for v in v.values()  # noqa: E721
+                type(v) is int
+                for v in v.values()  # noqa: E721
             ):
                 raise SynapseError(
                     400,

@@ -80,7 +80,7 @@ def load_legacy_presence_router(hs: "HomeServer") -> None:
     # All methods that the module provides should be async, but this wasn't enforced
     # in the old module system, so we wrap them if needed
     def async_wrapper(
-        f: Optional[Callable[P, R]]
+        f: Optional[Callable[P, R]],
     ) -> Optional[Callable[P, Awaitable[R]]]:
         # f might be None if the callback isn't implemented by the module. In this
        # case we don't want to register a callback at all so we return None.

@@ -504,7 +504,7 @@ class UnpersistedEventContext(UnpersistedEventContextBase):


 def _encode_state_group_delta(
-    state_group_delta: Dict[Tuple[int, int], StateMap[str]]
+    state_group_delta: Dict[Tuple[int, int], StateMap[str]],
 ) -> List[Tuple[int, int, Optional[List[Tuple[str, str, str]]]]]:
     if not state_group_delta:
         return []
@@ -517,7 +517,7 @@ def _encode_state_group_delta(


 def _decode_state_group_delta(
-    input: List[Tuple[int, int, List[Tuple[str, str, str]]]]
+    input: List[Tuple[int, int, List[Tuple[str, str, str]]]],
 ) -> Dict[Tuple[int, int], StateMap[str]]:
     if not input:
         return {}
@@ -544,7 +544,7 @@ def _encode_state_dict(


 def _decode_state_dict(
-    input: Optional[List[Tuple[str, str, str]]]
+    input: Optional[List[Tuple[str, str, str]]],
 ) -> Optional[StateMap[str]]:
     """Decodes a state dict encoded using `_encode_state_dict` above"""
     if input is None:

@@ -19,17 +19,11 @@
 #
 #
 import collections.abc
-from typing import TYPE_CHECKING, List, Type, Union, cast
+from typing import List, Type, Union, cast

 import jsonschema

-from synapse._pydantic_compat import HAS_PYDANTIC_V2
-
-if TYPE_CHECKING or HAS_PYDANTIC_V2:
-    from pydantic.v1 import Field, StrictBool, StrictStr
-else:
-    from pydantic import Field, StrictBool, StrictStr
-
+from synapse._pydantic_compat import Field, StrictBool, StrictStr
 from synapse.api.constants import (
     MAX_ALIAS_LENGTH,
     EventContentFields,

@@ -19,5 +19,4 @@
 #
 #

-""" This package includes all the federation specific logic.
-"""
+"""This package includes all the federation specific logic."""

@@ -20,7 +20,7 @@
 #
 #

-""" This module contains all the persistence actions done by the federation
+"""This module contains all the persistence actions done by the federation
 package.

 These actions are mostly only used by the :py:mod:`.replication` module.

@@ -859,7 +859,6 @@ class FederationMediaThumbnailServlet(BaseFederationServerServlet):
         request: SynapseRequest,
         media_id: str,
     ) -> None:
-
         width = parse_integer(request, "width", required=True)
         height = parse_integer(request, "height", required=True)
         method = parse_string(request, "method", "scale")

@@ -19,7 +19,7 @@
 #
 #

-""" Defines the JSON structure of the protocol units used by the server to
+"""Defines the JSON structure of the protocol units used by the server to
 server protocol.
 """

@@ -118,10 +118,10 @@ class AccountHandler:
         }

         if self._use_account_validity_in_account_status:
-            status["org.matrix.expired"] = (
-                await self._account_validity_handler.is_user_expired(
-                    user_id.to_string()
-                )
+            status[
+                "org.matrix.expired"
+            ] = await self._account_validity_handler.is_user_expired(
+                user_id.to_string()
             )

         return status

@@ -197,14 +197,16 @@ class AdminHandler:
             # events that we have and then filtering, this isn't the most
             # efficient method perhaps but it does guarantee we get everything.
             while True:
-                events, _ = (
-                    await self._store.paginate_room_events_by_topological_ordering(
-                        room_id=room_id,
-                        from_key=from_key,
-                        to_key=to_key,
-                        limit=100,
-                        direction=Direction.FORWARDS,
-                    )
+                (
+                    events,
+                    _,
+                    _,
+                ) = await self._store.paginate_room_events_by_topological_ordering(
+                    room_id=room_id,
+                    from_key=from_key,
+                    to_key=to_key,
+                    limit=100,
+                    direction=Direction.FORWARDS,
                 )
                 if not events:
                     break

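The export loop now unpacks a three-element return (events, a token, and an extra value this caller ignores) and pages forward until the room is exhausted. A simplified sketch of the paging shape, with a stand-in fetch function in place of the store method:

from typing import List, Optional, Tuple

def fetch_page(from_key: Optional[str]) -> Tuple[List[dict], Optional[str], int]:
    """Stand-in for paginate_room_events_by_topological_ordering()."""
    return [], None, 0  # (events, next_key, ignored third element)

from_key: Optional[str] = None
while True:
    events, from_key, _ = fetch_page(from_key)
    if not events:
        break  # keep paging until a fetch returns nothing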
@@ -166,8 +166,7 @@ def login_id_phone_to_thirdparty(identifier: JsonDict) -> Dict[str, str]:
     if "country" not in identifier or (
         # The specification requires a "phone" field, while Synapse used to require a "number"
         # field. Accept both for backwards compatibility.
-        "phone" not in identifier
-        and "number" not in identifier
+        "phone" not in identifier and "number" not in identifier
     ):
         raise SynapseError(
             400, "Invalid phone-type identifier", errcode=Codes.INVALID_PARAM

@@ -267,31 +267,27 @@ class DeviceWorkerHandler:
                 newly_left_rooms.add(change.room_id)

         # We now work out if any other users have since joined or left the rooms
-        # the user is currently in. First we filter out rooms that we know
-        # haven't changed recently.
-        rooms_changed = self.store.get_rooms_that_changed(
-            joined_room_ids, from_token.room_key
-        )
+        # the user is currently in.

         # List of membership changes per room
         room_to_deltas: Dict[str, List[StateDelta]] = {}
         # The set of event IDs of membership events (so we can fetch their
         # associated membership).
         memberships_to_fetch: Set[str] = set()
-        for room_id in rooms_changed:
-            # TODO: Only pull out membership events?
-            state_changes = await self.store.get_current_state_deltas_for_room(
-                room_id, from_token=from_token.room_key, to_token=now_token.room_key
-            )
-            for delta in state_changes:
-                if delta.event_type != EventTypes.Member:
-                    continue
-
-                room_to_deltas.setdefault(room_id, []).append(delta)
-                if delta.event_id:
-                    memberships_to_fetch.add(delta.event_id)
-                if delta.prev_event_id:
-                    memberships_to_fetch.add(delta.prev_event_id)
+
+        # TODO: Only pull out membership events?
+        state_changes = await self.store.get_current_state_deltas_for_rooms(
+            joined_room_ids, from_token=from_token.room_key, to_token=now_token.room_key
+        )
+        for delta in state_changes:
+            if delta.event_type != EventTypes.Member:
+                continue
+
+            room_to_deltas.setdefault(delta.room_id, []).append(delta)
+            if delta.event_id:
+                memberships_to_fetch.add(delta.event_id)
+            if delta.prev_event_id:
+                memberships_to_fetch.add(delta.prev_event_id)

         # Fetch all the memberships for the membership events
         event_id_to_memberships = await self.store.get_membership_from_event_ids(

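The rewritten device handler fetches state deltas for all joined rooms in one call and then buckets membership deltas by `delta.room_id`. A small sketch of that grouping step; the field names mirror the diff, but the delta class itself is a minimal illustrative stand-in:

from dataclasses import dataclass
from typing import Dict, List, Optional, Set, Tuple

@dataclass
class StateDelta:  # minimal stand-in for Synapse's StateDelta
    room_id: str
    event_type: str
    event_id: Optional[str]
    prev_event_id: Optional[str]

def group_membership_deltas(
    deltas: List[StateDelta],
) -> Tuple[Dict[str, List[StateDelta]], Set[str]]:
    room_to_deltas: Dict[str, List[StateDelta]] = {}
    memberships_to_fetch: Set[str] = set()
    for delta in deltas:
        if delta.event_type != "m.room.member":
            continue
        room_to_deltas.setdefault(delta.room_id, []).append(delta)
        # Collect both the new and previous membership event IDs for lookup.
        if delta.event_id:
            memberships_to_fetch.add(delta.event_id)
        if delta.prev_event_id:
            memberships_to_fetch.add(delta.prev_event_id)
    return room_to_deltas, memberships_to_fetch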
@@ -265,9 +265,9 @@ class DirectoryHandler:
     async def get_association(self, room_alias: RoomAlias) -> JsonDict:
         room_id = None
         if self.hs.is_mine(room_alias):
-            result: Optional[RoomAliasMapping] = (
-                await self.get_association_from_room_alias(room_alias)
-            )
+            result: Optional[
+                RoomAliasMapping
+            ] = await self.get_association_from_room_alias(room_alias)

             if result:
                 room_id = result.room_id
@@ -512,11 +512,9 @@ class DirectoryHandler:
             raise SynapseError(403, "Not allowed to publish room")

         # Check if publishing is blocked by a third party module
-        allowed_by_third_party_rules = (
-            await (
-                self._third_party_event_rules.check_visibility_can_be_modified(
-                    room_id, visibility
-                )
+        allowed_by_third_party_rules = await (
+            self._third_party_event_rules.check_visibility_can_be_modified(
+                room_id, visibility
             )
         )
         if not allowed_by_third_party_rules:

@@ -1001,11 +1001,11 @@ class FederationHandler:
         )

         if include_auth_user_id:
-            event_content[EventContentFields.AUTHORISING_USER] = (
-                await self._event_auth_handler.get_user_which_could_invite(
-                    room_id,
-                    state_ids,
-                )
+            event_content[
+                EventContentFields.AUTHORISING_USER
+            ] = await self._event_auth_handler.get_user_which_could_invite(
+                room_id,
+                state_ids,
             )

         builder = self.event_builder_factory.for_room_version(

@@ -21,6 +21,7 @@
 #

 """Utilities for interacting with Identity Servers"""
+
 import logging
 import urllib.parse
 from typing import TYPE_CHECKING, Awaitable, Callable, Dict, List, Optional, Tuple

@@ -1225,10 +1225,9 @@ class EventCreationHandler:
         )

         if prev_event_ids is not None:
-            assert (
-                len(prev_event_ids) <= 10
-            ), "Attempting to create an event with %i prev_events" % (
-                len(prev_event_ids),
+            assert len(prev_event_ids) <= 10, (
+                "Attempting to create an event with %i prev_events"
+                % (len(prev_event_ids),)
             )
         else:
             prev_event_ids = await self.store.get_prev_events_for_room(builder.room_id)

@@ -507,15 +507,17 @@ class PaginationHandler:

         # Initially fetch the events from the database. With any luck, we can return
         # these without blocking on backfill (handled below).
-        events, next_key = (
-            await self.store.paginate_room_events_by_topological_ordering(
-                room_id=room_id,
-                from_key=from_token.room_key,
-                to_key=to_room_key,
-                direction=pagin_config.direction,
-                limit=pagin_config.limit,
-                event_filter=event_filter,
-            )
+        (
+            events,
+            next_key,
+            _,
+        ) = await self.store.paginate_room_events_by_topological_ordering(
+            room_id=room_id,
+            from_key=from_token.room_key,
+            to_key=to_room_key,
+            direction=pagin_config.direction,
+            limit=pagin_config.limit,
+            event_filter=event_filter,
+        )

         if pagin_config.direction == Direction.BACKWARDS:
@@ -584,15 +586,17 @@ class PaginationHandler:
         # If we did backfill something, refetch the events from the database to
         # catch anything new that might have been added since we last fetched.
         if did_backfill:
-            events, next_key = (
-                await self.store.paginate_room_events_by_topological_ordering(
-                    room_id=room_id,
-                    from_key=from_token.room_key,
-                    to_key=to_room_key,
-                    direction=pagin_config.direction,
-                    limit=pagin_config.limit,
-                    event_filter=event_filter,
-                )
+            (
+                events,
+                next_key,
+                _,
+            ) = await self.store.paginate_room_events_by_topological_ordering(
+                room_id=room_id,
+                from_key=from_token.room_key,
+                to_key=to_room_key,
+                direction=pagin_config.direction,
+                limit=pagin_config.limit,
+                event_filter=event_filter,
+            )
         else:
             # Otherwise, we can backfill in the background for eventual

@@ -71,6 +71,7 @@ user state; this device follows the normal timeout logic (see above) and will
 automatically be replaced with any information from currently available devices.

 """
+
 import abc
 import contextlib
 import itertools
@@ -493,9 +494,9 @@ class WorkerPresenceHandler(BasePresenceHandler):

         # The number of ongoing syncs on this process, by (user ID, device ID).
         # Empty if _presence_enabled is false.
-        self._user_device_to_num_current_syncs: Dict[Tuple[str, Optional[str]], int] = (
-            {}
-        )
+        self._user_device_to_num_current_syncs: Dict[
+            Tuple[str, Optional[str]], int
+        ] = {}

         self.notifier = hs.get_notifier()
         self.instance_id = hs.get_instance_id()
@@ -818,9 +819,9 @@ class PresenceHandler(BasePresenceHandler):

         # Keeps track of the number of *ongoing* syncs on this process. While
         # this is non zero a user will never go offline.
-        self._user_device_to_num_current_syncs: Dict[Tuple[str, Optional[str]], int] = (
-            {}
-        )
+        self._user_device_to_num_current_syncs: Dict[
+            Tuple[str, Optional[str]], int
+        ] = {}

         # Keeps track of the number of *ongoing* syncs on other processes.
         #

@@ -74,6 +74,17 @@ class ProfileHandler:
         self._third_party_rules = hs.get_module_api_callbacks().third_party_event_rules

     async def get_profile(self, user_id: str, ignore_backoff: bool = True) -> JsonDict:
+        """
+        Get a user's profile as a JSON dictionary.
+
+        Args:
+            user_id: The user to fetch the profile of.
+            ignore_backoff: True to ignore backoff when fetching over federation.
+
+        Returns:
+            A JSON dictionary. For local queries this will include the displayname and avatar_url
+            fields. For remote queries it may contain arbitrary information.
+        """
         target_user = UserID.from_string(user_id)

         if self.hs.is_mine(target_user):
@@ -107,6 +118,15 @@ class ProfileHandler:
             raise e.to_synapse_error()

     async def get_displayname(self, target_user: UserID) -> Optional[str]:
+        """
+        Fetch a user's display name from their profile.
+
+        Args:
+            target_user: The user to fetch the display name of.
+
+        Returns:
+            The user's display name or None if unset.
+        """
         if self.hs.is_mine(target_user):
             try:
                 displayname = await self.store.get_profile_displayname(target_user)
@@ -203,6 +223,15 @@ class ProfileHandler:
         await self._update_join_states(requester, target_user)

     async def get_avatar_url(self, target_user: UserID) -> Optional[str]:
+        """
+        Fetch a user's avatar URL from their profile.
+
+        Args:
+            target_user: The user to fetch the avatar URL of.
+
+        Returns:
+            The user's avatar URL or None if unset.
+        """
         if self.hs.is_mine(target_user):
             try:
                 avatar_url = await self.store.get_profile_avatar_url(target_user)
@@ -322,9 +351,9 @@ class ProfileHandler:
             server_name = host

         if self._is_mine_server_name(server_name):
-            media_info: Optional[Union[LocalMedia, RemoteMedia]] = (
-                await self.store.get_local_media(media_id)
-            )
+            media_info: Optional[
+                Union[LocalMedia, RemoteMedia]
+            ] = await self.store.get_local_media(media_id)
         else:
             media_info = await self.store.get_cached_remote_media(server_name, media_id)

@@ -403,6 +432,12 @@ class ProfileHandler:
     async def _update_join_states(
         self, requester: Requester, target_user: UserID
     ) -> None:
+        """
+        Update the membership events of each room the user is joined to with the
+        new profile information.
+
+        Note that this stomps over any custom display name or avatar URL in member events.
+        """
         if not self.hs.is_mine(target_user):
             return

@@ -188,13 +188,13 @@ class RelationsHandler:
         if include_original_event:
             # Do not bundle aggregations when retrieving the original event because
             # we want the content before relations are applied to it.
-            return_value["original_event"] = (
-                await self._event_serializer.serialize_event(
-                    event,
-                    now,
-                    bundle_aggregations=None,
-                    config=serialize_options,
-                )
+            return_value[
+                "original_event"
+            ] = await self._event_serializer.serialize_event(
+                event,
+                now,
+                bundle_aggregations=None,
+                config=serialize_options,
             )

         if next_token:

@@ -20,6 +20,7 @@
 #

 """Contains functions for performing actions on rooms."""
+
 import itertools
 import logging
 import math
@@ -900,11 +901,9 @@ class RoomCreationHandler:
         )

         # Check whether this visibility value is blocked by a third party module
-        allowed_by_third_party_rules = (
-            await (
-                self._third_party_event_rules.check_visibility_can_be_modified(
-                    room_id, visibility
-                )
+        allowed_by_third_party_rules = await (
+            self._third_party_event_rules.check_visibility_can_be_modified(
+                room_id, visibility
             )
         )
         if not allowed_by_third_party_rules:
@@ -1754,7 +1753,7 @@ class RoomEventSource(EventSource[RoomStreamToken, EventBase]):
         )

         events = list(room_events)
-        events.extend(e for evs, _ in room_to_events.values() for e in evs)
+        events.extend(e for evs, _, _ in room_to_events.values() for e in evs)

         # We know stream_ordering must be not None here, as its been
         # persisted, but mypy doesn't know that

@@ -1302,11 +1302,11 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
             # If this is going to be a local join, additional information must
             # be included in the event content in order to efficiently validate
             # the event.
-            content[EventContentFields.AUTHORISING_USER] = (
-                await self.event_auth_handler.get_user_which_could_invite(
-                    room_id,
-                    state_before_join,
-                )
+            content[
+                EventContentFields.AUTHORISING_USER
+            ] = await self.event_auth_handler.get_user_which_could_invite(
+                room_id,
+                state_before_join,
             )

         return False, []
@@ -1415,9 +1415,9 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):

         if requester is not None:
             sender = UserID.from_string(event.sender)
-            assert (
-                sender == requester.user
-            ), "Sender (%s) must be same as requester (%s)" % (sender, requester.user)
+            assert sender == requester.user, (
+                "Sender (%s) must be same as requester (%s)" % (sender, requester.user)
+            )
             assert self.hs.is_mine(sender), "Sender must be our own: %s" % (sender,)
         else:
             requester = types.create_requester(target_user)

@@ -183,8 +183,13 @@ class RoomSummaryHandler:
     ) -> JsonDict:
         """See docstring for SpaceSummaryHandler.get_room_hierarchy."""

-        # First of all, check that the room is accessible.
-        if not await self._is_local_room_accessible(requested_room_id, requester):
+        # If the room is available locally, quickly check that the user can access it.
+        local_room = await self._store.is_host_joined(
+            requested_room_id, self._server_name
+        )
+        if local_room and not await self._is_local_room_accessible(
+            requested_room_id, requester
+        ):
             raise UnstableSpecAuthError(
                 403,
                 "User %s not in room %s, and room previews are disabled"
@@ -192,6 +197,22 @@ class RoomSummaryHandler:
                 errcode=Codes.NOT_JOINED,
             )

+        if not local_room:
+            room_hierarchy = await self._summarize_remote_room_hierarchy(
+                _RoomQueueEntry(requested_room_id, ()),
+                False,
+            )
+            root_room_entry = room_hierarchy[0]
+            if not root_room_entry or not await self._is_remote_room_accessible(
+                requester, requested_room_id, root_room_entry.room
+            ):
+                raise UnstableSpecAuthError(
+                    403,
+                    "User %s not in room %s, and room previews are disabled"
+                    % (requester, requested_room_id),
+                    errcode=Codes.NOT_JOINED,
+                )
+
         # If this is continuing a previous session, pull the persisted data.
         if from_token:
             try:

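The hierarchy entry point now branches on whether this server is in the room at all: locally known rooms use the cheap local accessibility check, while unknown rooms are summarised over federation first and then checked with the remote accessibility rules. A condensed sketch of that control flow, with the handler's dependencies injected as callables for the sake of a self-contained example:

from typing import Awaitable, Callable, Optional

async def check_room_accessible(
    room_id: str,
    requester: str,
    is_host_joined: Callable[[str], Awaitable[bool]],
    local_check: Callable[[str, str], Awaitable[bool]],
    remote_summary: Callable[[str], Awaitable[Optional[object]]],
    remote_check: Callable[[str, str, object], Awaitable[bool]],
) -> None:
    """Condensed from the diff above; raises on inaccessible rooms."""
    if await is_host_joined(room_id):
        # Locally known room: the local accessibility check suffices.
        if not await local_check(room_id, requester):
            raise PermissionError("room previews are disabled")
        return

    # Unknown locally: fetch the room summary over federation first, then
    # apply the remote accessibility rules to the root entry.
    root = await remote_summary(room_id)
    if root is None or not await remote_check(requester, room_id, root):
        raise PermissionError("room previews are disabled")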
@@ -423,9 +423,9 @@ class SearchHandler:
         }

         if search_result.room_groups and "room_id" in group_keys:
-            rooms_cat_res.setdefault("groups", {})[
-                "room_id"
-            ] = search_result.room_groups
+            rooms_cat_res.setdefault("groups", {})["room_id"] = (
+                search_result.room_groups
+            )

         if sender_group and "sender" in group_keys:
             rooms_cat_res.setdefault("groups", {})["sender"] = sender_group

synapse/handlers/sliding_sync/__init__.py (new file, 1216 lines): diff suppressed because it is too large.

synapse/handlers/sliding_sync/extensions.py (new file, 861 lines):
@@ -0,0 +1,861 @@
|
||||
#
|
||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||
#
|
||||
# Copyright (C) 2023 New Vector, Ltd
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# See the GNU Affero General Public License for more details:
|
||||
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
||||
#
|
||||
|
||||
import itertools
|
||||
import logging
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
AbstractSet,
|
||||
ChainMap,
|
||||
Dict,
|
||||
Mapping,
|
||||
MutableMapping,
|
||||
Optional,
|
||||
Sequence,
|
||||
Set,
|
||||
cast,
|
||||
)
|
||||
|
||||
from typing_extensions import assert_never
|
||||
|
||||
from synapse.api.constants import AccountDataTypes, EduTypes
|
||||
from synapse.handlers.receipts import ReceiptEventSource
|
||||
from synapse.logging.opentracing import trace
|
||||
from synapse.storage.databases.main.receipts import ReceiptInRoom
|
||||
from synapse.types import (
|
||||
DeviceListUpdates,
|
||||
JsonMapping,
|
||||
MultiWriterStreamToken,
|
||||
SlidingSyncStreamToken,
|
||||
StrCollection,
|
||||
StreamToken,
|
||||
)
|
||||
from synapse.types.handlers.sliding_sync import (
|
||||
HaveSentRoomFlag,
|
||||
MutablePerConnectionState,
|
||||
OperationType,
|
||||
PerConnectionState,
|
||||
SlidingSyncConfig,
|
||||
SlidingSyncResult,
|
||||
)
|
||||
from synapse.util.async_helpers import concurrently_execute
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SlidingSyncExtensionHandler:
|
||||
"""Handles the extensions to sliding sync."""
|
||||
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
self.store = hs.get_datastores().main
|
||||
self.event_sources = hs.get_event_sources()
|
||||
self.device_handler = hs.get_device_handler()
|
||||
self.push_rules_handler = hs.get_push_rules_handler()
|
||||
|
||||
@trace
|
||||
async def get_extensions_response(
|
||||
self,
|
||||
sync_config: SlidingSyncConfig,
|
||||
previous_connection_state: "PerConnectionState",
|
||||
new_connection_state: "MutablePerConnectionState",
|
||||
actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList],
|
||||
actual_room_ids: Set[str],
|
||||
actual_room_response_map: Mapping[str, SlidingSyncResult.RoomResult],
|
||||
to_token: StreamToken,
|
||||
from_token: Optional[SlidingSyncStreamToken],
|
||||
) -> SlidingSyncResult.Extensions:
|
||||
"""Handle extension requests.
|
||||
|
||||
Args:
|
||||
sync_config: Sync configuration
|
||||
new_connection_state: Snapshot of the current per-connection state
|
||||
new_per_connection_state: A mutable copy of the per-connection
|
||||
state, used to record updates to the state during this request.
|
||||
actual_lists: Sliding window API. A map of list key to list results in the
|
||||
Sliding Sync response.
|
||||
actual_room_ids: The actual room IDs in the the Sliding Sync response.
|
||||
actual_room_response_map: A map of room ID to room results in the the
|
||||
Sliding Sync response.
|
||||
to_token: The point in the stream to sync up to.
|
||||
from_token: The point in the stream to sync from.
|
||||
"""
|
||||
|
||||
if sync_config.extensions is None:
|
||||
return SlidingSyncResult.Extensions()
|
||||
|
||||
to_device_response = None
|
||||
if sync_config.extensions.to_device is not None:
|
||||
to_device_response = await self.get_to_device_extension_response(
|
||||
sync_config=sync_config,
|
||||
to_device_request=sync_config.extensions.to_device,
|
||||
to_token=to_token,
|
||||
)
|
||||
|
||||
e2ee_response = None
|
||||
if sync_config.extensions.e2ee is not None:
|
||||
e2ee_response = await self.get_e2ee_extension_response(
|
||||
sync_config=sync_config,
|
||||
e2ee_request=sync_config.extensions.e2ee,
|
||||
to_token=to_token,
|
||||
from_token=from_token,
|
||||
)
|
||||
|
||||
account_data_response = None
|
||||
if sync_config.extensions.account_data is not None:
|
||||
account_data_response = await self.get_account_data_extension_response(
|
||||
sync_config=sync_config,
|
||||
previous_connection_state=previous_connection_state,
|
||||
new_connection_state=new_connection_state,
|
||||
actual_lists=actual_lists,
|
||||
actual_room_ids=actual_room_ids,
|
||||
account_data_request=sync_config.extensions.account_data,
|
||||
to_token=to_token,
|
||||
from_token=from_token,
|
||||
)
|
||||
|
||||
receipts_response = None
|
||||
if sync_config.extensions.receipts is not None:
|
||||
receipts_response = await self.get_receipts_extension_response(
|
||||
sync_config=sync_config,
|
||||
previous_connection_state=previous_connection_state,
|
||||
new_connection_state=new_connection_state,
|
||||
actual_lists=actual_lists,
|
||||
actual_room_ids=actual_room_ids,
|
||||
actual_room_response_map=actual_room_response_map,
|
||||
receipts_request=sync_config.extensions.receipts,
|
||||
to_token=to_token,
|
||||
from_token=from_token,
|
||||
)
|
||||
|
||||
typing_response = None
|
||||
if sync_config.extensions.typing is not None:
|
||||
typing_response = await self.get_typing_extension_response(
|
||||
sync_config=sync_config,
|
||||
actual_lists=actual_lists,
|
||||
actual_room_ids=actual_room_ids,
|
||||
actual_room_response_map=actual_room_response_map,
|
||||
typing_request=sync_config.extensions.typing,
|
||||
to_token=to_token,
|
||||
from_token=from_token,
|
||||
)
|
||||
|
||||
return SlidingSyncResult.Extensions(
|
||||
to_device=to_device_response,
|
||||
e2ee=e2ee_response,
|
||||
account_data=account_data_response,
|
||||
receipts=receipts_response,
|
||||
typing=typing_response,
|
||||
)
|
||||
|
||||
def find_relevant_room_ids_for_extension(
|
||||
self,
|
||||
requested_lists: Optional[StrCollection],
|
||||
requested_room_ids: Optional[StrCollection],
|
||||
actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList],
|
||||
actual_room_ids: AbstractSet[str],
|
||||
) -> Set[str]:
|
||||
"""
|
||||
Handle the reserved `lists`/`rooms` keys for extensions. Extensions should only
|
||||
return results for rooms in the Sliding Sync response. This matches up the
|
||||
requested rooms/lists with the actual lists/rooms in the Sliding Sync response.
|
||||
|
||||
{"lists": []} // Do not process any lists.
|
||||
{"lists": ["rooms", "dms"]} // Process only a subset of lists.
|
||||
{"lists": ["*"]} // Process all lists defined in the Sliding Window API. (This is the default.)
|
||||
|
||||
{"rooms": []} // Do not process any specific rooms.
|
||||
{"rooms": ["!a:b", "!c:d"]} // Process only a subset of room subscriptions.
|
||||
{"rooms": ["*"]} // Process all room subscriptions defined in the Room Subscription API. (This is the default.)
|
||||
|
||||
Args:
|
||||
requested_lists: The `lists` from the extension request.
|
||||
requested_room_ids: The `rooms` from the extension request.
|
||||
actual_lists: The actual lists from the Sliding Sync response.
|
||||
actual_room_ids: The actual room subscriptions from the Sliding Sync request.
|
||||
"""
|
||||
|
||||
# We only want to include account data for rooms that are already in the sliding
|
||||
# sync response AND that were requested in the account data request.
|
||||
relevant_room_ids: Set[str] = set()
|
||||
|
||||
# See what rooms from the room subscriptions we should get account data for
|
||||
if requested_room_ids is not None:
|
||||
for room_id in requested_room_ids:
|
||||
# A wildcard means we process all rooms from the room subscriptions
|
||||
if room_id == "*":
|
||||
relevant_room_ids.update(actual_room_ids)
|
||||
break
|
||||
|
||||
if room_id in actual_room_ids:
|
||||
relevant_room_ids.add(room_id)
|
||||
|
||||
# See what rooms from the sliding window lists we should get account data for
|
||||
if requested_lists is not None:
|
||||
for list_key in requested_lists:
|
||||
# Just some typing because we share the variable name in multiple places
|
||||
actual_list: Optional[SlidingSyncResult.SlidingWindowList] = None
|
||||
|
||||
# A wildcard means we process rooms from all lists
|
||||
if list_key == "*":
|
||||
for actual_list in actual_lists.values():
|
||||
# We only expect a single SYNC operation for any list
|
||||
assert len(actual_list.ops) == 1
|
||||
sync_op = actual_list.ops[0]
|
||||
assert sync_op.op == OperationType.SYNC
|
||||
|
||||
relevant_room_ids.update(sync_op.room_ids)
|
||||
|
||||
break
|
||||
|
||||
actual_list = actual_lists.get(list_key)
|
||||
if actual_list is not None:
|
||||
# We only expect a single SYNC operation for any list
|
||||
assert len(actual_list.ops) == 1
|
||||
sync_op = actual_list.ops[0]
|
||||
assert sync_op.op == OperationType.SYNC
|
||||
|
||||
relevant_room_ids.update(sync_op.room_ids)
|
||||
|
||||
return relevant_room_ids
|
||||
|
||||
@trace
|
||||
async def get_to_device_extension_response(
|
||||
self,
|
||||
sync_config: SlidingSyncConfig,
|
||||
to_device_request: SlidingSyncConfig.Extensions.ToDeviceExtension,
|
||||
to_token: StreamToken,
|
||||
) -> Optional[SlidingSyncResult.Extensions.ToDeviceExtension]:
|
||||
"""Handle to-device extension (MSC3885)
|
||||
|
||||
Args:
|
||||
sync_config: Sync configuration
|
||||
to_device_request: The to-device extension from the request
|
||||
to_token: The point in the stream to sync up to.
|
||||
"""
|
||||
user_id = sync_config.user.to_string()
|
||||
device_id = sync_config.requester.device_id
|
||||
|
||||
# Skip if the extension is not enabled
|
||||
if not to_device_request.enabled:
|
||||
return None
|
||||
|
||||
# Check that this request has a valid device ID (not all requests have
|
||||
# to belong to a device, and so device_id is None)
|
||||
if device_id is None:
|
||||
return SlidingSyncResult.Extensions.ToDeviceExtension(
|
||||
next_batch=f"{to_token.to_device_key}",
|
||||
events=[],
|
||||
)
|
||||
|
||||
since_stream_id = 0
|
||||
if to_device_request.since is not None:
|
||||
# We've already validated this is an int.
|
||||
since_stream_id = int(to_device_request.since)
|
||||
|
||||
if to_token.to_device_key < since_stream_id:
|
||||
# The since token is ahead of our current token, so we return an
|
||||
# empty response.
|
||||
logger.warning(
|
||||
"Got to-device.since from the future. since token: %r is ahead of our current to_device stream position: %r",
|
||||
since_stream_id,
|
||||
to_token.to_device_key,
|
||||
)
|
||||
return SlidingSyncResult.Extensions.ToDeviceExtension(
|
||||
next_batch=to_device_request.since,
|
||||
events=[],
|
||||
)
|
||||
|
||||
# Delete everything before the given since token, as we know the
|
||||
# device must have received them.
|
||||
deleted = await self.store.delete_messages_for_device(
|
||||
user_id=user_id,
|
||||
device_id=device_id,
|
||||
up_to_stream_id=since_stream_id,
|
||||
)
|
||||
|
||||
logger.debug(
|
||||
"Deleted %d to-device messages up to %d for %s",
|
||||
deleted,
|
||||
since_stream_id,
|
||||
user_id,
|
||||
)
|
||||
|
||||
messages, stream_id = await self.store.get_messages_for_device(
|
||||
user_id=user_id,
|
||||
device_id=device_id,
|
||||
from_stream_id=since_stream_id,
|
||||
to_stream_id=to_token.to_device_key,
|
||||
limit=min(to_device_request.limit, 100), # Limit to at most 100 events
|
||||
)
|
||||
|
||||
return SlidingSyncResult.Extensions.ToDeviceExtension(
|
||||
next_batch=f"{stream_id}",
|
||||
events=messages,
|
||||
)
|
||||
|
||||
    @trace
    async def get_e2ee_extension_response(
        self,
        sync_config: SlidingSyncConfig,
        e2ee_request: SlidingSyncConfig.Extensions.E2eeExtension,
        to_token: StreamToken,
        from_token: Optional[SlidingSyncStreamToken],
    ) -> Optional[SlidingSyncResult.Extensions.E2eeExtension]:
        """Handle E2EE device extension (MSC3884)

        Args:
            sync_config: Sync configuration
            e2ee_request: The e2ee extension from the request
            to_token: The point in the stream to sync up to.
            from_token: The point in the stream to sync from.
        """
        user_id = sync_config.user.to_string()
        device_id = sync_config.requester.device_id

        # Skip if the extension is not enabled
        if not e2ee_request.enabled:
            return None

        device_list_updates: Optional[DeviceListUpdates] = None
        if from_token is not None:
            # TODO: This should take into account the `from_token` and `to_token`
            device_list_updates = await self.device_handler.get_user_ids_changed(
                user_id=user_id,
                from_token=from_token.stream_token,
            )

        device_one_time_keys_count: Mapping[str, int] = {}
        device_unused_fallback_key_types: Sequence[str] = []
        if device_id:
            # TODO: We should have a way to let clients differentiate between the states of:
            #   * no change in OTK count since the provided since token
            #   * the server has zero OTKs left for this device
            # Spec issue: https://github.com/matrix-org/matrix-doc/issues/3298
            device_one_time_keys_count = await self.store.count_e2e_one_time_keys(
                user_id, device_id
            )
            device_unused_fallback_key_types = (
                await self.store.get_e2e_unused_fallback_key_types(user_id, device_id)
            )

        return SlidingSyncResult.Extensions.E2eeExtension(
            device_list_updates=device_list_updates,
            device_one_time_keys_count=device_one_time_keys_count,
            device_unused_fallback_key_types=device_unused_fallback_key_types,
        )

    @trace
    async def get_account_data_extension_response(
        self,
        sync_config: SlidingSyncConfig,
        previous_connection_state: "PerConnectionState",
        new_connection_state: "MutablePerConnectionState",
        actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList],
        actual_room_ids: Set[str],
        account_data_request: SlidingSyncConfig.Extensions.AccountDataExtension,
        to_token: StreamToken,
        from_token: Optional[SlidingSyncStreamToken],
    ) -> Optional[SlidingSyncResult.Extensions.AccountDataExtension]:
        """Handle Account Data extension (MSC3959)

        Args:
            sync_config: Sync configuration
            previous_connection_state: The current per-connection state
            new_connection_state: A mutable copy of the per-connection
                state, used to record updates to the state.
            actual_lists: Sliding window API. A map of list key to list results in the
                Sliding Sync response.
            actual_room_ids: The actual room IDs in the Sliding Sync response.
            account_data_request: The account_data extension from the request
            to_token: The point in the stream to sync up to.
            from_token: The point in the stream to sync from.
        """
        user_id = sync_config.user.to_string()

        # Skip if the extension is not enabled
        if not account_data_request.enabled:
            return None

        global_account_data_map: Mapping[str, JsonMapping] = {}
        if from_token is not None:
            # TODO: This should take into account the `from_token` and `to_token`
            global_account_data_map = (
                await self.store.get_updated_global_account_data_for_user(
                    user_id, from_token.stream_token.account_data_key
                )
            )

            # TODO: This should take into account the `from_token` and `to_token`
            have_push_rules_changed = await self.store.have_push_rules_changed_for_user(
                user_id, from_token.stream_token.push_rules_key
            )
            if have_push_rules_changed:
                # TODO: This should take into account the `from_token` and `to_token`
                global_account_data_map[
                    AccountDataTypes.PUSH_RULES
                ] = await self.push_rules_handler.push_rules_for_user(sync_config.user)
        else:
            # TODO: This should take into account the `to_token`
            immutable_global_account_data_map = (
                await self.store.get_global_account_data_for_user(user_id)
            )

            # Use a `ChainMap` to avoid copying the immutable data from the cache
            global_account_data_map = ChainMap(
                {
                    # TODO: This should take into account the `to_token`
                    AccountDataTypes.PUSH_RULES: await self.push_rules_handler.push_rules_for_user(
                        sync_config.user
                    )
                },
                # Cast is safe because `ChainMap` only mutates the top-most map,
                # see https://github.com/python/typeshed/issues/8430
                cast(
                    MutableMapping[str, JsonMapping], immutable_global_account_data_map
                ),
            )

        # Fetch room account data
        #
        account_data_by_room_map: MutableMapping[str, Mapping[str, JsonMapping]] = {}
        relevant_room_ids = self.find_relevant_room_ids_for_extension(
            requested_lists=account_data_request.lists,
            requested_room_ids=account_data_request.rooms,
            actual_lists=actual_lists,
            actual_room_ids=actual_room_ids,
        )
        if len(relevant_room_ids) > 0:
            # We need to handle the different cases depending on if we have sent
            # down account data previously or not, so we split the relevant
            # rooms up into different collections based on status.
            live_rooms = set()
            previously_rooms: Dict[str, int] = {}
            initial_rooms = set()

            for room_id in relevant_room_ids:
                if not from_token:
                    initial_rooms.add(room_id)
                    continue

                room_status = previous_connection_state.account_data.have_sent_room(
                    room_id
                )
                if room_status.status == HaveSentRoomFlag.LIVE:
                    live_rooms.add(room_id)
                elif room_status.status == HaveSentRoomFlag.PREVIOUSLY:
                    assert room_status.last_token is not None
                    previously_rooms[room_id] = room_status.last_token
                elif room_status.status == HaveSentRoomFlag.NEVER:
                    initial_rooms.add(room_id)
                else:
                    assert_never(room_status.status)

            # We fetch all room account data since the from_token. This is so
            # that we can record which rooms have updates that haven't been sent
            # down.
            #
            # Mapping from room_id to mapping of `type` to `content` of room account
            # data events.
            all_updates_since_the_from_token: Mapping[
                str, Mapping[str, JsonMapping]
            ] = {}
            if from_token is not None:
                # TODO: This should take into account the `from_token` and `to_token`
                all_updates_since_the_from_token = (
                    await self.store.get_updated_room_account_data_for_user(
                        user_id, from_token.stream_token.account_data_key
                    )
                )

                # Add room tags
                #
                # TODO: This should take into account the `from_token` and `to_token`
                tags_by_room = await self.store.get_updated_tags(
                    user_id, from_token.stream_token.account_data_key
                )
                for room_id, tags in tags_by_room.items():
                    all_updates_since_the_from_token.setdefault(room_id, {})[
                        AccountDataTypes.TAG
                    ] = {"tags": tags}

            # For live rooms we just get the updates from `all_updates_since_the_from_token`
            if live_rooms:
                for room_id in all_updates_since_the_from_token.keys() & live_rooms:
                    account_data_by_room_map[room_id] = (
                        all_updates_since_the_from_token[room_id]
                    )

            # For previously and initial rooms we query each room individually.
            if previously_rooms or initial_rooms:

                async def handle_previously(room_id: str) -> None:
                    # Either get updates or all account data in the room
                    # depending on if the room state is PREVIOUSLY or NEVER.
                    previous_token = previously_rooms.get(room_id)
                    if previous_token is not None:
                        room_account_data = await (
                            self.store.get_updated_room_account_data_for_user_for_room(
                                user_id=user_id,
                                room_id=room_id,
                                from_stream_id=previous_token,
                                to_stream_id=to_token.account_data_key,
                            )
                        )

                        # Add room tags
                        changed = await self.store.has_tags_changed_for_room(
                            user_id=user_id,
                            room_id=room_id,
                            from_stream_id=previous_token,
                            to_stream_id=to_token.account_data_key,
                        )
                        if changed:
                            # XXX: Ideally, this should take into account the `to_token`
                            # and return the set of tags at that time but we don't track
                            # changes to tags so we just have to return all tags for the
                            # room.
                            immutable_tag_map = await self.store.get_tags_for_room(
                                user_id, room_id
                            )
                            if immutable_tag_map:
                                room_account_data[AccountDataTypes.TAG] = {
                                    "tags": immutable_tag_map
                                }

                        # Only add an entry if there were any updates.
                        if room_account_data:
                            account_data_by_room_map[room_id] = room_account_data
                    else:
                        # TODO: This should take into account the `to_token`
                        immutable_room_account_data = (
                            await self.store.get_account_data_for_room(user_id, room_id)
                        )

                        # Add room tags
                        #
                        # XXX: Ideally, this should take into account the `to_token`
                        # and return the set of tags at that time but we don't track
                        # changes to tags so we just have to return all tags for the
                        # room.
                        immutable_tag_map = await self.store.get_tags_for_room(
                            user_id, room_id
                        )

                        account_data_by_room_map[room_id] = ChainMap(
                            {AccountDataTypes.TAG: {"tags": immutable_tag_map}}
                            if immutable_tag_map
                            else {},
                            # Cast is safe because `ChainMap` only mutates the top-most map,
                            # see https://github.com/python/typeshed/issues/8430
                            cast(
                                MutableMapping[str, JsonMapping],
                                immutable_room_account_data,
                            ),
                        )

                # We handle these rooms concurrently to speed it up.
                await concurrently_execute(
                    handle_previously,
                    previously_rooms.keys() | initial_rooms,
                    limit=20,
                )

            # Now record which rooms are now up to date, and which rooms have
            # pending updates to send.
            new_connection_state.account_data.record_sent_rooms(relevant_room_ids)
            missing_updates = (
                all_updates_since_the_from_token.keys() - relevant_room_ids
            )
            if missing_updates:
                # If we have missing updates then we must have had a from_token.
                assert from_token is not None

                new_connection_state.account_data.record_unsent_rooms(
                    missing_updates, from_token.stream_token.account_data_key
                )

        return SlidingSyncResult.Extensions.AccountDataExtension(
            global_account_data_map=global_account_data_map,
            account_data_by_room_map=account_data_by_room_map,
        )

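The `ChainMap` trick used twice above layers a small mutable overlay over a cached immutable mapping, so the cached data is neither copied nor mutated. A standalone sketch of the pattern (the sample keys are illustrative, not the real cache contents):

from collections import ChainMap
from typing import Mapping, MutableMapping, cast

# Pretend this came from a shared cache: it must not be copied (large) or
# mutated (shared with other requests).
cached: Mapping[str, dict] = {"m.push_rules": {"global": {}}, "m.direct": {}}

# Overlay holding only the entries we want to add or override for this response.
overlay: MutableMapping[str, dict] = {"m.push_rules": {"global": {"override": []}}}

# ChainMap searches the overlay first, then the cache; writes only ever go to
# the first mapping, so `cached` stays untouched (which is why the cast is safe).
merged = ChainMap(overlay, cast(MutableMapping[str, dict], cached))

assert merged["m.push_rules"] == {"global": {"override": []}}  # from the overlay
assert merged["m.direct"] == {}  # falls through to the cache
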
    @trace
    async def get_receipts_extension_response(
        self,
        sync_config: SlidingSyncConfig,
        previous_connection_state: "PerConnectionState",
        new_connection_state: "MutablePerConnectionState",
        actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList],
        actual_room_ids: Set[str],
        actual_room_response_map: Mapping[str, SlidingSyncResult.RoomResult],
        receipts_request: SlidingSyncConfig.Extensions.ReceiptsExtension,
        to_token: StreamToken,
        from_token: Optional[SlidingSyncStreamToken],
    ) -> Optional[SlidingSyncResult.Extensions.ReceiptsExtension]:
        """Handle Receipts extension (MSC3960)

        Args:
            sync_config: Sync configuration
            previous_connection_state: The current per-connection state
            new_connection_state: A mutable copy of the per-connection
                state, used to record updates to the state.
            actual_lists: Sliding window API. A map of list key to list results in the
                Sliding Sync response.
            actual_room_ids: The actual room IDs in the Sliding Sync response.
            actual_room_response_map: A map of room ID to room results in the
                Sliding Sync response.
            receipts_request: The receipts extension from the request
            to_token: The point in the stream to sync up to.
            from_token: The point in the stream to sync from.
        """
        # Skip if the extension is not enabled
        if not receipts_request.enabled:
            return None

        relevant_room_ids = self.find_relevant_room_ids_for_extension(
            requested_lists=receipts_request.lists,
            requested_room_ids=receipts_request.rooms,
            actual_lists=actual_lists,
            actual_room_ids=actual_room_ids,
        )

        room_id_to_receipt_map: Dict[str, JsonMapping] = {}
        if len(relevant_room_ids) > 0:
            # We need to handle the different cases depending on if we have sent
            # down receipts previously or not, so we split the relevant rooms
            # up into different collections based on status.
            live_rooms = set()
            previously_rooms: Dict[str, MultiWriterStreamToken] = {}
            initial_rooms = set()

            for room_id in relevant_room_ids:
                if not from_token:
                    initial_rooms.add(room_id)
                    continue

                # If we're sending down the room from scratch again for some
                # reason, we should always resend the receipts as well
                # (regardless of if we've sent them down before). This is to
                # mimic the behaviour of what happens on initial sync, where you
                # get a chunk of timeline with all of the corresponding receipts
                # for the events in the timeline.
                #
                # We also resend down receipts when we "expand" the timeline,
                # (see the "XXX: Odd behavior" in
                # `synapse.handlers.sliding_sync`).
                room_result = actual_room_response_map.get(room_id)
                if room_result is not None:
                    if room_result.initial or room_result.unstable_expanded_timeline:
                        initial_rooms.add(room_id)
                        continue

                room_status = previous_connection_state.receipts.have_sent_room(room_id)
                if room_status.status == HaveSentRoomFlag.LIVE:
                    live_rooms.add(room_id)
                elif room_status.status == HaveSentRoomFlag.PREVIOUSLY:
                    assert room_status.last_token is not None
                    previously_rooms[room_id] = room_status.last_token
                elif room_status.status == HaveSentRoomFlag.NEVER:
                    initial_rooms.add(room_id)
                else:
                    assert_never(room_status.status)

            # The set of receipts that we fetched. Private receipts need to be
            # filtered out before returning.
            fetched_receipts = []

            # For live rooms we just fetch all receipts in those rooms since the
            # `since` token.
            if live_rooms:
                assert from_token is not None
                receipts = await self.store.get_linearized_receipts_for_rooms(
                    room_ids=live_rooms,
                    from_key=from_token.stream_token.receipt_key,
                    to_key=to_token.receipt_key,
                )
                fetched_receipts.extend(receipts)

            # For rooms we've previously sent down, but aren't up to date, we
            # need to use the from token from the room status.
            if previously_rooms:
                # Fetch any missing rooms concurrently.

                async def handle_previously_room(room_id: str) -> None:
                    receipt_token = previously_rooms[room_id]
                    # TODO: Limit the number of receipts we're about to send down
                    # for the room, if it's too many we should TODO
                    previously_receipts = (
                        await self.store.get_linearized_receipts_for_room(
                            room_id=room_id,
                            from_key=receipt_token,
                            to_key=to_token.receipt_key,
                        )
                    )
                    fetched_receipts.extend(previously_receipts)

                await concurrently_execute(
                    handle_previously_room, previously_rooms.keys(), 20
                )

            if initial_rooms:
                # We also always send down receipts for the current user.
                user_receipts = (
                    await self.store.get_linearized_receipts_for_user_in_rooms(
                        user_id=sync_config.user.to_string(),
                        room_ids=initial_rooms,
                        to_key=to_token.receipt_key,
                    )
                )

                # For rooms we haven't previously sent down, we could send all receipts
                # from that room but we only want to include receipts for events
                # in the timeline to avoid bloating and blowing up the sync response
                # as the number of users in the room increases. (this behavior is part of the spec)
                initial_rooms_and_event_ids = [
                    (room_id, event.event_id)
                    for room_id in initial_rooms
                    if room_id in actual_room_response_map
                    for event in actual_room_response_map[room_id].timeline_events
                ]
                initial_receipts = await self.store.get_linearized_receipts_for_events(
                    room_and_event_ids=initial_rooms_and_event_ids,
                )

                # Combine the receipts for a room and add them to
                # `fetched_receipts`
                for room_id in initial_receipts.keys() | user_receipts.keys():
                    receipt_content = ReceiptInRoom.merge_to_content(
                        list(
                            itertools.chain(
                                initial_receipts.get(room_id, []),
                                user_receipts.get(room_id, []),
                            )
                        )
                    )

                    fetched_receipts.append(
                        {
                            "room_id": room_id,
                            "type": EduTypes.RECEIPT,
                            "content": receipt_content,
                        }
                    )

            fetched_receipts = ReceiptEventSource.filter_out_private_receipts(
                fetched_receipts, sync_config.user.to_string()
            )

            for receipt in fetched_receipts:
                # These fields should exist for every receipt
                room_id = receipt["room_id"]
                type = receipt["type"]
                content = receipt["content"]

                room_id_to_receipt_map[room_id] = {"type": type, "content": content}

        # Now we update the per-connection state to track which receipts we have
        # and haven't sent down.
        new_connection_state.receipts.record_sent_rooms(relevant_room_ids)

        if from_token:
            # Now find the set of rooms that may have receipts that we're not sending
            # down. We only need to check rooms that we have previously returned
            # receipts for (in `previous_connection_state`) because we only care about
            # updating `LIVE` rooms to `PREVIOUSLY`. The `PREVIOUSLY` rooms will just
            # stay pointing at their previous position, so we don't need to waste time
            # checking those, and since we default to `NEVER`, rooms that were `NEVER`
            # sent before don't need to be recorded as we'll handle them correctly when
            # they come into range for the first time.
            rooms_no_receipts = [
                room_id
                for room_id, room_status in previous_connection_state.receipts._statuses.items()
                if room_status.status == HaveSentRoomFlag.LIVE
                and room_id not in relevant_room_ids
            ]
            changed_rooms = await self.store.get_rooms_with_receipts_between(
                rooms_no_receipts,
                from_key=from_token.stream_token.receipt_key,
                to_key=to_token.receipt_key,
            )
            new_connection_state.receipts.record_unsent_rooms(
                changed_rooms, from_token.stream_token.receipt_key
            )

        return SlidingSyncResult.Extensions.ReceiptsExtension(
            room_id_to_receipt_map=room_id_to_receipt_map,
        )

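Both the account-data and receipts paths above branch on the same three per-room states. A minimal sketch of that bookkeeping, assuming (from the code above) that each room carries a flag plus an optional `last_token` — the real `PerConnectionState` tracks this separately per extension and per connection:

from enum import Enum, auto
from typing import Dict, Iterable, NamedTuple, Optional

class HaveSentRoomFlag(Enum):
    NEVER = auto()       # never sent down this connection; send everything
    PREVIOUSLY = auto()  # sent before, accurate only up to `last_token`
    LIVE = auto()        # fully up to date as of the last response

class RoomStatus(NamedTuple):
    status: HaveSentRoomFlag
    last_token: Optional[int]  # stream position; only set for PREVIOUSLY

class RoomTracker:
    def __init__(self) -> None:
        self._statuses: Dict[str, RoomStatus] = {}

    def have_sent_room(self, room_id: str) -> RoomStatus:
        # Unknown rooms default to NEVER, as the comments above rely on.
        return self._statuses.get(room_id, RoomStatus(HaveSentRoomFlag.NEVER, None))

    def record_sent_rooms(self, room_ids: Iterable[str]) -> None:
        # Everything we just sent down is now fully caught up.
        for room_id in room_ids:
            self._statuses[room_id] = RoomStatus(HaveSentRoomFlag.LIVE, None)

    def record_unsent_rooms(self, room_ids: Iterable[str], from_token: int) -> None:
        # Rooms with updates we deliberately skipped drop back to PREVIOUSLY,
        # pinned at the position they were last accurate for.
        for room_id in room_ids:
            self._statuses[room_id] = RoomStatus(HaveSentRoomFlag.PREVIOUSLY, from_token)
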
    async def get_typing_extension_response(
        self,
        sync_config: SlidingSyncConfig,
        actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList],
        actual_room_ids: Set[str],
        actual_room_response_map: Mapping[str, SlidingSyncResult.RoomResult],
        typing_request: SlidingSyncConfig.Extensions.TypingExtension,
        to_token: StreamToken,
        from_token: Optional[SlidingSyncStreamToken],
    ) -> Optional[SlidingSyncResult.Extensions.TypingExtension]:
        """Handle Typing Notification extension (MSC3961)

        Args:
            sync_config: Sync configuration
            actual_lists: Sliding window API. A map of list key to list results in the
                Sliding Sync response.
            actual_room_ids: The actual room IDs in the Sliding Sync response.
            actual_room_response_map: A map of room ID to room results in the
                Sliding Sync response.
            typing_request: The typing extension from the request
            to_token: The point in the stream to sync up to.
            from_token: The point in the stream to sync from.
        """
        # Skip if the extension is not enabled
        if not typing_request.enabled:
            return None

        relevant_room_ids = self.find_relevant_room_ids_for_extension(
            requested_lists=typing_request.lists,
            requested_room_ids=typing_request.rooms,
            actual_lists=actual_lists,
            actual_room_ids=actual_room_ids,
        )

        room_id_to_typing_map: Dict[str, JsonMapping] = {}
        if len(relevant_room_ids) > 0:
            # Note: We don't need to take connection tracking into account for typing
            # notifications because they'll get anything that is still relevant and
            # hasn't timed out when the room comes into range. We consider the gap
            # where the room fell out of range long enough for any typing
            # notifications to have timed out (it's not worth the 30 seconds of data
            # we may have missed).
            typing_source = self.event_sources.sources.typing
            typing_notifications, _ = await typing_source.get_new_events(
                user=sync_config.user,
                from_key=(from_token.stream_token.typing_key if from_token else 0),
                to_key=to_token.typing_key,
                # This is a dummy value and isn't used in the function
                limit=0,
                room_ids=relevant_room_ids,
                is_guest=False,
            )

            for typing_notification in typing_notifications:
                # These fields should exist for every typing notification
                room_id = typing_notification["room_id"]
                type = typing_notification["type"]
                content = typing_notification["content"]

                room_id_to_typing_map[room_id] = {"type": type, "content": content}

        return SlidingSyncResult.Extensions.TypingExtension(
            room_id_to_typing_map=room_id_to_typing_map,
        )

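Each extension above scopes itself with `find_relevant_room_ids_for_extension`, whose body is not shown in this diff. A plausible sketch of its matching rules, assuming "*" acts as a wildcard for both lists and rooms (an assumption, not the confirmed implementation):

from typing import AbstractSet, Mapping, Optional, Sequence, Set

def find_relevant_room_ids(
    requested_lists: Optional[Sequence[str]],
    requested_room_ids: Optional[Sequence[str]],
    actual_lists: Mapping[str, Sequence[str]],  # list key -> room IDs in the response
    actual_room_ids: AbstractSet[str],
) -> Set[str]:
    """Sketch: an extension only applies to rooms the response actually
    contains, scoped by the lists/rooms the client asked for ("*" = all)."""
    relevant: Set[str] = set()
    if requested_lists is not None:
        for list_key in requested_lists:
            if list_key == "*":
                for rooms in actual_lists.values():
                    relevant.update(rooms)
            elif list_key in actual_lists:
                relevant.update(actual_lists[list_key])
    if requested_room_ids is not None:
        for room_id in requested_room_ids:
            if room_id == "*":
                relevant.update(actual_room_ids)
            elif room_id in actual_room_ids:
                relevant.add(room_id)
    return relevant
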
1943  synapse/handlers/sliding_sync/room_lists.py  (new file)
File diff suppressed because it is too large

128  synapse/handlers/sliding_sync/store.py  (new file)
@@ -0,0 +1,128 @@
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#

import logging
from typing import TYPE_CHECKING, Optional

import attr

from synapse.logging.opentracing import trace
from synapse.storage.databases.main import DataStore
from synapse.types import SlidingSyncStreamToken
from synapse.types.handlers.sliding_sync import (
    MutablePerConnectionState,
    PerConnectionState,
    SlidingSyncConfig,
)

if TYPE_CHECKING:
    pass

logger = logging.getLogger(__name__)


@attr.s(auto_attribs=True)
class SlidingSyncConnectionStore:
    """In-memory store of per-connection state, including what rooms we have
    previously sent down a sliding sync connection.

    Note: This is NOT safe to run in a worker setup because connection positions will
    point to different sets of rooms on different workers. e.g. for the same connection,
    a connection position of 5 might have totally different states on worker A and
    worker B.

    One complication that we need to deal with here is needing to handle requests being
    resent, i.e. if we sent down a room in a response that the client received, we must
    consider the room *not* sent when we get the request again.

    This is handled by using an integer "token", which is returned to the client
    as part of the sync token. For each connection we store a mapping from
    tokens to the room states, and create a new entry when we send down new
    rooms.

    Note that for any given sliding sync connection we will only store a maximum
    of two different tokens: the previous token from the request and a new token
    sent in the response. When we receive a request with a given token, we then
    clear out all other entries with a different token.

    Attributes:
        _connections: Mapping from `(user_id, conn_id)` to mapping of `token`
            to mapping of room ID to `HaveSentRoom`.
    """

    store: "DataStore"

    async def get_and_clear_connection_positions(
        self,
        sync_config: SlidingSyncConfig,
        from_token: Optional[SlidingSyncStreamToken],
    ) -> PerConnectionState:
        """Fetch the per-connection state for the token.

        Raises:
            SlidingSyncUnknownPosition if the connection_token is unknown
        """
        # If this is our first request, there is no previous connection state to fetch out of the database
        if from_token is None or from_token.connection_position == 0:
            return PerConnectionState()

        conn_id = sync_config.conn_id or ""

        device_id = sync_config.requester.device_id
        assert device_id is not None

        return await self.store.get_and_clear_connection_positions(
            sync_config.user.to_string(),
            device_id,
            conn_id,
            from_token.connection_position,
        )

    @trace
    async def record_new_state(
        self,
        sync_config: SlidingSyncConfig,
        from_token: Optional[SlidingSyncStreamToken],
        new_connection_state: MutablePerConnectionState,
    ) -> int:
        """Record updated per-connection state, returning the connection
        position associated with the new state.
        If there are no changes to the state this may return the same token as
        the existing per-connection state.
        """
        if not new_connection_state.has_updates():
            if from_token is not None:
                return from_token.connection_position
            else:
                return 0

        # A from token with a zero connection position means there was no
        # previously stored connection state, so we treat a zero the same as
        # there being no previous position.
        previous_connection_position = None
        if from_token is not None and from_token.connection_position != 0:
            previous_connection_position = from_token.connection_position

        conn_id = sync_config.conn_id or ""

        device_id = sync_config.requester.device_id
        assert device_id is not None

        return await self.store.persist_per_connection_state(
            sync_config.user.to_string(),
            device_id,
            conn_id,
            previous_connection_position,
            new_connection_state,
        )

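The class docstring above describes the two-token scheme; this branch moves the state itself into the database, but the same rotation can be sketched in memory (the shapes below are illustrative, not the actual schema):

from typing import Dict, Tuple

class InMemoryConnectionPositions:
    """Sketch of the two-token scheme from the docstring: keep the position the
    client sent plus the one we are about to return, and drop everything else
    once the client has echoed a position back."""

    def __init__(self) -> None:
        # (user_id, conn_id) -> {connection_position: per-connection state}
        self._connections: Dict[Tuple[str, str], Dict[int, dict]] = {}
        self._next_position = 1

    def get_and_clear(self, key: Tuple[str, str], position: int) -> dict:
        positions = self._connections.setdefault(key, {})
        if position not in positions:
            raise KeyError("unknown connection position")  # cf. SlidingSyncUnknownPosition
        # The client has seen `position`; any other stored position is now
        # either acknowledged or abandoned, so clear it out.
        for other in list(positions):
            if other != position:
                del positions[other]
        return positions[position]

    def record_new_state(self, key: Tuple[str, str], state: dict) -> int:
        position = self._next_position
        self._next_position += 1
        self._connections.setdefault(key, {})[position] = state
        return position
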
@@ -183,10 +183,7 @@ class JoinedSyncResult:
         to tell if room needs to be part of the sync result.
         """
         return bool(
-            self.timeline
-            or self.state
-            or self.ephemeral
-            or self.account_data
+            self.timeline or self.state or self.ephemeral or self.account_data
             # nb the notification count does not, er, count: if there's nothing
             # else in the result, we don't need to send it.
         )
@@ -575,10 +572,10 @@
         if timeout == 0 or since_token is None or full_state:
             # we are going to return immediately, so don't bother calling
             # notifier.wait_for_events.
-            result: Union[SyncResult, E2eeSyncResult] = (
-                await self.current_sync_for_user(
-                    sync_config, sync_version, since_token, full_state=full_state
-                )
-            )
+            result: Union[
+                SyncResult, E2eeSyncResult
+            ] = await self.current_sync_for_user(
+                sync_config, sync_version, since_token, full_state=full_state
+            )
         else:
             # Otherwise, we wait for something to happen and report it to the user.
@@ -673,10 +670,10 @@
 
         # Go through the `/sync` v2 path
         if sync_version == SyncVersion.SYNC_V2:
-            sync_result: Union[SyncResult, E2eeSyncResult] = (
-                await self.generate_sync_result(
-                    sync_config, since_token, full_state
-                )
-            )
+            sync_result: Union[
+                SyncResult, E2eeSyncResult
+            ] = await self.generate_sync_result(
+                sync_config, since_token, full_state
+            )
         # Go through the MSC3575 Sliding Sync `/sync/e2ee` path
         elif sync_version == SyncVersion.E2EE_SYNC:
@@ -909,7 +906,7 @@
             # Use `stream_ordering` for updates
             else paginate_room_events_by_stream_ordering
         )
-        events, end_key = await pagination_method(
+        events, end_key, limited = await pagination_method(
             room_id=room_id,
             # The bounds are reversed so we can paginate backwards
             # (from newer to older events) starting at to_bound.
@@ -917,9 +914,7 @@
             from_key=end_key,
             to_key=since_key,
             direction=Direction.BACKWARDS,
-            # We add one so we can determine if there are enough events to saturate
-            # the limit or not (see `limited`)
-            limit=load_limit + 1,
+            limit=load_limit,
         )
         # We want to return the events in ascending order (the last event is the
         # most recent).
@@ -974,9 +969,6 @@
                 loaded_recents.extend(recents)
                 recents = loaded_recents
 
-                if len(events) <= load_limit:
-                    limited = False
-                    break
                 max_repeat -= 1
 
             if len(recents) > timeline_limit:
@@ -1488,13 +1480,16 @@
             # timeline here. The caller will then dedupe any redundant
             # ones.
 
-            state_ids = await self._state_storage_controller.get_state_ids_for_event(
-                batch.events[0].event_id,
-                # we only want members!
-                state_filter=StateFilter.from_types(
-                    (EventTypes.Member, member) for member in members_to_fetch
-                ),
-                await_full_state=False,
-            )
+            state_ids = (
+                await self._state_storage_controller.get_state_ids_for_event(
+                    batch.events[0].event_id,
+                    # we only want members!
+                    state_filter=StateFilter.from_types(
+                        (EventTypes.Member, member)
+                        for member in members_to_fetch
+                    ),
+                    await_full_state=False,
+                )
+            )
             return state_ids
 
@@ -2166,18 +2161,18 @@
 
         if push_rules_changed:
             global_account_data = dict(global_account_data)
-            global_account_data[AccountDataTypes.PUSH_RULES] = (
-                await self._push_rules_handler.push_rules_for_user(sync_config.user)
-            )
+            global_account_data[
+                AccountDataTypes.PUSH_RULES
+            ] = await self._push_rules_handler.push_rules_for_user(sync_config.user)
         else:
             all_global_account_data = await self.store.get_global_account_data_for_user(
                 user_id
             )
 
             global_account_data = dict(all_global_account_data)
-            global_account_data[AccountDataTypes.PUSH_RULES] = (
-                await self._push_rules_handler.push_rules_for_user(sync_config.user)
-            )
+            global_account_data[
+                AccountDataTypes.PUSH_RULES
+            ] = await self._push_rules_handler.push_rules_for_user(sync_config.user)
 
         account_data_for_user = (
             await sync_config.filter_collection.filter_global_account_data(
@@ -2608,7 +2603,7 @@
 
             newly_joined = room_id in newly_joined_rooms
             if room_entry:
-                events, start_key = room_entry
+                events, start_key, _ = room_entry
                 # We want to return the events in ascending order (the last event is the
                 # most recent).
                 events.reverse()
 
@@ -183,7 +183,7 @@ class WorkerLocksHandler:
         return
 
     def _wake_all_locks(
-        locks: Collection[Union[WaitingLock, WaitingMultiLock]]
+        locks: Collection[Union[WaitingLock, WaitingMultiLock]],
     ) -> None:
         for lock in locks:
             deferred = lock.deferred
@@ -1057,11 +1057,11 @@ class _MultipartParserProtocol(protocol.Protocol):
         if not self.parser:
 
             def on_header_field(data: bytes, start: int, end: int) -> None:
-                if data[start:end] == b"Location":
+                if data[start:end].lower() == b"location":
                     self.has_redirect = True
-                if data[start:end] == b"Content-Disposition":
+                if data[start:end].lower() == b"content-disposition":
                     self.in_disposition = True
-                if data[start:end] == b"Content-Type":
+                if data[start:end].lower() == b"content-type":
                     self.in_content_type = True
 
             def on_header_value(data: bytes, start: int, end: int) -> None:
@@ -1088,7 +1088,6 @@
                 return
             # otherwise we are in the file part
             else:
-                logger.info("Writing multipart file data to stream")
                 try:
                     self.stream.write(data[start:end])
                 except Exception as e:
@@ -1314,6 +1313,5 @@ def is_unknown_endpoint(
         )
     ) or (
         # Older Synapses returned a 400 error.
-        e.code == 400
-        and synapse_error.errcode == Codes.UNRECOGNIZED
+        e.code == 400 and synapse_error.errcode == Codes.UNRECOGNIZED
     )
 
@@ -1756,8 +1756,10 @@ class MatrixFederationHttpClient:
             request.destination,
             str_url,
         )
+        # We don't know how large the response will be upfront, so limit it to
+        # the `max_size` config value.
         length, headers, _, _ = await self._simple_http_client.get_file(
-            str_url, output_stream, expected_size
+            str_url, output_stream, max_size
         )
 
         logger.info(
@@ -233,7 +233,7 @@ def return_html_error(
 
 
 def wrap_async_request_handler(
-    h: Callable[["_AsyncResource", "SynapseRequest"], Awaitable[None]]
+    h: Callable[["_AsyncResource", "SynapseRequest"], Awaitable[None]],
 ) -> Callable[["_AsyncResource", "SynapseRequest"], "defer.Deferred[None]"]:
     """Wraps an async request handler so that it calls request.processing.
 
 
@@ -37,19 +37,17 @@ from typing import (
     overload,
 )
 
-from synapse._pydantic_compat import HAS_PYDANTIC_V2
-
-if TYPE_CHECKING or HAS_PYDANTIC_V2:
-    from pydantic.v1 import BaseModel, MissingError, PydanticValueError, ValidationError
-    from pydantic.v1.error_wrappers import ErrorWrapper
-else:
-    from pydantic import BaseModel, MissingError, PydanticValueError, ValidationError
-    from pydantic.error_wrappers import ErrorWrapper
-
 from typing_extensions import Literal
 
 from twisted.web.server import Request
 
+from synapse._pydantic_compat import (
+    BaseModel,
+    ErrorWrapper,
+    MissingError,
+    PydanticValueError,
+    ValidationError,
+)
 from synapse.api.errors import Codes, SynapseError
 from synapse.http import redact_uri
 from synapse.http.server import HttpServer
 
@@ -22,6 +22,7 @@
 """
 Log formatters that output terse JSON.
 """
+
 import json
 import logging
 
@@ -20,7 +20,7 @@
 #
 #
 
-""" Thread-local-alike tracking of log contexts within synapse
+"""Thread-local-alike tracking of log contexts within synapse
 
 This module provides objects and utilities for tracking contexts through
 synapse code, so that log lines can include a request identifier, and so that
@@ -29,6 +29,7 @@ them.
 
 See doc/log_contexts.rst for details on how this works.
 """
+
 import logging
 import threading
 import typing
@@ -751,7 +752,7 @@ def preserve_fn(
     f: Union[
         Callable[P, R],
         Callable[P, Awaitable[R]],
-    ]
+    ],
 ) -> Callable[P, "defer.Deferred[R]"]:
     """Function decorator which wraps the function with run_in_background"""
 
 
@@ -169,6 +169,7 @@ Gotchas
     than one caller? Will all of those calling functions have be in a context
     with an active span?
 """
+
 import contextlib
 import enum
 import inspect
@@ -414,7 +415,7 @@ def ensure_active_span(
     """
 
     def ensure_active_span_inner_1(
-        func: Callable[P, R]
+        func: Callable[P, R],
     ) -> Callable[P, Union[Optional[T], R]]:
         @wraps(func)
         def ensure_active_span_inner_2(
@@ -700,7 +701,7 @@ def set_operation_name(operation_name: str) -> None:
 
 @only_if_tracing
 def force_tracing(
-    span: Union["opentracing.Span", _Sentinel] = _Sentinel.sentinel
+    span: Union["opentracing.Span", _Sentinel] = _Sentinel.sentinel,
 ) -> None:
     """Force sampling for the active/given span and its children.
 
@@ -1032,13 +1033,13 @@ def tag_args(func: Callable[P, R]) -> Callable[P, R]:
     def _wrapping_logic(
         _func: Callable[P, R], *args: P.args, **kwargs: P.kwargs
     ) -> Generator[None, None, None]:
-        # We use `[1:]` to skip the `self` object reference and `start=1` to
-        # make the index line up with `argspec.args`.
-        #
-        # FIXME: We could update this to handle any type of function by ignoring the
-        # first argument only if it's named `self` or `cls`. This isn't fool-proof
-        # but handles the idiomatic cases.
-        for i, arg in enumerate(args[1:], start=1):
+        for i, arg in enumerate(args, start=0):
+            if argspec.args[i] in ("self", "cls"):
+                # Ignore `self` and `cls` values. Ideally we'd properly detect
+                # if we were wrapping a method, but that is really non-trivial
+                # and this is good enough.
+                continue
+
             set_tag(SynapseTags.FUNC_ARG_PREFIX + argspec.args[i], str(arg))
         set_tag(SynapseTags.FUNC_ARGS, str(args[len(argspec.args) :]))
         set_tag(SynapseTags.FUNC_KWARGS, str(kwargs))
@@ -1093,9 +1094,10 @@ def trace_servlet(
 
     # Mypy seems to think that start_context.tag below can be Optional[str], but
     # that doesn't appear to be correct and works in practice.
-    request_tags[
-        SynapseTags.REQUEST_TAG
-    ] = request.request_metrics.start_context.tag  # type: ignore[assignment]
+
+    request_tags[SynapseTags.REQUEST_TAG] = (
+        request.request_metrics.start_context.tag  # type: ignore[assignment]
+    )
 
     # set the tags *after* the servlet completes, in case it decided to
     # prioritise the span (tags will get dropped on unprioritised spans)
 
@@ -22,7 +22,6 @@
 
 import logging
 import os
-import threading
 import urllib
 from abc import ABC, abstractmethod
 from types import TracebackType
@@ -50,14 +49,17 @@ from twisted.web.server import Request
 from synapse.api.errors import Codes, cs_error
 from synapse.http.server import finish_request, respond_with_json
 from synapse.http.site import SynapseRequest
-from synapse.logging.context import defer_to_thread, make_deferred_yieldable
-from synapse.types import ISynapseReactor
+from synapse.logging.context import (
+    defer_to_threadpool,
+    make_deferred_yieldable,
+    run_in_background,
+)
+from synapse.util import Clock
+from synapse.util.async_helpers import DeferredEvent
 from synapse.util.stringutils import is_ascii
 
 if TYPE_CHECKING:
     from synapse.storage.databases.main.media_repository import LocalMedia
+
+    from synapse.server import HomeServer
 
 logger = logging.getLogger(__name__)
 
@@ -128,6 +130,7 @@ def respond_404(request: SynapseRequest) -> None:
 
 
 async def respond_with_file(
+    hs: "HomeServer",
     request: SynapseRequest,
     media_type: str,
     file_path: str,
@@ -144,7 +147,7 @@ async def respond_with_file(
         add_file_headers(request, media_type, file_size, upload_name)
 
         with open(file_path, "rb") as f:
-            await ThreadedFileSender(request.reactor).beginFileTransfer(f, request)
+            await ThreadedFileSender(hs).beginFileTransfer(f, request)
 
         finish_request(request)
     else:
@@ -285,7 +288,9 @@ async def respond_with_multipart_responder(
     clock: Clock,
     request: SynapseRequest,
    responder: "Optional[Responder]",
-    media_info: "LocalMedia",
+    media_type: str,
+    media_length: Optional[int],
+    upload_name: Optional[str],
 ) -> None:
     """
     Responds to requests originating from the federation media `/download` endpoint by
@@ -309,7 +314,7 @@
         )
         return
 
-    if media_info.media_type.lower().split(";", 1)[0] in INLINE_CONTENT_TYPES:
+    if media_type.lower().split(";", 1)[0] in INLINE_CONTENT_TYPES:
         disposition = "inline"
     else:
         disposition = "attachment"
@@ -317,16 +322,16 @@
     def _quote(x: str) -> str:
         return urllib.parse.quote(x.encode("utf-8"))
 
-    if media_info.upload_name:
-        if _can_encode_filename_as_token(media_info.upload_name):
+    if upload_name:
+        if _can_encode_filename_as_token(upload_name):
             disposition = "%s; filename=%s" % (
                 disposition,
-                media_info.upload_name,
+                upload_name,
             )
         else:
             disposition = "%s; filename*=utf-8''%s" % (
                 disposition,
-                _quote(media_info.upload_name),
+                _quote(upload_name),
             )
 
     from synapse.media.media_storage import MultipartFileConsumer
@@ -336,14 +341,14 @@
     multipart_consumer = MultipartFileConsumer(
         clock,
         request,
-        media_info.media_type,
+        media_type,
         {},
         disposition,
-        media_info.media_length,
+        media_length,
     )
 
     logger.debug("Responding to media request with responder %s", responder)
-    if media_info.media_length is not None:
+    if media_length is not None:
         content_length = multipart_consumer.content_length()
         assert content_length is not None
         request.setHeader(b"Content-Length", b"%d" % (content_length,))
@@ -615,10 +620,13 @@ class ThreadedFileSender:
     A producer that sends the contents of a file to a consumer, reading from the
     file on a thread.
 
-    This works by spawning a loop in a threadpool that repeatedly reads from the
-    file and sends it to the consumer. The main thread communicates with the
-    loop via two `threading.Event`, which controls when to start/pause reading
-    and when to terminate.
+    This works by having a loop in a threadpool repeatedly reading from the
+    file, until the consumer pauses the producer. There is then a loop in the
+    main thread that waits until the consumer resumes the producer and then
+    starts reading in the threadpool again.
+
+    This is done to ensure that we're never waiting in the threadpool, as
+    otherwise it's easy to starve it of threads.
     """
 
     # How much data to read in one go.
@@ -628,8 +636,9 @@
     # read.
     TIMEOUT_SECONDS = 90.0
 
-    def __init__(self, reactor: ISynapseReactor) -> None:
-        self.reactor = reactor
+    def __init__(self, hs: "HomeServer") -> None:
+        self.reactor = hs.get_reactor()
+        self.thread_pool = hs.get_media_sender_thread_pool()
 
         self.file: Optional[BinaryIO] = None
         self.deferred: "Deferred[None]" = Deferred()
@@ -637,12 +646,11 @@
 
         # Signals if the thread should keep reading/sending data. Set means
        # continue, clear means pause.
-        self.wakeup_event = threading.Event()
+        self.wakeup_event = DeferredEvent(self.reactor)
 
         # Signals if the thread should terminate, e.g. because the consumer has
-        # gone away. Both this and `wakeup_event` should be set to terminate the
-        # loop (otherwise the thread will block on `wakeup_event`).
-        self.stop_event = threading.Event()
+        # gone away.
+        self.stop_writing = False
 
     def beginFileTransfer(
         self, file: BinaryIO, consumer: interfaces.IConsumer
@@ -657,7 +665,7 @@
 
         # We set the wakeup signal as we should start producing immediately.
         self.wakeup_event.set()
-        defer_to_thread(self.reactor, self._on_thread_read_loop)
+        run_in_background(self.start_read_loop)
 
         return make_deferred_yieldable(self.deferred)
 
@@ -672,42 +680,58 @@
     def stopProducing(self) -> None:
         """interfaces.IPushProducer"""
 
-        # Terminate the thread loop.
+        # Unregister the consumer so we don't try and interact with it again.
+        if self.consumer:
+            self.consumer.unregisterProducer()
+
+        self.consumer = None
+
+        # Terminate the loop.
+        self.stop_writing = True
         self.wakeup_event.set()
-        self.stop_event.set()
 
         if not self.deferred.called:
             self.deferred.errback(Exception("Consumer asked us to stop producing"))
 
-    def _on_thread_read_loop(self) -> None:
-        """This is the loop that happens on a thread."""
-
+    async def start_read_loop(self) -> None:
+        """This is the loop that drives reading/writing"""
        try:
-            while not self.stop_event.is_set():
-                # We wait for the producer to signal that the consumer wants
-                # more data (or we should abort)
+            while not self.stop_writing:
+                # Start the loop in the threadpool to read data.
+                more_data = await defer_to_threadpool(
+                    self.reactor, self.thread_pool, self._on_thread_read_loop
+                )
+                if not more_data:
+                    # Reached EOF, we can just return.
+                    return
+
                 if not self.wakeup_event.is_set():
-                    ret = self.wakeup_event.wait(self.TIMEOUT_SECONDS)
+                    ret = await self.wakeup_event.wait(self.TIMEOUT_SECONDS)
                     if not ret:
                         raise Exception("Timed out waiting to resume")
 
-                # Check if we were woken up so that we abort the download
-                if self.stop_event.is_set():
-                    return
-
-                # The file should always have been set before we get here.
-                assert self.file is not None
-
-                chunk = self.file.read(self.CHUNK_SIZE)
-                if not chunk:
-                    return
-
-                self.reactor.callFromThread(self._write, chunk)
-
         except Exception:
-            self.reactor.callFromThread(self._error, Failure())
+            self._error(Failure())
         finally:
-            self.reactor.callFromThread(self._finish)
+            self._finish()
+
+    def _on_thread_read_loop(self) -> bool:
+        """This is the loop that happens on a thread.
+
+        Returns:
+            Whether there is more data to send.
+        """
+
+        while not self.stop_writing and self.wakeup_event.is_set():
+            # The file should always have been set before we get here.
+            assert self.file is not None
+
+            chunk = self.file.read(self.CHUNK_SIZE)
+            if not chunk:
+                return False
+
+            self.reactor.callFromThread(self._write, chunk)
+
+        return True
 
     def _write(self, chunk: bytes) -> None:
         """Called from the thread to write a chunk of data"""
@@ -715,7 +739,7 @@
             self.consumer.write(chunk)
 
     def _error(self, failure: Failure) -> None:
-        """Called from the thread when there was a fatal error"""
+        """Called when there was a fatal error"""
         if self.consumer:
             self.consumer.unregisterProducer()
             self.consumer = None
@@ -724,7 +748,7 @@
             self.deferred.errback(failure)
 
     def _finish(self) -> None:
-        """Called from the thread when it finishes (either on success or
+        """Called when we have finished writing (either on success or
         failure)."""
         if self.file:
             self.file.close()

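The rewritten `ThreadedFileSender` above only occupies a threadpool thread while there is actual reading to do, and does all waiting back on the main thread. A simplified sketch of the same pause/resume shape using asyncio rather than Twisted (the real code uses a reactor-aware `DeferredEvent`; chunk handling and names here are illustrative):

import asyncio
from typing import BinaryIO, Callable

CHUNK_SIZE = 64 * 1024  # mirrors the idea of ThreadedFileSender.CHUNK_SIZE

async def send_file(
    file: BinaryIO,
    write: Callable[[bytes], None],
    resume_event: asyncio.Event,
) -> None:
    """Read on a worker thread only while the consumer wants data; do all
    waiting here on the event loop, so worker threads are never parked."""
    loop = asyncio.get_running_loop()

    def read_while_wanted() -> bool:
        # Runs on a worker thread: push chunks until paused or EOF.
        while resume_event.is_set():
            chunk = file.read(CHUNK_SIZE)
            if not chunk:
                return False  # EOF: nothing more to send
            loop.call_soon_threadsafe(write, chunk)
        return True  # paused; more data may remain

    while True:
        more = await loop.run_in_executor(None, read_while_wanted)
        if not more:
            return
        # The consumer paused us: wait on the event loop, not in the executor.
        await resume_event.wait()
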
@@ -471,7 +471,7 @@ class MediaRepository:
         responder = await self.media_storage.fetch_media(file_info)
         if federation:
             await respond_with_multipart_responder(
-                self.clock, request, responder, media_info
+                self.clock, request, responder, media_type, media_length, upload_name
             )
         else:
             await respond_with_responder(
@@ -1008,7 +1008,7 @@
         t_method: str,
         t_type: str,
         url_cache: bool,
-    ) -> Optional[str]:
+    ) -> Optional[Tuple[str, FileInfo]]:
         input_path = await self.media_storage.ensure_media_is_in_local_cache(
             FileInfo(None, media_id, url_cache=url_cache)
         )
@@ -1070,7 +1070,7 @@
                 t_len,
             )
 
-            return output_path
+            return output_path, file_info
 
         # Could not generate thumbnail.
         return None
 
@@ -57,7 +57,7 @@ from synapse.media._base import ThreadedFileSender
 from synapse.util import Clock
 from synapse.util.file_consumer import BackgroundFileConsumer
 
-from ..types import ISynapseReactor, JsonDict
+from ..types import JsonDict
 from ._base import FileInfo, Responder
 from .filepath import MediaFilePaths
 
@@ -209,7 +209,7 @@ class MediaStorage:
         local_path = os.path.join(self.local_media_directory, path)
         if os.path.exists(local_path):
             logger.debug("responding with local file %s", local_path)
-            return FileResponder(self.reactor, open(local_path, "rb"))
+            return FileResponder(self.hs, open(local_path, "rb"))
         logger.debug("local file %s did not exist", local_path)
 
         for provider in self.storage_providers:
@@ -332,14 +332,12 @@ class FileResponder(Responder):
     is closed when finished streaming.
     """
 
-    def __init__(self, reactor: ISynapseReactor, open_file: BinaryIO):
-        self.reactor = reactor
+    def __init__(self, hs: "HomeServer", open_file: BinaryIO):
+        self.hs = hs
         self.open_file = open_file
 
     def write_to_consumer(self, consumer: IConsumer) -> Deferred:
-        return ThreadedFileSender(self.reactor).beginFileTransfer(
-            self.open_file, consumer
-        )
+        return ThreadedFileSender(self.hs).beginFileTransfer(self.open_file, consumer)
 
     def __exit__(
         self,
@@ -546,7 +544,7 @@ class MultipartFileConsumer:
         Calculate the content length of the multipart response
         in bytes.
         """
-        if not self.length:
+        if self.length is None:
             return None
         # calculate length of json field and content-type, disposition headers
         json_field = json.dumps(self.json_field)

Some files were not shown because too many files have changed in this diff.