Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-09 01:30:18 +00:00)

Compare commits: quenting/t ... hs/delayed (399 commits, 0ce56923da through bf0370162f)
```diff
@@ -99,24 +99,24 @@ set_output("trial_test_matrix", test_matrix)
 # First calculate the various sytest jobs.
 #
-# For each type of test we only run on bullseye on PRs
+# For each type of test we only run on bookworm on PRs
 
 
 sytest_tests = [
     {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
     },
     {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
         "postgres": "postgres",
     },
     {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
         "postgres": "multi-postgres",
         "workers": "workers",
     },
     {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
         "postgres": "multi-postgres",
         "workers": "workers",
         "reactor": "asyncio",
@@ -127,11 +127,11 @@ if not IS_PR:
     sytest_tests.extend(
         [
             {
-                "sytest-tag": "bullseye",
+                "sytest-tag": "bookworm",
                 "reactor": "asyncio",
             },
             {
-                "sytest-tag": "bullseye",
+                "sytest-tag": "bookworm",
                 "postgres": "postgres",
                 "reactor": "asyncio",
             },
```
```diff
@@ -61,7 +61,7 @@ poetry run update_synapse_database --database-config .ci/postgres-config-unporte
 echo "+++ Comparing ported schema with unported schema"
 # Ignore the tables that portdb creates. (Should it tidy them up when the porting is completed?)
 psql synapse -c "DROP TABLE port_from_sqlite3;"
-pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse_unported > unported.sql
-pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse > ported.sql
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse_unported > unported.sql
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse > ported.sql
 # By default, `diff` returns zero if there are no changes and nonzero otherwise
 diff -u unported.sql ported.sql | tee schema_diff
```
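Both `pg_dump` invocations now pass `--restrict-key=TESTING`. Recent PostgreSQL point releases make `pg_dump` emit a `\restrict <key>` guard line with a randomized key at the top of each dump, so two otherwise-identical dumps would never compare equal; pinning the key keeps the dumps byte-for-byte diffable. A minimal local sketch of the same comparison, assuming a `pg_dump` new enough to support `--restrict-key` and the two database names used above:

```bash
#!/usr/bin/env bash
set -euo pipefail

# Dump both schemas with a fixed restrict key so the dumps differ only in
# real schema content, not in the randomized \restrict guard line.
for db in synapse_unported synapse; do
    pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner \
        --restrict-key=TESTING "$db" > "$db.sql"
done

# diff exits 0 when the schemas match and nonzero otherwise, so this line
# doubles as the pass/fail check.
diff -u synapse_unported.sql synapse.sql
```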
`.ci/scripts/triage_labelled_issue.sh` (new executable file, 29 lines added)

```diff
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# 1) Resolve project ID.
+PROJECT_ID=$(gh project view "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json | jq -r '.id')
+
+# 2) Find existing item (project card) for this issue.
+ITEM_ID=$(
+    gh project item-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json \
+        | jq -r --arg url "$ISSUE_URL" '.items[] | select(.content.url==$url) | .id' | head -n1
+)
+
+# 3) If one doesn't exist, add this issue to the project.
+if [ -z "${ITEM_ID:-}" ]; then
+    ITEM_ID=$(gh project item-add "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --url "$ISSUE_URL" --format json | jq -r '.id')
+fi
+
+# 4) Get Status field id + the option id for TARGET_STATUS.
+FIELDS_JSON=$(gh project field-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json)
+STATUS_FIELD=$(echo "$FIELDS_JSON" | jq -r '.fields[] | select(.name=="Status")')
+STATUS_FIELD_ID=$(echo "$STATUS_FIELD" | jq -r '.id')
+OPTION_ID=$(echo "$STATUS_FIELD" | jq -r --arg name "$TARGET_STATUS" '.options[] | select(.name==$name) | .id')
+
+if [ -z "${OPTION_ID:-}" ]; then
+    echo "No Status option named \"$TARGET_STATUS\" found"; exit 1
+fi
+
+# 5) Set Status (moves item to the matching column in the board view).
+gh project item-edit --id "$ITEM_ID" --project-id "$PROJECT_ID" --field-id "$STATUS_FIELD_ID" --single-select-option-id "$OPTION_ID"
```
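The script takes all of its inputs from the environment rather than from arguments. The change to `.github/workflows/triage_labelled.yml` further down supplies them via an `env:` block; a local dry run would look roughly like this (the issue URL here is a hypothetical placeholder):

```bash
# The token needs the scopes noted in the workflow:
# ["repo:public_repo", "admin:org->read:org", "user->read:user", "project"]
export GITHUB_TOKEN="<personal-access-token>"
export PROJECT_OWNER="matrix-org"
export PROJECT_NUMBER="67"         # backend issue triage board
export ISSUE_URL="https://github.com/element-hq/synapse/issues/1"  # hypothetical
export TARGET_STATUS="Needs info"  # case-sensitive
.ci/scripts/triage_labelled_issue.sh
```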
`.github/workflows/docker.yml` (vendored, 16 lines changed)

```diff
@@ -31,7 +31,7 @@ jobs:
         uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
 
       - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Extract version from pyproject.toml
         # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
@@ -41,13 +41,13 @@ jobs:
           echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
 
       - name: Log in to DockerHub
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Log in to GHCR
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
```
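As an aside, the version-extraction step above is a plain grep/sed pipeline; fed a hypothetical `version = "1.141.0rc1"` line it behaves like this (the version string is illustrative only):

```bash
# Simulate the pyproject.toml line the workflow greps for.
printf 'version = "1.141.0rc1"\n' \
    | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/'
# prints: 1.141.0rc1
```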
```diff
@@ -95,21 +95,21 @@ jobs:
       - build
     steps:
       - name: Download digests
-        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           path: ${{ runner.temp }}/digests
           pattern: digests-*
           merge-multiple: true
 
       - name: Log in to DockerHub
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         if: ${{ startsWith(matrix.repository, 'docker.io') }}
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Log in to GHCR
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         if: ${{ startsWith(matrix.repository, 'ghcr.io') }}
         with:
           registry: ghcr.io
@@ -120,10 +120,10 @@ jobs:
         uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
 
       - name: Install Cosign
-        uses: sigstore/cosign-installer@398d4b0eeef1380460a10c8013a76f728fb906ac # v3.9.1
+        uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
 
       - name: Calculate docker image tag
-        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
+        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
         with:
           images: ${{ matrix.repository }}
           flavor: |
```
`.github/workflows/docs-pr.yaml` (vendored, 6 lines changed)

```diff
@@ -13,7 +13,7 @@ jobs:
     name: GitHub Pages
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           # Fetch all history so that the schema_versions script works.
           fetch-depth: 0
@@ -24,7 +24,7 @@ jobs:
           mdbook-version: '0.4.17'
 
       - name: Setup python
-        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: "3.x"
 
@@ -50,7 +50,7 @@ jobs:
     name: Check links in documentation
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Setup mdbook
         uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
```
`.github/workflows/docs.yaml` (vendored, 4 lines changed)

```diff
@@ -50,7 +50,7 @@ jobs:
     needs:
       - pre
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           # Fetch all history so that the schema_versions script works.
           fetch-depth: 0
@@ -64,7 +64,7 @@ jobs:
         run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
 
       - name: Setup python
-        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: "3.x"
```
`.github/workflows/fix_lint.yaml` (vendored, 6 lines changed)

```diff
@@ -18,14 +18,14 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
           components: clippy, rustfmt
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
```
`.github/workflows/latest_deps.yml` (vendored, 32 lines changed)

```diff
@@ -42,12 +42,12 @@ jobs:
     if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       # The dev dependencies aren't exposed in the wheel metadata (at least with current
       # poetry-core versions), so we install with poetry.
@@ -60,7 +60,7 @@ jobs:
       - run: poetry run pip list > before.txt
       # Upgrade all runtime dependencies only. This is intended to mimic a fresh
       # `pip install matrix-synapse[all]` as closely as possible.
-      - run: poetry update --no-dev
+      - run: poetry update --without dev
       - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
       - name: Remove unhelpful options from mypy config
         run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
@@ -77,13 +77,13 @@ jobs:
         postgres-version: "14"
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.postgres-version }}
@@ -93,7 +93,7 @@ jobs:
             -e POSTGRES_PASSWORD=postgres \
             -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
             postgres:${{ matrix.postgres-version }}
-      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: "3.x"
       - run: pip install .[all,test]
@@ -139,9 +139,9 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - sytest-tag: bullseye
+          - sytest-tag: bookworm
 
-          - sytest-tag: bullseye
+          - sytest-tag: bookworm
            postgres: postgres
            workers: workers
            redis: redis
@@ -152,13 +152,13 @@ jobs:
       BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
 
    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
      - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
      - name: Ensure sytest runs `pip install`
        # Delete the lockfile so sytest will `pip install` rather than `poetry install`
@@ -202,14 +202,14 @@ jobs:
 
    steps:
      - name: Check out synapse codebase
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          path: synapse
 
      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh
 
-      - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
+      - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
        with:
          cache-dependency-path: complement/go.sum
          go-version-file: complement/go.mod
@@ -234,7 +234,7 @@ jobs:
    runs-on: ubuntu-latest
 
    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
`.github/workflows/poetry_lockfile.yaml` (vendored, 4 lines changed)

```diff
@@ -16,8 +16,8 @@ jobs:
     name: "Check locked dependencies have sdists"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: '3.x'
       - run: pip install tomli
```
`.github/workflows/push_complement_image.yml` (vendored, 10 lines changed)

```diff
@@ -33,29 +33,29 @@ jobs:
       packages: write
     steps:
       - name: Checkout specific branch (debug build)
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         if: github.event_name == 'workflow_dispatch'
         with:
           ref: ${{ inputs.branch }}
       - name: Checkout clean copy of develop (scheduled build)
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         if: github.event_name == 'schedule'
         with:
           ref: develop
       - name: Checkout clean copy of master (on-push)
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         if: github.event_name == 'push'
         with:
           ref: master
       - name: Login to registry
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Work out labels for complement image
         id: meta
-        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
+        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
         with:
           images: ghcr.io/${{ github.repository }}/complement-synapse
           tags: |
```
`.github/workflows/release-artifacts.yml` (vendored, 24 lines changed)

```diff
@@ -27,8 +27,8 @@ jobs:
     name: "Calculate list of debian distros"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: "3.x"
       - id: set-distros
@@ -55,7 +55,7 @@ jobs:
 
     steps:
       - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           path: src
 
@@ -66,7 +66,7 @@ jobs:
           install: true
 
       - name: Set up docker layer caching
-        uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: /tmp/.buildx-cache
           key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -74,7 +74,7 @@ jobs:
             ${{ runner.os }}-buildx-
 
       - name: Set up python
-        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: "3.x"
 
@@ -114,8 +114,8 @@ jobs:
         os:
           - ubuntu-24.04
           - ubuntu-24.04-arm
-          - macos-13 # This uses x86-64
           - macos-14 # This uses arm64
+          - macos-15-intel # This uses x86-64
         # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
         # It is not read by the rest of the workflow.
         is_pr:
@@ -124,7 +124,7 @@ jobs:
       exclude:
         # Don't build macos wheels on PR CI.
         - is_pr: true
-          os: "macos-13"
+          os: "macos-15-intel"
         - is_pr: true
           os: "macos-14"
         # Don't build aarch64 wheels on PR CI.
@@ -132,9 +132,9 @@ jobs:
           os: "ubuntu-24.04-arm"
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
-      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           # setup-python@v4 doesn't impose a default python version. Need to use 3.x
           # here, because `python` on osx points to Python 2.7.
@@ -165,8 +165,8 @@ jobs:
     if: ${{ !startsWith(github.ref, 'refs/pull/') }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: "3.10"
 
@@ -191,7 +191,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Download all workflow run artifacts
-        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
       - name: Build a tarball for the debs
         # We need to merge all the debs uploads into one folder, then compress
         # that.
```
`.github/workflows/schema.yaml` (vendored, 8 lines changed)

```diff
@@ -14,8 +14,8 @@ jobs:
     name: Ensure Synapse config schema is valid
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: "3.x"
       - name: Install check-jsonschema
@@ -40,8 +40,8 @@ jobs:
     name: Ensure generated documentation is up-to-date
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: "3.x"
       - name: Install PyYAML
```
`.github/workflows/tests.yml` (vendored, 150 lines changed)

```diff
@@ -86,12 +86,12 @@ jobs:
     if: ${{ needs.changes.outputs.linting == 'true' }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
           python-version: "3.x"
@@ -106,8 +106,8 @@ jobs:
     if: ${{ needs.changes.outputs.linting == 'true' }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: "3.x"
       - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
@@ -116,8 +116,8 @@ jobs:
   check-lockfile:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: "3.x"
       - run: .ci/scripts/check_lockfile.py
@@ -129,7 +129,7 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -151,13 +151,13 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -174,7 +174,7 @@ jobs:
       # Cribbed from
       # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
       - name: Restore/persist mypy's cache
-        uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: |
             .mypy_cache
@@ -187,7 +187,7 @@ jobs:
   lint-crlf:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Check line endings
         run: scripts-dev/check_line_terminators.sh
 
@@ -195,11 +195,11 @@ jobs:
     if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           ref: ${{ github.event.pull_request.head.sha }}
           fetch-depth: 0
-      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: "3.x"
       - run: "pip install 'towncrier>=18.6.0rc1'"
@@ -213,14 +213,14 @@ jobs:
     if: ${{ needs.changes.outputs.linting == 'true' }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           ref: ${{ github.event.pull_request.head.sha }}
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
           poetry-version: "2.1.1"
@@ -233,14 +233,14 @@ jobs:
     if: ${{ needs.changes.outputs.rust == 'true' }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           components: clippy
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: cargo clippy -- -D warnings
 
@@ -252,32 +252,70 @@ jobs:
     if: ${{ needs.changes.outputs.rust == 'true' }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: nightly-2025-04-23
           components: clippy
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: cargo clippy --all-features -- -D warnings
 
+  lint-rust:
+    runs-on: ubuntu-latest
+    needs: changes
+    if: ${{ needs.changes.outputs.rust == 'true' }}
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+
+      - name: Install Rust
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+
+      - name: Setup Poetry
+        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        with:
+          # Install like a normal project from source with all optional dependencies
+          extras: all
+          install-project: "true"
+          poetry-version: "2.1.1"
+
+      - name: Ensure `Cargo.lock` is up to date (no stray changes after install)
+        # The `::error::` syntax is using GitHub Actions' error annotations, see
+        # https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions
+        run: |
+          if git diff --quiet Cargo.lock; then
+            echo "Cargo.lock is up to date"
+          else
+            echo "::error::Cargo.lock has uncommitted changes after install. Please run 'poetry install --extras all' and commit the Cargo.lock changes."
+            git diff --exit-code Cargo.lock
+            exit 1
+          fi
+
+  # This job is split from `lint-rust` because it requires a nightly Rust toolchain
+  # for some of the unstable options we use in `.rustfmt.toml`.
   lint-rustfmt:
     runs-on: ubuntu-latest
     needs: changes
     if: ${{ needs.changes.outputs.rust == 'true' }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
-          # We use nightly so that it correctly groups together imports
+          # We use nightly so that we can use some unstable options that we use in
+          # `.rustfmt.toml`.
           toolchain: nightly-2025-04-23
           components: rustfmt
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: cargo fmt --check
```
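The new `lint-rust` freshness check can be reproduced locally before pushing. A small sketch that follows the remediation hint in the CI error annotation above:

```bash
#!/usr/bin/env bash
set -euo pipefail

# Re-run the install step the CI job performs...
poetry install --extras all

# ...then confirm it did not regenerate Cargo.lock.
if git diff --quiet Cargo.lock; then
    echo "Cargo.lock is up to date"
else
    echo "Cargo.lock changed; commit the regenerated file" >&2
    git diff Cargo.lock
    exit 1
fi
```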
```diff
@@ -288,8 +326,8 @@ jobs:
     needs: changes
     if: ${{ needs.changes.outputs.linting_readme == 'true' }}
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: "3.x"
       - run: "pip install rstcheck"
@@ -309,6 +347,7 @@ jobs:
       - check-lockfile
       - lint-clippy
       - lint-clippy-nightly
+      - lint-rust
       - lint-rustfmt
       - lint-readme
     runs-on: ubuntu-latest
@@ -327,6 +366,7 @@ jobs:
           lint-pydantic
           lint-clippy
           lint-clippy-nightly
+          lint-rust
           lint-rustfmt
           lint-readme
 
@@ -336,8 +376,8 @@ jobs:
     needs: linting-done
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: "3.x"
       - id: get-matrix
@@ -357,7 +397,7 @@ jobs:
         job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
         if: ${{ matrix.job.postgres-version }}
@@ -372,10 +412,10 @@ jobs:
             postgres:${{ matrix.job.postgres-version }}
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -413,13 +453,13 @@ jobs:
       - changes
     runs-on: ubuntu-22.04
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       # There aren't wheels for some of the older deps, so we need to install
       # their build dependencies
@@ -428,7 +468,7 @@ jobs:
           sudo apt-get -qq install build-essential libffi-dev python3-dev \
             libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
 
-      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: '3.9'
 
@@ -478,7 +518,7 @@ jobs:
         extras: ["all"]
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       # Install libs necessary for PyPy to build binary wheels for dependencies
       - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -528,15 +568,15 @@ jobs:
         job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Prepare test blacklist
         run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Run SyTest
         run: /bootstrap.sh synapse
@@ -575,7 +615,7 @@ jobs:
           --health-retries 5
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - run: sudo apt-get -qq install xmlsec1 postgresql-client
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -619,7 +659,7 @@ jobs:
           --health-retries 5
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Add PostgreSQL apt repository
         # We need a version of pg_dump that can handle the version of
         # PostgreSQL being tested against. The Ubuntu package repository lags
@@ -674,20 +714,20 @@ jobs:
 
     steps:
       - name: Checkout synapse codebase
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           path: synapse
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
 
-      - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
+      - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
         with:
           cache-dependency-path: complement/go.sum
           go-version-file: complement/go.mod
@@ -710,13 +750,13 @@ jobs:
       - changes
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: cargo test
 
@@ -730,13 +770,13 @@ jobs:
       - changes
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: nightly-2022-12-01
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: cargo bench --no-run
```
`.github/workflows/triage_labelled.yml` (vendored, 49 lines changed)

```diff
@@ -6,39 +6,26 @@ on:
 
 jobs:
   move_needs_info:
     name: Move X-Needs-Info on the triage board
     runs-on: ubuntu-latest
     if: >
       contains(github.event.issue.labels.*.name, 'X-Needs-Info')
+    permissions:
+      contents: read
+    env:
+      # This token must have the following scopes: ["repo:public_repo", "admin:org->read:org", "user->read:user", "project"]
+      GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
+      PROJECT_OWNER: matrix-org
+      # Backend issue triage board.
+      # https://github.com/orgs/matrix-org/projects/67/views/1
+      PROJECT_NUMBER: 67
+      ISSUE_URL: ${{ github.event.issue.html_url }}
+      # This field is case-sensitive.
+      TARGET_STATUS: Needs info
     steps:
-      - uses: actions/add-to-project@5b1a254a3546aef88e0a7724a77a623fa2e47c36 # main (v1.0.2 + 10 commits)
-        id: add_project
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
-          project-url: "https://github.com/orgs/matrix-org/projects/67"
-          github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
-      - name: Set status
-        env:
-          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
-        run: |
-          gh api graphql -f query='
-            mutation(
-              $project: ID!
-              $item: ID!
-              $fieldid: ID!
-              $columnid: String!
-            ) {
-              updateProjectV2ItemFieldValue(
-                input: {
-                  projectId: $project
-                  itemId: $item
-                  fieldId: $fieldid
-                  value: {
-                    singleSelectOptionId: $columnid
-                  }
-                }
-              ) {
-                projectV2Item {
-                  id
-                }
-              }
-            }' -f project="PVT_kwDOAIB0Bs4AFDdZ" -f item=${{ steps.add_project.outputs.itemId }} -f fieldid="PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4" -f columnid=ba22e43c --silent
+          # Only clone the script file we care about, instead of the whole repo.
+          sparse-checkout: .ci/scripts/triage_labelled_issue.sh
+
+      - name: Ensure issue exists on the board, then set Status
+        run: .ci/scripts/triage_labelled_issue.sh
```
`.github/workflows/twisted_trunk.yml` (vendored, 28 lines changed)

```diff
@@ -43,13 +43,13 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -70,14 +70,14 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - run: sudo apt-get -qq install xmlsec1
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -108,22 +108,22 @@ jobs:
     if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     container:
-      # We're using debian:bullseye because it uses Python 3.9 which is our minimum supported Python version.
+      # We're using bookworm because that's what Debian oldstable is at the time of writing.
       # This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
       # they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
       # version, assuming that any incompatibilities on newer versions would also be present on the oldest.
-      image: matrixdotorg/sytest-synapse:bullseye
+      image: matrixdotorg/sytest-synapse:bookworm
       volumes:
         - ${{ github.workspace }}:/src
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Patch dependencies
         # Note: The poetry commands want to create a virtualenv in /src/.venv/,
@@ -175,14 +175,14 @@ jobs:
 
     steps:
       - name: Run actions/checkout@v4 for synapse
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           path: synapse
 
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
 
-      - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
+      - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
         with:
           cache-dependency-path: complement/go.sum
           go-version-file: complement/go.mod
@@ -217,7 +217,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
```diff
@@ -1 +1,6 @@
+# Unstable options are only available on a nightly toolchain and must be opted into
+unstable_features = true
+
+# `group_imports` is an unstable option that requires nightly Rust toolchain. Tracked by
+# https://github.com/rust-lang/rustfmt/issues/5083
 group_imports = "StdExternalCrate"
```
573
CHANGES.md
573
CHANGES.md
@@ -1,3 +1,576 @@

# Synapse 1.141.0rc1 (2025-10-21)

## Deprecation of macOS Python wheels

The team has decided to deprecate and eventually stop publishing Python wheels
for macOS. This is a burden on the team, and we're not aware of any parties
that use them. Synapse docker images will continue to work on macOS, as will
building Synapse from source (though note this requires a Rust compiler).

Publishing macOS Python wheels will continue for the next few releases. If you
do make use of these wheels downstream, please reach out to us in
[#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org). We'd
love to hear from you!

## Features

- Allow using [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) behavior without the opt-in registration flag. Contributed by @tulir @ Beeper. ([\#19031](https://github.com/element-hq/synapse/issues/19031))
- Stabilized support for [MSC4326](https://github.com/matrix-org/matrix-spec-proposals/pull/4326): Device masquerading for appservices. Contributed by @tulir @ Beeper. ([\#19033](https://github.com/element-hq/synapse/issues/19033))

## Bugfixes

- Fix a bug introduced in 1.136.0 that would prevent Synapse from being able to be `reload`-ed more than once when running under systemd. ([\#19060](https://github.com/element-hq/synapse/issues/19060))
- Fix a bug introduced in 1.140.0 where an internal server error could be raised when hashing user passwords that are too long; see the sketch after this list. ([\#19078](https://github.com/element-hq/synapse/issues/19078))
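
Context for the fix above: Synapse hashes passwords with bcrypt, which only considers the first 72 bytes of input, and the bcrypt 5.0.0 release (bumped in 1.140.0, below) started rejecting longer passwords outright rather than silently truncating them. A minimal sketch of the failure mode, assuming the `bcrypt` package is installed; the exception handling shown here is illustrative, not Synapse's actual code:

```python
import bcrypt

# Longer than bcrypt's 72-byte limit.
password = ("x" * 100).encode("utf-8")

try:
    bcrypt.hashpw(password, bcrypt.gensalt())
except ValueError as exc:
    # bcrypt >= 5.0.0 raises here instead of truncating to 72 bytes;
    # unhandled, this surfaced as a 500 before the Synapse fix.
    print(f"hashing failed: {exc}")
```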

## Updates to the Docker image

- Update docker image to use Debian trixie as the base and thus Python 3.13. ([\#19064](https://github.com/element-hq/synapse/issues/19064))

## Internal Changes

- Move unique snowflake homeserver background tasks to `start_background_tasks` (the standard pattern for this kind of thing). ([\#19037](https://github.com/element-hq/synapse/issues/19037))
- Drop a deprecated field of the `PyGitHub` dependency in the release script and raise the dependency's minimum version to `1.59.0`. ([\#19039](https://github.com/element-hq/synapse/issues/19039))
- Update TODO list of conflicting areas where we encounter metrics being clobbered (`ApplicationService`). ([\#19040](https://github.com/element-hq/synapse/issues/19040))


# Synapse 1.140.0 (2025-10-14)

## Compatibility notice for users of `synapse-s3-storage-provider`

Deployments that make use of the
[synapse-s3-storage-provider](https://github.com/matrix-org/synapse-s3-storage-provider)
module must upgrade to
[v1.6.0](https://github.com/matrix-org/synapse-s3-storage-provider/releases/tag/v1.6.0).
Using older versions of the module with this release of Synapse will prevent
users from being able to upload or download media.

No significant changes since 1.140.0rc1.


# Synapse 1.140.0rc1 (2025-10-10)

## Features

- Add [a new Media Query by ID Admin API](https://element-hq.github.io/synapse/v1.140/admin_api/media_admin_api.html#query-a-piece-of-media-by-id) that allows server admins to query and investigate the metadata of local or cached remote media via the `origin/media_id` identifier found in a [Matrix Content URI](https://spec.matrix.org/v1.14/client-server-api/#matrix-content-mxc-uris); see the sketch after this list. ([\#18911](https://github.com/element-hq/synapse/issues/18911))
- Add [a new Fetch Event Admin API](https://element-hq.github.io/synapse/v1.140/admin_api/fetch_event.html) to fetch an event by ID. ([\#18963](https://github.com/element-hq/synapse/issues/18963))
- Update [MSC4284: Policy Servers](https://github.com/matrix-org/matrix-spec-proposals/pull/4284) implementation to support signatures when available. ([\#18934](https://github.com/element-hq/synapse/issues/18934))
- Add experimental implementation of the `GET /_matrix/client/v1/rtc/transports` endpoint for the latest draft of [MSC4143: MatrixRTC](https://github.com/matrix-org/matrix-spec-proposals/pull/4143). ([\#18967](https://github.com/element-hq/synapse/issues/18967))
- Expose a `defer_to_threadpool` function in the Synapse Module API that allows modules to run a function on a separate thread in a custom threadpool. ([\#19032](https://github.com/element-hq/synapse/issues/19032))
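
To illustrate the `origin/media_id` identifier the new Media Query Admin API works with: a Matrix Content URI has the form `mxc://<origin>/<media_id>`, and those two components identify the media to the admin endpoint. A minimal sketch; the URI parsing follows the spec, but the admin URL path built below is an illustrative assumption and should be checked against the linked documentation:

```python
from urllib.parse import urlparse

def mxc_to_admin_query_url(base_url: str, mxc_uri: str) -> str:
    """Split an mxc:// URI into origin and media_id and build an
    admin query URL from them (URL layout is illustrative)."""
    parsed = urlparse(mxc_uri)  # mxc://<origin>/<media_id>
    if parsed.scheme != "mxc":
        raise ValueError(f"not a Matrix Content URI: {mxc_uri}")
    origin = parsed.netloc
    media_id = parsed.path.lstrip("/")
    return f"{base_url}/_synapse/admin/v1/media/{origin}/{media_id}"

print(mxc_to_admin_query_url("https://synapse.example.com", "mxc://matrix.org/abcDEF123"))
```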

## Bugfixes

- Fix room upgrade `room_config` argument and documentation for `user_may_create_room` spam-checker callback. ([\#18721](https://github.com/element-hq/synapse/issues/18721))
- Compute a user's last seen timestamp from their devices' last seen timestamps instead of IPs, because the latter are automatically cleared according to `user_ips_max_age`. ([\#18948](https://github.com/element-hq/synapse/issues/18948))
- Fix bug where ephemeral events were not filtered by room ID. Contributed by @frastefanini. ([\#19002](https://github.com/element-hq/synapse/issues/19002))
- Update Synapse main process version string to include git info. ([\#19011](https://github.com/element-hq/synapse/issues/19011))

## Improved Documentation

- Explain how `Deferred` callbacks interact with logcontexts. ([\#18914](https://github.com/element-hq/synapse/issues/18914))
- Fix documentation for `rc_room_creation` and `rc_reports` to clarify that a `per_user` rate limit is not supported; see the sketch after this list. ([\#18998](https://github.com/element-hq/synapse/issues/18998))
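
For reference, these rate limiters follow the usual `per_second`/`burst_count` shape in `homeserver.yaml`, with no nested `per_user` level. A quick sketch that loads and checks such a snippet; the option names come from the entry above, while the shape and values are illustrative assumptions to be checked against the config documentation:

```python
import yaml

config = yaml.safe_load(
    """
    rc_room_creation:
      per_second: 0.1
      burst_count: 5
    rc_reports:
      per_second: 1
      burst_count: 5
    """
)

for name, limits in config.items():
    # A nested `per_user` key is not supported for these limiters.
    assert "per_user" not in limits, f"{name}: per_user is not supported"
    print(name, limits["per_second"], limits["burst_count"])
```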

## Deprecations and Removals

- Remove deprecated `LoggingContext.set_current_context`/`LoggingContext.current_context` methods which already have equivalent bare methods in `synapse.logging.context`. ([\#18989](https://github.com/element-hq/synapse/issues/18989))
- Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal. ([\#18996](https://github.com/element-hq/synapse/issues/18996))

## Internal Changes

- Cleanly shut down the `SynapseHomeServer` object, allowing artifacts of embedded small hosts to be properly garbage collected. ([\#18828](https://github.com/element-hq/synapse/issues/18828))
- Update OEmbed providers to use 'X' instead of 'Twitter' in URL previews, following a rebrand. Contributed by @HammyHavoc. ([\#18767](https://github.com/element-hq/synapse/issues/18767))
- Fix `server_name` in logging context for multiple Synapse instances in one process. ([\#18868](https://github.com/element-hq/synapse/issues/18868))
- Wrap the Rust HTTP client with `make_deferred_yieldable` so it follows Synapse logcontext rules. ([\#18903](https://github.com/element-hq/synapse/issues/18903))
- Fix the GitHub Actions workflow that moves issues labeled "X-Needs-Info" to the "Needs info" column on the team's internal triage board. ([\#18913](https://github.com/element-hq/synapse/issues/18913))
- Disconnect background process work from request trace. ([\#18932](https://github.com/element-hq/synapse/issues/18932))
- Reduce overall number of calls to `_get_e2e_cross_signing_signatures_for_devices` by increasing the batch size of devices the query is called with, reducing DB load. ([\#18939](https://github.com/element-hq/synapse/issues/18939))
- Update error code used when an appservice tries to masquerade as an unknown device using [MSC4326](https://github.com/matrix-org/matrix-spec-proposals/pull/4326). Contributed by @tulir @ Beeper. ([\#18947](https://github.com/element-hq/synapse/issues/18947))
- Fix `no active span when trying to log` tracing error on startup (when OpenTracing is enabled). ([\#18959](https://github.com/element-hq/synapse/issues/18959))
- Fix `run_coroutine_in_background(...)` incorrectly handling logcontext. ([\#18964](https://github.com/element-hq/synapse/issues/18964))
- Add debug logs wherever we change current logcontext. ([\#18966](https://github.com/element-hq/synapse/issues/18966))
- Update dockerfile metadata to fix broken link; point to documentation website. ([\#18971](https://github.com/element-hq/synapse/issues/18971))
- Note that the code is additionally licensed under the [Element Commercial license](https://github.com/element-hq/synapse/blob/develop/LICENSE-COMMERCIAL) in SPDX expression field configs. ([\#18973](https://github.com/element-hq/synapse/issues/18973))
- Fix logcontext handling in `timeout_deferred` tests. ([\#18974](https://github.com/element-hq/synapse/issues/18974))
- Remove internal `ReplicationUploadKeysForUserRestServlet` as a follow-up to the work in https://github.com/element-hq/synapse/pull/18581 that moved device changes off the main process. ([\#18988](https://github.com/element-hq/synapse/issues/18988))
- Switch task scheduler from raw logcontext manipulation to using the dedicated logcontext utils. ([\#18990](https://github.com/element-hq/synapse/issues/18990))
- Remove `MockClock()` in tests. ([\#18992](https://github.com/element-hq/synapse/issues/18992))
- Switch back to our own custom `LogContextScopeManager` instead of OpenTracing's `ContextVarsScopeManager` which was causing problems when using the experimental `SYNAPSE_ASYNC_IO_REACTOR` option with tracing enabled. ([\#19007](https://github.com/element-hq/synapse/issues/19007))
- Remove `version_string` argument from `HomeServer` since it's always the same. ([\#19012](https://github.com/element-hq/synapse/issues/19012))
- Remove duplicate call to `hs.start_background_tasks()` introduced from a bad merge. ([\#19013](https://github.com/element-hq/synapse/issues/19013))
- Split homeserver creation (`create_homeserver`) and setup (`setup`). ([\#19015](https://github.com/element-hq/synapse/issues/19015))
- Swap near-end-of-life `macos-13` GitHub Actions runner for the `macos-15-intel` variant. ([\#19025](https://github.com/element-hq/synapse/issues/19025))
- Introduce `RootConfig.validate_config()` which can be subclassed in `HomeServerConfig` to do cross-config class validation. ([\#19027](https://github.com/element-hq/synapse/issues/19027))
- Allow any command of the `release.py` script to accept a `--gh-token` argument. ([\#19035](https://github.com/element-hq/synapse/issues/19035))


### Updates to locked dependencies

* Bump Swatinem/rust-cache from 2.8.0 to 2.8.1. ([\#18949](https://github.com/element-hq/synapse/issues/18949))
* Bump actions/cache from 4.2.4 to 4.3.0. ([\#18983](https://github.com/element-hq/synapse/issues/18983))
* Bump anyhow from 1.0.99 to 1.0.100. ([\#18950](https://github.com/element-hq/synapse/issues/18950))
* Bump authlib from 1.6.3 to 1.6.4. ([\#18957](https://github.com/element-hq/synapse/issues/18957))
* Bump authlib from 1.6.4 to 1.6.5. ([\#19019](https://github.com/element-hq/synapse/issues/19019))
* Bump bcrypt from 4.3.0 to 5.0.0. ([\#18984](https://github.com/element-hq/synapse/issues/18984))
* Bump docker/login-action from 3.5.0 to 3.6.0. ([\#18978](https://github.com/element-hq/synapse/issues/18978))
* Bump lxml from 6.0.0 to 6.0.2. ([\#18979](https://github.com/element-hq/synapse/issues/18979))
* Bump phonenumbers from 9.0.13 to 9.0.14. ([\#18954](https://github.com/element-hq/synapse/issues/18954))
* Bump phonenumbers from 9.0.14 to 9.0.15. ([\#18991](https://github.com/element-hq/synapse/issues/18991))
* Bump prometheus-client from 0.22.1 to 0.23.1. ([\#19016](https://github.com/element-hq/synapse/issues/19016))
* Bump pydantic from 2.11.9 to 2.11.10. ([\#19017](https://github.com/element-hq/synapse/issues/19017))
* Bump pygithub from 2.7.0 to 2.8.1. ([\#18952](https://github.com/element-hq/synapse/issues/18952))
* Bump regex from 1.11.2 to 1.11.3. ([\#18981](https://github.com/element-hq/synapse/issues/18981))
* Bump serde from 1.0.224 to 1.0.226. ([\#18953](https://github.com/element-hq/synapse/issues/18953))
* Bump serde from 1.0.226 to 1.0.228. ([\#18982](https://github.com/element-hq/synapse/issues/18982))
* Bump setuptools-rust from 1.11.1 to 1.12.0. ([\#18980](https://github.com/element-hq/synapse/issues/18980))
* Bump twine from 6.1.0 to 6.2.0. ([\#18985](https://github.com/element-hq/synapse/issues/18985))
* Bump types-pyyaml from 6.0.12.20250809 to 6.0.12.20250915. ([\#19018](https://github.com/element-hq/synapse/issues/19018))
* Bump types-requests from 2.32.4.20250809 to 2.32.4.20250913. ([\#18951](https://github.com/element-hq/synapse/issues/18951))
* Bump typing-extensions from 4.14.1 to 4.15.0. ([\#18956](https://github.com/element-hq/synapse/issues/18956))


# Synapse 1.139.2 (2025-10-07)

## Bugfixes

- Fix a bug introduced in 1.139.1 where a client could receive an Internal Server Error if they set `device_keys: null` in the request to [`POST /_matrix/client/v3/keys/upload`](https://spec.matrix.org/v1.16/client-server-api/#post_matrixclientv3keysupload); see the sketch after this list. ([\#19023](https://github.com/element-hq/synapse/issues/19023))
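
The failing request, for reference: the spec allows `device_keys` to be omitted entirely, but some clients sent an explicit `null`, which this bug turned into a 500. A minimal reproduction sketch using `requests`; the homeserver URL and access token are placeholders:

```python
import requests

resp = requests.post(
    "https://synapse.example.com/_matrix/client/v3/keys/upload",
    headers={"Authorization": "Bearer <access_token>"},  # placeholder
    json={"device_keys": None},  # serialised as `"device_keys": null`
)
# Before the fix this could return a 500; afterwards it is handled gracefully.
print(resp.status_code, resp.json())
```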


# Synapse 1.139.1 (2025-10-07)

## Security Fixes

- Fix [CVE-2025-61672](https://www.cve.org/CVERecord?id=CVE-2025-61672) / [GHSA-fh66-fcv5-jjfr](https://github.com/element-hq/synapse/security/advisories/GHSA-fh66-fcv5-jjfr). Lack of validation for device keys in Synapse before 1.139.1 allows an attacker registered on the victim homeserver to degrade federation functionality, unpredictably breaking outbound federation to other homeservers. ([\#17097](https://github.com/element-hq/synapse/issues/17097))

## Deprecations and Removals

- Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal. This change allows unit tests to pass following the security patch above. ([\#18996](https://github.com/element-hq/synapse/issues/18996))


# Synapse 1.138.4 (2025-10-07)

## Bugfixes

- Fix a bug introduced in 1.138.3 where a client could receive an Internal Server Error if they set `device_keys: null` in the request to [`POST /_matrix/client/v3/keys/upload`](https://spec.matrix.org/v1.16/client-server-api/#post_matrixclientv3keysupload). ([\#19023](https://github.com/element-hq/synapse/issues/19023))


# Synapse 1.138.3 (2025-10-07)

## Security Fixes

- Fix [CVE-2025-61672](https://www.cve.org/CVERecord?id=CVE-2025-61672) / [GHSA-fh66-fcv5-jjfr](https://github.com/element-hq/synapse/security/advisories/GHSA-fh66-fcv5-jjfr). Lack of validation for device keys in Synapse before 1.139.1 allows an attacker registered on the victim homeserver to degrade federation functionality, unpredictably breaking outbound federation to other homeservers. ([\#17097](https://github.com/element-hq/synapse/issues/17097))

## Deprecations and Removals

- Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal. This change allows unit tests to pass following the security patch above. ([\#18996](https://github.com/element-hq/synapse/issues/18996))


# Synapse 1.139.0 (2025-09-30)

### `/register` requests from old application service implementations may break when using MAS

If you are using Matrix Authentication Service (MAS), as of this release any
Application Services that do not set `inhibit_login=true` when calling `POST
/_matrix/client/v3/register` will receive the error
`IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED` in response. Please see [the
upgrade
notes](https://element-hq.github.io/synapse/develop/upgrade.html#register-requests-from-old-application-service-implementations-may-break-when-using-mas)
for more information.
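
For appservice authors hitting the error above: the spec's `POST /_matrix/client/v3/register` accepts an `inhibit_login` flag, and setting it makes the registration compatible with MAS-backed deployments. A sketch of such a call; the homeserver URL, appservice token, and username are placeholders:

```python
import requests

resp = requests.post(
    "https://synapse.example.com/_matrix/client/v3/register",
    headers={"Authorization": "Bearer <as_token>"},  # appservice token (placeholder)
    json={
        "type": "m.login.application_service",
        "username": "my_bridge_user",
        # Ask the server not to log the user in; required for MAS deployments.
        "inhibit_login": True,
    },
)
print(resp.status_code, resp.json())
```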

No significant changes since 1.139.0rc3.


# Synapse 1.139.0rc3 (2025-09-25)

## Bugfixes

- Fix a bug introduced in 1.139.0rc1 where `run_coroutine_in_background(...)` incorrectly handled logcontexts, resulting in partially broken logging. ([\#18964](https://github.com/element-hq/synapse/issues/18964))


# Synapse 1.139.0rc2 (2025-09-23)

## Internal Changes

- Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin. This change was applied on top of 1.139.0rc1. ([\#18962](https://github.com/element-hq/synapse/issues/18962))


# Synapse 1.139.0rc1 (2025-09-23)

## Features

- Add experimental support for [MSC4308: Thread Subscriptions extension to Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4308) when [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) and [MSC4186: Simplified Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4186) are enabled. ([\#18695](https://github.com/element-hq/synapse/issues/18695))
- Update push rules for experimental [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306) to follow a newer draft. ([\#18846](https://github.com/element-hq/synapse/issues/18846))
- Add `get_media_upload_limits_for_user` and `on_media_upload_limit_exceeded` module API callbacks to the media repository. ([\#18848](https://github.com/element-hq/synapse/issues/18848))
- Support [MSC4169](https://github.com/matrix-org/matrix-spec-proposals/pull/4169) for backwards-compatible redaction sending using the `/send` endpoint. Contributed by @SpiritCroc @ Beeper. ([\#18898](https://github.com/element-hq/synapse/issues/18898))
- Add an in-memory cache to `_get_e2e_cross_signing_signatures_for_devices` to reduce DB load. ([\#18899](https://github.com/element-hq/synapse/issues/18899))
- Update [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) support to return correct errors and allow appservices to reset cross-signing keys without user-interactive authentication. Contributed by @tulir @ Beeper. ([\#18946](https://github.com/element-hq/synapse/issues/18946))

## Bugfixes

- Ensure all PDUs sent via `/send` pass canonical JSON checks. ([\#18641](https://github.com/element-hq/synapse/issues/18641))
- Fix bug where we did not send invite revocations over federation. ([\#18823](https://github.com/element-hq/synapse/issues/18823))
- Fix prefixed support for [MSC4133](https://github.com/matrix-org/matrix-spec-proposals/pull/4133). ([\#18875](https://github.com/element-hq/synapse/issues/18875))
- Fix open redirect in legacy SSO flow with the `idp` query parameter. ([\#18909](https://github.com/element-hq/synapse/issues/18909))
- Fix a performance regression related to the experimental Delayed Events ([MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140)) feature. ([\#18926](https://github.com/element-hq/synapse/issues/18926))

## Updates to the Docker image

- Suppress the bulk of "Applying schema" log noise when `SYNAPSE_LOG_TESTING` is set. ([\#18878](https://github.com/element-hq/synapse/issues/18878))

## Improved Documentation

- Clarify Python dependency constraints in our deprecation policy. ([\#18856](https://github.com/element-hq/synapse/issues/18856))
- Clarify necessary `jwt_config` parameter in OIDC documentation for authentik. Contributed by @maxkratz. ([\#18931](https://github.com/element-hq/synapse/issues/18931))

## Deprecations and Removals

- Remove obsolete and experimental `/sync/e2ee` endpoint. ([\#18583](https://github.com/element-hq/synapse/issues/18583))

## Internal Changes

- Fix `LaterGauge` metrics to collect from all servers. ([\#18791](https://github.com/element-hq/synapse/issues/18791))
- Configure Synapse to run [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) Complement tests. ([\#18819](https://github.com/element-hq/synapse/issues/18819))
- Remove `sentinel` logcontext usage where we log in `setup`, `start` and `exit`. ([\#18870](https://github.com/element-hq/synapse/issues/18870))
- Use the `Enum`'s value for the dictionary key when responding to an admin request for experimental features. ([\#18874](https://github.com/element-hq/synapse/issues/18874))
- Start background tasks after we fork the process (daemonize). ([\#18886](https://github.com/element-hq/synapse/issues/18886))
- Better explain how we manage the logcontext in `run_in_background(...)` and `run_as_background_process(...)`. ([\#18900](https://github.com/element-hq/synapse/issues/18900), [\#18906](https://github.com/element-hq/synapse/issues/18906))
- Remove `sentinel` logcontext usage in `Clock` utilities like `looping_call` and `call_later`. ([\#18907](https://github.com/element-hq/synapse/issues/18907))
- Replace usages of the deprecated `pkg_resources` interface in preparation of setuptools dropping it soon. ([\#18910](https://github.com/element-hq/synapse/issues/18910))
- Split loading config from homeserver `setup`. ([\#18933](https://github.com/element-hq/synapse/issues/18933))
- Fix `run_in_background` not being awaited properly in some tests causing `LoggingContext` problems. ([\#18937](https://github.com/element-hq/synapse/issues/18937))
- Fix `run_as_background_process` not being awaited properly causing `LoggingContext` problems in experimental [MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140): Delayed events implementation. ([\#18938](https://github.com/element-hq/synapse/issues/18938))
- Introduce `Clock.call_when_running(...)` to wrap startup code in a logcontext, ensuring we can identify which server generated the logs. ([\#18944](https://github.com/element-hq/synapse/issues/18944))
- Introduce `Clock.add_system_event_trigger(...)` to wrap system event callback code in a logcontext, ensuring we can identify which server generated the logs. ([\#18945](https://github.com/element-hq/synapse/issues/18945))


### Updates to locked dependencies

* Bump actions/setup-go from 5.5.0 to 6.0.0. ([\#18891](https://github.com/element-hq/synapse/issues/18891))
* Bump actions/setup-python from 5.6.0 to 6.0.0. ([\#18890](https://github.com/element-hq/synapse/issues/18890))
* Bump authlib from 1.6.1 to 1.6.3. ([\#18921](https://github.com/element-hq/synapse/issues/18921))
* Bump jsonschema from 4.25.0 to 4.25.1. ([\#18897](https://github.com/element-hq/synapse/issues/18897))
* Bump log from 0.4.27 to 0.4.28. ([\#18892](https://github.com/element-hq/synapse/issues/18892))
* Bump phonenumbers from 9.0.12 to 9.0.13. ([\#18893](https://github.com/element-hq/synapse/issues/18893))
* Bump pydantic from 2.11.7 to 2.11.9. ([\#18922](https://github.com/element-hq/synapse/issues/18922))
* Bump serde from 1.0.219 to 1.0.223. ([\#18920](https://github.com/element-hq/synapse/issues/18920))
* Bump serde_json from 1.0.143 to 1.0.145. ([\#18919](https://github.com/element-hq/synapse/issues/18919))
* Bump sigstore/cosign-installer from 3.9.2 to 3.10.0. ([\#18917](https://github.com/element-hq/synapse/issues/18917))
* Bump towncrier from 24.8.0 to 25.8.0. ([\#18894](https://github.com/element-hq/synapse/issues/18894))
* Bump types-psycopg2 from 2.9.21.20250809 to 2.9.21.20250915. ([\#18918](https://github.com/element-hq/synapse/issues/18918))
* Bump types-requests from 2.32.4.20250611 to 2.32.4.20250809. ([\#18895](https://github.com/element-hq/synapse/issues/18895))
* Bump types-setuptools from 80.9.0.20250809 to 80.9.0.20250822. ([\#18924](https://github.com/element-hq/synapse/issues/18924))
# Synapse 1.138.2 (2025-09-24)

## Internal Changes

- Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin. This change was applied on top of 1.138.1. ([\#18962](https://github.com/element-hq/synapse/issues/18962))


# Synapse 1.138.1 (2025-09-24)

## Bugfixes

- Fix a performance regression related to the experimental Delayed Events ([MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140)) feature. ([\#18926](https://github.com/element-hq/synapse/issues/18926))


# Synapse 1.138.0 (2025-09-09)

No significant changes since 1.138.0rc1.


# Synapse 1.138.0rc1 (2025-09-02)

### Features

- Support for the stable endpoint and scopes of [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) & co. ([\#18549](https://github.com/element-hq/synapse/issues/18549))

### Bugfixes

- Improve database performance of [MSC4293](https://github.com/matrix-org/matrix-spec-proposals/pull/4293) - Redact on Kick/Ban. ([\#18851](https://github.com/element-hq/synapse/issues/18851))
- Do not throw an error when fetching a rejected delayed state event on startup. ([\#18858](https://github.com/element-hq/synapse/issues/18858))

### Improved Documentation

- Fix worker documentation incorrectly indicating all room Admin API requests were capable of being handled by workers. ([\#18853](https://github.com/element-hq/synapse/issues/18853))

### Internal Changes

- Instrument `_ByteProducer` with tracing to measure potential dead time while writing bytes to the request. ([\#18804](https://github.com/element-hq/synapse/issues/18804))
- Switch to OpenTracing's `ContextVarsScopeManager` instead of our own custom `LogContextScopeManager`. ([\#18849](https://github.com/element-hq/synapse/issues/18849))
- Trace how much work is being done while "recursively fetching redactions". ([\#18854](https://github.com/element-hq/synapse/issues/18854))
- Link [upstream Twisted bug](https://github.com/twisted/twisted/issues/12498) tracking the problem that explains why we have to use a `Producer` to write bytes to the request. ([\#18855](https://github.com/element-hq/synapse/issues/18855))
- Introduce `EventPersistencePair` type. ([\#18857](https://github.com/element-hq/synapse/issues/18857))


### Updates to locked dependencies

* Bump actions/add-to-project from c0c5949b017d0d4a39f7ba888255881bdac2a823 to 4515659e2b458b27365e167605ac44f219494b66. ([\#18863](https://github.com/element-hq/synapse/issues/18863))
* Bump actions/checkout from 4.3.0 to 5.0.0. ([\#18834](https://github.com/element-hq/synapse/issues/18834))
* Bump anyhow from 1.0.98 to 1.0.99. ([\#18841](https://github.com/element-hq/synapse/issues/18841))
* Bump docker/login-action from 3.4.0 to 3.5.0. ([\#18835](https://github.com/element-hq/synapse/issues/18835))
* Bump dtolnay/rust-toolchain from b3b07ba8b418998c39fb20f53e8b695cdcc8de1b to e97e2d8cc328f1b50210efc529dca0028893a2d9. ([\#18862](https://github.com/element-hq/synapse/issues/18862))
* Bump phonenumbers from 9.0.11 to 9.0.12. ([\#18837](https://github.com/element-hq/synapse/issues/18837))
* Bump regex from 1.11.1 to 1.11.2. ([\#18864](https://github.com/element-hq/synapse/issues/18864))
* Bump reqwest from 0.12.22 to 0.12.23. ([\#18842](https://github.com/element-hq/synapse/issues/18842))
* Bump ruff from 0.12.7 to 0.12.10. ([\#18865](https://github.com/element-hq/synapse/issues/18865))
* Bump serde_json from 1.0.142 to 1.0.143. ([\#18866](https://github.com/element-hq/synapse/issues/18866))
* Bump types-bleach from 6.2.0.20250514 to 6.2.0.20250809. ([\#18838](https://github.com/element-hq/synapse/issues/18838))
* Bump types-jsonschema from 4.25.0.20250720 to 4.25.1.20250822. ([\#18867](https://github.com/element-hq/synapse/issues/18867))
* Bump types-psycopg2 from 2.9.21.20250718 to 2.9.21.20250809. ([\#18836](https://github.com/element-hq/synapse/issues/18836))


# Synapse 1.137.0 (2025-08-26)

No significant changes since 1.137.0rc1.


# Synapse 1.137.0rc1 (2025-08-19)

### Bugfixes

- Fix a bug which could corrupt auth chains making it impossible to perform state resolution. ([\#18746](https://github.com/element-hq/synapse/issues/18746))
- Fix error message in `register_new_matrix_user` utility script for empty `registration_shared_secret`. ([\#18780](https://github.com/element-hq/synapse/issues/18780))
- Allow enabling [MSC4108](https://github.com/matrix-org/matrix-spec-proposals/pull/4108) when the stable Matrix Authentication Service integration is enabled. ([\#18832](https://github.com/element-hq/synapse/issues/18832))

### Improved Documentation

- Include IPv6 networks in `denied-peer-ips` of coturn setup. Contributed by @litetex. ([\#18781](https://github.com/element-hq/synapse/issues/18781))

### Internal Changes

- Update tests to ensure all database tables are emptied when purging a room. ([\#18794](https://github.com/element-hq/synapse/issues/18794))
- Instrument the `encode_response` part of Sliding Sync requests for more complete traces in Jaeger. ([\#18815](https://github.com/element-hq/synapse/issues/18815))
- Tag Sliding Sync traces when we `wait_for_events`. ([\#18816](https://github.com/element-hq/synapse/issues/18816))
- Fix `portdb` CI by hardcoding the new `pg_dump` restrict key that was added due to [CVE-2025-8714](https://nvd.nist.gov/vuln/detail/cve-2025-8714). ([\#18824](https://github.com/element-hq/synapse/issues/18824))


### Updates to locked dependencies

* Bump actions/add-to-project from 5b1a254a3546aef88e0a7724a77a623fa2e47c36 to 0c37450c4be3b6a7582b2fb013c9ebfd9c8e9300. ([\#18557](https://github.com/element-hq/synapse/issues/18557))
* Bump actions/cache from 4.2.3 to 4.2.4. ([\#18799](https://github.com/element-hq/synapse/issues/18799))
* Bump actions/checkout from 4.2.2 to 4.3.0. ([\#18800](https://github.com/element-hq/synapse/issues/18800))
* Bump actions/download-artifact from 4.3.0 to 5.0.0. ([\#18801](https://github.com/element-hq/synapse/issues/18801))
* Bump docker/metadata-action from 5.7.0 to 5.8.0. ([\#18773](https://github.com/element-hq/synapse/issues/18773))
* Bump mypy from 1.16.1 to 1.17.1. ([\#18775](https://github.com/element-hq/synapse/issues/18775))
* Bump phonenumbers from 9.0.10 to 9.0.11. ([\#18797](https://github.com/element-hq/synapse/issues/18797))
* Bump pygithub from 2.6.1 to 2.7.0. ([\#18779](https://github.com/element-hq/synapse/issues/18779))
* Bump serde_json from 1.0.141 to 1.0.142. ([\#18776](https://github.com/element-hq/synapse/issues/18776))
* Bump slab from 0.4.10 to 0.4.11. ([\#18809](https://github.com/element-hq/synapse/issues/18809))
* Bump tokio from 1.47.0 to 1.47.1. ([\#18774](https://github.com/element-hq/synapse/issues/18774))
* Bump types-pyyaml from 6.0.12.20250516 to 6.0.12.20250809. ([\#18798](https://github.com/element-hq/synapse/issues/18798))
* Bump types-setuptools from 80.9.0.20250529 to 80.9.0.20250809. ([\#18796](https://github.com/element-hq/synapse/issues/18796))
# Synapse 1.136.0 (2025-08-12)

Note: This release includes the security fixes from `1.135.2` and `1.136.0rc2`, detailed below.

### Bugfixes

- Fix bug introduced in 1.135.2 and 1.136.0rc2 where the [Make Room Admin API](https://element-hq.github.io/synapse/latest/admin_api/rooms.html#make-room-admin-api) would not treat a room v12's creator power level as the highest in room. ([\#18805](https://github.com/element-hq/synapse/issues/18805))


# Synapse 1.135.2 (2025-08-11)

This is the Synapse portion of the [Matrix coordinated security release](https://matrix.org/blog/2025/07/security-predisclosure/). This release includes support for [room version](https://spec.matrix.org/v1.15/rooms/) 12 which fixes a number of security vulnerabilities, including [CVE-2025-49090](https://www.cve.org/CVERecord?id=CVE-2025-49090).

The default room version is not changed. Not all clients will support room version 12 immediately, and not all users will be using the latest version of their clients. Large, public rooms are advised to wait a few weeks before upgrading to room version 12 to allow users throughout the Matrix ecosystem to update their clients.

Note: release 1.135.1 was skipped due to issues discovered during the release process.

Two patched Synapse releases are now available:

* `1.135.2`: stable release comprised of `1.135.0` + security patches
  * Upgrade to this release **if you are currently running 1.135.0 or below**.
* `1.136.0rc2`: unstable release candidate comprised of `1.136.0rc1` + security patches.
  * Upgrade to this release **only if you are on 1.136.0rc1**.

### Bugfixes

- Fix invalidation of storage cache that was broken in 1.135.0. ([\#18786](https://github.com/element-hq/synapse/issues/18786))

### Internal Changes

- Add a parameter to `upgrade_rooms(..)` to allow auto join local users. ([\#82](https://github.com/element-hq/synapse/issues/82))
- Speed up upgrading a room with large numbers of banned users. ([\#18574](https://github.com/element-hq/synapse/issues/18574))


# Synapse 1.136.0rc2 (2025-08-11)

- Update MSC4293 redaction logic for room v12. ([\#80](https://github.com/element-hq/synapse/issues/80))

### Internal Changes

- Add a parameter to `upgrade_rooms(..)` to allow auto join local users. ([\#83](https://github.com/element-hq/synapse/issues/83))


# Synapse 1.136.0rc1 (2025-08-05)

Please check [the relevant section in the upgrade notes](https://github.com/element-hq/synapse/blob/develop/docs/upgrade.md#upgrading-to-v11360) as this release contains changes to MAS support, metrics labels and the module API which may require your attention when upgrading.

### Features

- Add configurable rate limiting for the creation of rooms. ([\#18514](https://github.com/element-hq/synapse/issues/18514))
- Add support for [MSC4293](https://github.com/matrix-org/matrix-spec-proposals/pull/4293) - Redact on Kick/Ban. ([\#18540](https://github.com/element-hq/synapse/issues/18540))
- When admins enable themselves to see soft-failed events, the `unsigned` content will also indicate whether the cause was the policy server flagging the event as spam. ([\#18585](https://github.com/element-hq/synapse/issues/18585))
- Add ability to configure a forward/outbound proxy via homeserver config instead of environment variables. See `http_proxy`, `https_proxy`, `no_proxy_hosts`; a sketch follows this list. ([\#18686](https://github.com/element-hq/synapse/issues/18686))
- Advertise experimental support for [MSC4306](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) (Thread Subscriptions) through `/_matrix/clients/versions` if enabled. ([\#18722](https://github.com/element-hq/synapse/issues/18722))
- Stabilise support for delegating authentication to [Matrix Authentication Service](https://github.com/element-hq/matrix-authentication-service/). ([\#18759](https://github.com/element-hq/synapse/issues/18759))
- Implement the push rules for experimental [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306). ([\#18762](https://github.com/element-hq/synapse/issues/18762))
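
A sketch of what the new outbound-proxy options might look like in `homeserver.yaml`, validated here with a quick load. The option names (`http_proxy`, `https_proxy`, `no_proxy_hosts`) come from the entry above; the value shapes are illustrative assumptions and should be checked against the config documentation:

```python
import yaml

homeserver_yaml = """
http_proxy: "http://proxy.example.com:3128"
https_proxy: "http://proxy.example.com:3128"
no_proxy_hosts:
  - "localhost"
  - "internal.example.com"
"""

config = yaml.safe_load(homeserver_yaml)
print(config["http_proxy"], config["no_proxy_hosts"])
```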

### Bugfixes

- Allow return code 403 (allowed by the C2S Spec since v1.2) when fetching profiles via federation. ([\#18696](https://github.com/element-hq/synapse/issues/18696))
- Register the MSC4306 (Thread Subscriptions) endpoints in the CS API when the experimental feature is enabled. ([\#18726](https://github.com/element-hq/synapse/issues/18726))
- Fix a long-standing bug where suspended users could not have server notices sent to them (a 403 was returned to the admin). ([\#18750](https://github.com/element-hq/synapse/issues/18750))
- Fix an issue that could cause logcontexts to be lost on rate-limited requests. Found by @realtyem. ([\#18763](https://github.com/element-hq/synapse/issues/18763))
- Fix invalidation of storage cache that was broken in 1.135.0. ([\#18786](https://github.com/element-hq/synapse/issues/18786))

### Improved Documentation

- Minor improvements to README. ([\#18700](https://github.com/element-hq/synapse/issues/18700))
- Document that there can be multiple workers handling the `receipts` stream. ([\#18760](https://github.com/element-hq/synapse/issues/18760))
- Improve worker documentation for some device paths. ([\#18761](https://github.com/element-hq/synapse/issues/18761))

### Deprecations and Removals

- Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process`. See [the relevant section in the upgrade notes](https://github.com/element-hq/synapse/blob/develop/docs/upgrade.md#upgrading-to-v11360) for more information. ([\#18737](https://github.com/element-hq/synapse/issues/18737))

### Internal Changes

- Add debug logging for HMAC digest verification failures when using the admin API to register users; see the sketch after this list. ([\#18474](https://github.com/element-hq/synapse/issues/18474))
- Speed up upgrading a room with large numbers of banned users. ([\#18574](https://github.com/element-hq/synapse/issues/18574))
- Fix config documentation generation script on Windows by enforcing UTF-8. ([\#18580](https://github.com/element-hq/synapse/issues/18580))
- Refactor cache, background process, `Counter`, `LaterGauge`, `GaugeBucketCollector`, `Histogram`, and `Gauge` metrics to be homeserver-scoped. ([\#18656](https://github.com/element-hq/synapse/issues/18656), [\#18714](https://github.com/element-hq/synapse/issues/18714), [\#18715](https://github.com/element-hq/synapse/issues/18715), [\#18724](https://github.com/element-hq/synapse/issues/18724), [\#18753](https://github.com/element-hq/synapse/issues/18753), [\#18725](https://github.com/element-hq/synapse/issues/18725), [\#18670](https://github.com/element-hq/synapse/issues/18670), [\#18748](https://github.com/element-hq/synapse/issues/18748), [\#18751](https://github.com/element-hq/synapse/issues/18751))
- Reduce database usage in Sliding Sync by not querying for background update completion after the update is known to be complete. ([\#18718](https://github.com/element-hq/synapse/issues/18718))
- Improve order of validation and ratelimiting in room creation. ([\#18723](https://github.com/element-hq/synapse/issues/18723))
- Bump minimum version bound on Twisted to 21.2.0. ([\#18727](https://github.com/element-hq/synapse/issues/18727), [\#18729](https://github.com/element-hq/synapse/issues/18729))
- Use `twisted.internet.testing` module in tests instead of deprecated `twisted.test.proto_helpers`. ([\#18728](https://github.com/element-hq/synapse/issues/18728))
- Remove obsolete `/send_event` replication endpoint. ([\#18730](https://github.com/element-hq/synapse/issues/18730))
- Update metrics linting to be able to handle custom metrics. ([\#18733](https://github.com/element-hq/synapse/issues/18733))
- Work around `twisted.protocols.amp.TooLong` error by reducing logging in some tests. ([\#18736](https://github.com/element-hq/synapse/issues/18736))
- Prevent "Move labelled issues to correct projects" GitHub Actions workflow from failing when an issue is already on the project board. ([\#18755](https://github.com/element-hq/synapse/issues/18755))
- Bump minimum supported Rust version (MSRV) to 1.82.0. Missed in [#18553](https://github.com/element-hq/synapse/pull/18553) (released in Synapse 1.134.0). ([\#18757](https://github.com/element-hq/synapse/issues/18757))
- Make `Clock.sleep(...)` return a coroutine, so that mypy can catch places where we don't await on it. ([\#18772](https://github.com/element-hq/synapse/issues/18772))
- Update implementation of [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306) to include automatic subscription conflict prevention as introduced in later drafts. ([\#18756](https://github.com/element-hq/synapse/issues/18756))
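
For context on what is being verified there: the shared-secret registration admin API expects a SHA-1 HMAC over the nonce, username, password, and admin flag, joined with NUL bytes and keyed with the server's `registration_shared_secret`. A sketch of the client-side computation as documented for that admin API; the secret and field values are placeholders:

```python
import hmac
from hashlib import sha1

def registration_mac(
    shared_secret: str, nonce: str, user: str, password: str, admin: bool
) -> str:
    mac = hmac.new(shared_secret.encode("utf-8"), digestmod=sha1)
    # NUL-separated fields, in this exact order.
    mac.update(nonce.encode("utf-8"))
    mac.update(b"\x00")
    mac.update(user.encode("utf-8"))
    mac.update(b"\x00")
    mac.update(password.encode("utf-8"))
    mac.update(b"\x00")
    mac.update(b"admin" if admin else b"notadmin")
    return mac.hexdigest()

print(registration_mac("<shared_secret>", "<nonce>", "alice", "hunter2", admin=False))
```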


### Updates to locked dependencies

* Bump gitpython from 3.1.44 to 3.1.45. ([\#18743](https://github.com/element-hq/synapse/issues/18743))
* Bump mypy-zope from 1.0.12 to 1.0.13. ([\#18744](https://github.com/element-hq/synapse/issues/18744))
* Bump phonenumbers from 9.0.9 to 9.0.10. ([\#18741](https://github.com/element-hq/synapse/issues/18741))
* Bump ruff from 0.12.4 to 0.12.5. ([\#18742](https://github.com/element-hq/synapse/issues/18742))
* Bump sentry-sdk from 2.32.0 to 2.33.2. ([\#18745](https://github.com/element-hq/synapse/issues/18745))
* Bump tokio from 1.46.1 to 1.47.0. ([\#18740](https://github.com/element-hq/synapse/issues/18740))
* Bump types-jsonschema from 4.24.0.20250708 to 4.25.0.20250720. ([\#18703](https://github.com/element-hq/synapse/issues/18703))
* Bump types-psycopg2 from 2.9.21.20250516 to 2.9.21.20250718. ([\#18706](https://github.com/element-hq/synapse/issues/18706))


# Synapse 1.135.0 (2025-08-01)

No significant changes since 1.135.0rc2.


# Synapse 1.135.0rc2 (2025-07-30)

### Bugfixes

- Fix user failing to deactivate with MAS when `/_synapse/mas` is handled by a worker. ([\#18716](https://github.com/element-hq/synapse/issues/18716))

### Internal Changes

- Fix performance regression introduced in [#18238](https://github.com/element-hq/synapse/issues/18238) by adding a cache to `is_server_admin`. ([\#18747](https://github.com/element-hq/synapse/issues/18747))


# Synapse 1.135.0rc1 (2025-07-22)

### Features

- Add `recaptcha_private_key_path` and `recaptcha_public_key_path` config option. ([\#17984](https://github.com/element-hq/synapse/issues/17984), [\#18684](https://github.com/element-hq/synapse/issues/18684))
- Add plain-text handling for rich-text topics as per [MSC3765](https://github.com/matrix-org/matrix-spec-proposals/pull/3765). ([\#18195](https://github.com/element-hq/synapse/issues/18195))
- If enabled by the user, server admins will see [soft failed](https://spec.matrix.org/v1.13/server-server-api/#soft-failure) events over the Client-Server API. ([\#18238](https://github.com/element-hq/synapse/issues/18238))
- Add experimental support for [MSC4277: Harmonizing the reporting endpoints](https://github.com/matrix-org/matrix-spec-proposals/pull/4277). ([\#18263](https://github.com/element-hq/synapse/issues/18263))
- Add ability to limit amount of media uploaded by a user in a given time period. ([\#18527](https://github.com/element-hq/synapse/issues/18527))
- Enable workers to write directly to the device lists stream and handle device list updates, reducing load on the main process. ([\#18581](https://github.com/element-hq/synapse/issues/18581))
- Support arbitrary profile fields. Contributed by @clokep. ([\#18635](https://github.com/element-hq/synapse/issues/18635))
- Advertise support for Matrix v1.12. ([\#18647](https://github.com/element-hq/synapse/issues/18647))
- Add an option to issue redactions as an admin user via the [admin redaction endpoint](https://element-hq.github.io/synapse/latest/admin_api/user_admin_api.html#redact-all-the-events-of-a-user). ([\#18671](https://github.com/element-hq/synapse/issues/18671))
- Add experimental and incomplete support for [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/blob/rei/msc_thread_subscriptions/proposals/4306-thread-subscriptions.md). ([\#18674](https://github.com/element-hq/synapse/issues/18674))
- Include `event_id` when getting state with `?format=event`. Contributed by @tulir @ Beeper. ([\#18675](https://github.com/element-hq/synapse/issues/18675))

### Bugfixes

- Fix CPU and database spinning when retrying sending events to servers whilst at the same time purging those events. ([\#18499](https://github.com/element-hq/synapse/issues/18499))
- Don't allow creation of tags with names longer than 255 bytes, [as per the spec](https://spec.matrix.org/v1.15/client-server-api/#events-14). ([\#18660](https://github.com/element-hq/synapse/issues/18660))
- Fix `sliding_sync_connections`-related errors when porting from SQLite to Postgres. ([\#18677](https://github.com/element-hq/synapse/issues/18677))
- Fix the MAS integration not working when Synapse is started with `--daemonize` or using `synctl`. ([\#18691](https://github.com/element-hq/synapse/issues/18691))

### Improved Documentation

- Document that some config options for the user directory are in violation of the Matrix spec. ([\#18548](https://github.com/element-hq/synapse/issues/18548))
- Update `rc_delayed_event_mgmt` docs to the actual nesting level. Contributed by @HarHarLinks. ([\#18692](https://github.com/element-hq/synapse/issues/18692))

### Internal Changes

- Add a dedicated internal API for Matrix Authentication Service to Synapse communication. ([\#18520](https://github.com/element-hq/synapse/issues/18520))
- Allow user registrations to be done on workers. ([\#18552](https://github.com/element-hq/synapse/issues/18552))
- Remove unnecessary HTTP replication calls. ([\#18564](https://github.com/element-hq/synapse/issues/18564))
- Refactor `Measure` block metrics to be homeserver-scoped. ([\#18601](https://github.com/element-hq/synapse/issues/18601))
- Refactor cache metrics to be homeserver-scoped. ([\#18604](https://github.com/element-hq/synapse/issues/18604))
- Unbreak "Latest dependencies" workflow by using the `--without dev` poetry option instead of removed `--no-dev`. ([\#18617](https://github.com/element-hq/synapse/issues/18617))
- Update URL Preview code to work with `lxml` 6.0.0+. ([\#18622](https://github.com/element-hq/synapse/issues/18622))
- Use `markdown-it-py` instead of `commonmark` in the release script. ([\#18637](https://github.com/element-hq/synapse/issues/18637))
- Fix typing errors with upgraded mypy version. ([\#18653](https://github.com/element-hq/synapse/issues/18653))
- Add doc comment explaining that config files are shallowly merged. ([\#18664](https://github.com/element-hq/synapse/issues/18664))
- Minor speed up of insertion into `stream_positions` table. ([\#18672](https://github.com/element-hq/synapse/issues/18672))
- Remove unused `allow_no_prev_events` option when creating an event. ([\#18676](https://github.com/element-hq/synapse/issues/18676))
- Clean up `MetricsResource` and Prometheus hacks. ([\#18687](https://github.com/element-hq/synapse/issues/18687))
- Fix dirty `Cargo.lock` changes appearing after install (`base64`). ([\#18689](https://github.com/element-hq/synapse/issues/18689))
- Prevent dirty `Cargo.lock` changes from install. ([\#18693](https://github.com/element-hq/synapse/issues/18693))
- Correct spelling of 'Admin token used' log line. ([\#18697](https://github.com/element-hq/synapse/issues/18697))
- Reduce log spam when client stops downloading media while it is being streamed to them. ([\#18699](https://github.com/element-hq/synapse/issues/18699))


### Updates to locked dependencies

* Bump authlib from 1.6.0 to 1.6.1. ([\#18704](https://github.com/element-hq/synapse/issues/18704))
* Bump base64 from 0.21.7 to 0.22.1. ([\#18666](https://github.com/element-hq/synapse/issues/18666))
* Bump jsonschema from 4.24.0 to 4.25.0. ([\#18707](https://github.com/element-hq/synapse/issues/18707))
* Bump lxml from 5.4.0 to 6.0.0. ([\#18631](https://github.com/element-hq/synapse/issues/18631))
* Bump mypy from 1.13.0 to 1.16.1. ([\#18653](https://github.com/element-hq/synapse/issues/18653))
* Bump once_cell from 1.19.0 to 1.21.3. ([\#18710](https://github.com/element-hq/synapse/issues/18710))
* Bump phonenumbers from 9.0.8 to 9.0.9. ([\#18681](https://github.com/element-hq/synapse/issues/18681))
* Bump ruff from 0.12.2 to 0.12.5. ([\#18683](https://github.com/element-hq/synapse/issues/18683), [\#18705](https://github.com/element-hq/synapse/issues/18705))
* Bump serde_json from 1.0.140 to 1.0.141. ([\#18709](https://github.com/element-hq/synapse/issues/18709))
* Bump sigstore/cosign-installer from 3.9.1 to 3.9.2. ([\#18708](https://github.com/element-hq/synapse/issues/18708))
* Bump types-jsonschema from 4.24.0.20250528 to 4.24.0.20250708. ([\#18682](https://github.com/element-hq/synapse/issues/18682))


# Synapse 1.134.0 (2025-07-15)

No significant changes since 1.134.0rc1.
Cargo.lock (generated): diff suppressed because it is too large.
README.rst
@@ -8,7 +8,7 @@
 Synapse is an open source `Matrix <https://matrix.org>`__ homeserver
 implementation, written and maintained by `Element <https://element.io>`_.
 `Matrix <https://github.com/matrix-org>`__ is the open standard for
-secure and interoperable real time communications. You can directly run
+secure and interoperable real-time communications. You can directly run
 and manage the source code in this repository, available under an AGPL
 license (or alternatively under a commercial license from Element).
 There is no support provided by Element unless you have a

@@ -23,13 +23,13 @@ ESS builds on Synapse to offer a complete Matrix-based backend including the ful
 `Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
 giving admins the power to easily manage an organization-wide
 deployment. It includes advanced identity management, auditing,
-moderation and data retention options as well as Long Term Support and
-SLAs. ESS can be used to support any Matrix-based frontend client.
+moderation and data retention options as well as Long-Term Support and
+SLAs. ESS supports any Matrix-compatible client.

 .. contents::

-🛠️ Installing and configuration
-===============================
+🛠️ Installation and configuration
+==================================

 The Synapse documentation describes `how to install Synapse <https://element-hq.github.io/synapse/latest/setup/installation.html>`_. We recommend using
 `Docker images <https://element-hq.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org

@@ -133,7 +133,7 @@ connect from a client: see
 An easy way to get started is to login or register via Element at
 https://app.element.io/#/login or https://app.element.io/#/register respectively.
 You will need to change the server you are logging into from ``matrix.org``
-and instead specify a Homeserver URL of ``https://<server_name>:8448``
+and instead specify a homeserver URL of ``https://<server_name>:8448``
 (or just ``https://<server_name>`` if you are using a reverse proxy).
 If you prefer to use another client, refer to our
 `client breakdown <https://matrix.org/ecosystem/clients/>`_.

@@ -162,16 +162,15 @@ the public internet. Without it, anyone can freely register accounts on your hom
 This can be exploited by attackers to create spambots targeting the rest of the Matrix
 federation.

-Your new user name will be formed partly from the ``server_name``, and partly
-from a localpart you specify when you create the account. Your name will take
-the form of::
+Your new Matrix ID will be formed partly from the ``server_name``, and partly
+from a localpart you specify when you create the account in the form of::

     @localpart:my.domain.name

 (pronounced "at localpart on my dot domain dot name").

 As when logging in, you will need to specify a "Custom server". Specify your
-desired ``localpart`` in the 'User name' box.
+desired ``localpart`` in the 'Username' box.

 🎯 Troubleshooting and support
 ==============================

@@ -209,10 +208,10 @@ Identity servers have the job of mapping email addresses and other 3rd Party
 IDs (3PIDs) to Matrix user IDs, as well as verifying the ownership of 3PIDs
 before creating that mapping.

-**They are not where accounts or credentials are stored - these live on home
-servers. Identity Servers are just for mapping 3rd party IDs to matrix IDs.**
+**Identity servers do not store accounts or credentials - these are stored and managed on homeservers.
+Identity Servers are just for mapping 3rd Party IDs to Matrix IDs.**

-This process is very security-sensitive, as there is obvious risk of spam if it
+This process is highly security-sensitive, as there is an obvious risk of spam if it
 is too easy to sign up for Matrix accounts or harvest 3PID data. In the longer
 term, we hope to create a decentralised system to manage it (`matrix-doc #712
 <https://github.com/matrix-org/matrix-doc/issues/712>`_), but in the meantime,

@@ -238,9 +237,9 @@ email address.
 We welcome contributions to Synapse from the community!
 The best place to get started is our
 `guide for contributors <https://element-hq.github.io/synapse/latest/development/contributing_guide.html>`_.
-This is part of our larger `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
+This is part of our broader `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
 information for Synapse developers as well as Synapse administrators.

 Developers might be particularly interested in:

 * `Synapse's database schema <https://element-hq.github.io/synapse/latest/development/database_schema.html>`_,

@@ -266,6 +265,8 @@ This software is dual-licensed by New Vector Ltd (Element). It can be used eithe
 Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses.

+Please contact `licensing@element.io <mailto:licensing@element.io>`_ to purchase an Element commercial license for this software.
+
 .. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
    :alt: (get community support in #synapse:matrix.org)
@@ -2,13 +2,13 @@

 import itertools
 import os
-from typing import Any, Dict
+from typing import Any

 from packaging.specifiers import SpecifierSet
 from setuptools_rust import Binding, RustExtension


-def build(setup_kwargs: Dict[str, Any]) -> None:
+def build(setup_kwargs: dict[str, Any]) -> None:
     original_project_dir = os.path.dirname(os.path.realpath(__file__))
     cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")

@@ -19,17 +19,17 @@ def build(setup_kwargs: Dict[str, Any]) -> None:
         # This flag is a no-op in the latest versions. Instead, we need to
         # specify this in the `bdist_wheel` config below.
         py_limited_api=True,
-        # We force always building in release mode, as we can't tell the
-        # difference between using `poetry` in development vs production.
+        # We always build in release mode, as we can't distinguish
+        # between using `poetry` in development vs production.
         debug=False,
     )
     setup_kwargs.setdefault("rust_extensions", []).append(extension)
     setup_kwargs["zip_safe"] = False

-    # We lookup the minimum supported python version by looking at
-    # `python_requires` (e.g. ">=3.9.0,<4.0.0") and finding the first python
+    # We look up the minimum supported Python version with
+    # `python_requires` (e.g. ">=3.9.0,<4.0.0") and finding the first Python
     # version that matches. We then convert that into the `py_limited_api` form,
-    # e.g. cp39 for python 3.9.
+    # e.g. cp39 for Python 3.9.
     py_limited_api: str
     python_bounds = SpecifierSet(setup_kwargs["python_requires"])
     for minor_version in itertools.count(start=8):
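
The loop above is truncated by the diff view; as a standalone illustration of the lookup it performs, here is a sketch using the same `packaging` API. The helper name and the safety cap are mine, not the build script's:

```python
import itertools
from packaging.specifiers import SpecifierSet

def min_py_limited_api(python_requires: str) -> str:
    """Find the first Python 3.x minor version matching `python_requires`
    and convert it into a `py_limited_api` tag such as `cp39`."""
    bounds = SpecifierSet(python_requires)
    for minor in itertools.count(start=8):
        if minor > 50:  # safety cap so bad input can't loop forever (illustrative)
            raise ValueError(f"no matching version for {python_requires}")
        if bounds.contains(f"3.{minor}.0"):
            return f"cp3{minor}"

print(min_py_limited_api(">=3.9.0,<4.0.0"))  # -> cp39
```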
|
||||
|
||||
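To make the version lookup above concrete, here is a minimal, hedged sketch (not the repo's exact build hook; the bound string is an assumed example, where the real code reads it from `setup_kwargs["python_requires"]`) of scanning `itertools.count(start=8)` until a version satisfies the specifier:

```python
import itertools

from packaging.specifiers import SpecifierSet

# Assumed example bound; the build hook takes this from pyproject.toml.
python_bounds = SpecifierSet(">=3.9.0,<4.0.0")
for minor_version in itertools.count(start=8):
    if f"3.{minor_version}.0" in python_bounds:
        # First matching minor version: convert to the abi3 tag form.
        py_limited_api = f"cp3{minor_version}"
        break

print(py_limited_api)  # prints "cp39" for the bound above
```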
changelog.d/19021.feature (new file)
@@ -0,0 +1,2 @@
Add an [Admin API](https://element-hq.github.io/synapse/latest/usage/administration/admin_api/index.html)
to allow an admin to fetch the space/room hierarchy for a given space.

changelog.d/19046.misc (new file)
@@ -0,0 +1 @@
Use type hinting generics in standard collections, as per PEP 585, added in Python 3.9.

changelog.d/19047.doc (new file)
@@ -0,0 +1 @@
Update the link to the Debian oldstable package for SQLite.

changelog.d/19047.misc (new file)
@@ -0,0 +1 @@
Always treat `RETURNING` as supported by SQL engines, now that the minimum-supported versions of both SQLite and PostgreSQL support it.

changelog.d/19047.removal (new file)
@@ -0,0 +1 @@
Remove support for SQLite < 3.37.2.

changelog.d/19073.doc (new file)
@@ -0,0 +1 @@
Point out additional Redis configuration options available in the worker docs. Contributed by @servisbryce.

changelog.d/19079.bugfix (new file)
@@ -0,0 +1 @@
Fix the `oidc_session_no_samesite` cookie to have the `Secure` attribute, so the only difference between it and the paired `oidc_session` cookie is the configuration of the `SameSite` attribute, as described in the comments / cookie names. Contributed by @kieranlane.

changelog.d/19080.misc (new file)
@@ -0,0 +1 @@
Update deprecated code in the release script to prevent a warning message from being printed.

changelog.d/19081.misc (new file)
@@ -0,0 +1 @@
Update the deprecated poetry development dependencies group name in `pyproject.toml`.

changelog.d/19085.misc (new file)
@@ -0,0 +1 @@
Remove `pp38*` skip selector from cibuildwheel to silence warning.

changelog.d/19088.misc (new file)
@@ -0,0 +1 @@
Don't immediately exit the release script if the checkout is dirty. Instead, allow the user to clear the dirty changes and retry.

changelog.d/19089.misc (new file)
@@ -0,0 +1 @@
Update the release script's generated announcement text to include a title and extra text for RCs.

changelog.d/19092.misc (new file)
@@ -0,0 +1 @@
Fix lints on main branch.
@@ -4396,7 +4396,7 @@
          "exemplar": false,
          "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_received_pdu_time[10m]))) / 60",
          "instant": false,
          "legendFormat": "{{server_name}} ",
          "legendFormat": "{{origin_server_name}} ",
          "range": true,
          "refId": "A"
        }
@@ -4518,7 +4518,7 @@
          "exemplar": false,
          "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_sent_pdu_time[10m]))) / 60",
          "instant": false,
          "legendFormat": "{{server_name}}",
          "legendFormat": "{{destination_server_name}}",
          "range": true,
          "refId": "A"
        }
@@ -24,7 +24,6 @@ import datetime
import html
import json
import urllib.request
from typing import List

import pydot

@@ -33,7 +32,7 @@ def make_name(pdu_id: str, origin: str) -> str:
    return f"{pdu_id}@{origin}"


def make_graph(pdus: List[dict], filename_prefix: str) -> None:
def make_graph(pdus: list[dict], filename_prefix: str) -> None:
    """
    Generate a dot and SVG file for a graph of events in the room based on the
    topological ordering by querying a homeserver.
@@ -127,7 +126,7 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
    graph.write_svg("%s.svg" % filename_prefix, prog="dot")


def get_pdus(host: str, room: str) -> List[dict]:
def get_pdus(host: str, room: str) -> list[dict]:
    transaction = json.loads(
        urllib.request.urlopen(
            f"http://{host}/_matrix/federation/v1/context/{room}/"
debian/changelog (vendored)
@@ -1,3 +1,149 @@
matrix-synapse-py3 (1.141.0~rc1) stable; urgency=medium

  * New Synapse release 1.141.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Oct 2025 11:01:44 +0100

matrix-synapse-py3 (1.140.0) stable; urgency=medium

  * New Synapse release 1.140.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Oct 2025 15:22:36 +0100

matrix-synapse-py3 (1.140.0~rc1) stable; urgency=medium

  * New Synapse release 1.140.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 10 Oct 2025 10:56:51 +0100

matrix-synapse-py3 (1.139.2) stable; urgency=medium

  * New Synapse release 1.139.2.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 16:29:47 +0100

matrix-synapse-py3 (1.139.1) stable; urgency=medium

  * New Synapse release 1.139.1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 11:46:51 +0100

matrix-synapse-py3 (1.138.4) stable; urgency=medium

  * New Synapse release 1.138.4.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 16:28:38 +0100

matrix-synapse-py3 (1.138.3) stable; urgency=medium

  * New Synapse release 1.138.3.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 12:54:18 +0100

matrix-synapse-py3 (1.139.0) stable; urgency=medium

  * New Synapse release 1.139.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Sep 2025 11:58:55 +0100

matrix-synapse-py3 (1.139.0~rc3) stable; urgency=medium

  * New Synapse release 1.139.0rc3.

 -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Sep 2025 12:13:23 +0100

matrix-synapse-py3 (1.138.2) stable; urgency=medium

  * The licensing specifier has been updated to add an optional
    `LicenseRef-Element-Commercial` license. The code was already licensed in
    this manner - the debian metadata was just not updated to reflect it.

 -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Sep 2025 12:17:17 +0100

matrix-synapse-py3 (1.138.1) stable; urgency=medium

  * New Synapse release 1.138.1.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 24 Sep 2025 11:32:38 +0100

matrix-synapse-py3 (1.139.0~rc2) stable; urgency=medium

  * New Synapse release 1.139.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 15:31:42 +0100

matrix-synapse-py3 (1.139.0~rc1) stable; urgency=medium

  * New Synapse release 1.139.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 13:24:50 +0100

matrix-synapse-py3 (1.138.0~rc1) stable; urgency=medium

  * New synapse release 1.138.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 02 Sep 2025 12:16:14 +0000

matrix-synapse-py3 (1.137.0) stable; urgency=medium

  * New Synapse release 1.137.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Aug 2025 10:23:41 +0100

matrix-synapse-py3 (1.137.0~rc1) stable; urgency=medium

  * New Synapse release 1.137.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 19 Aug 2025 10:55:22 +0100

matrix-synapse-py3 (1.136.0) stable; urgency=medium

  * New Synapse release 1.136.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Aug 2025 13:18:03 +0100

matrix-synapse-py3 (1.136.0~rc2) stable; urgency=medium

  * New Synapse release 1.136.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Aug 2025 12:18:52 -0600

matrix-synapse-py3 (1.136.0~rc1) stable; urgency=medium

  * New Synapse release 1.136.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 05 Aug 2025 08:13:30 -0600

matrix-synapse-py3 (1.135.2) stable; urgency=medium

  * New Synapse release 1.135.2.

 -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Aug 2025 11:52:01 -0600

matrix-synapse-py3 (1.135.1) stable; urgency=medium

  * New Synapse release 1.135.1.

 -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Aug 2025 11:13:15 -0600

matrix-synapse-py3 (1.135.0) stable; urgency=medium

  * New Synapse release 1.135.0.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 01 Aug 2025 13:12:28 +0100

matrix-synapse-py3 (1.135.0~rc2) stable; urgency=medium

  * New Synapse release 1.135.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 30 Jul 2025 12:19:14 +0100

matrix-synapse-py3 (1.135.0~rc1) stable; urgency=medium

  * New Synapse release 1.135.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 22 Jul 2025 12:08:37 +0100

matrix-synapse-py3 (1.134.0) stable; urgency=medium

  * New Synapse release 1.134.0.
debian/copyright (vendored)
@@ -8,7 +8,7 @@ License: Apache-2.0

Files: *
Copyright: 2023 New Vector Ltd
License: AGPL-3.0-or-later
License: AGPL-3.0-or-later or LicenseRef-Element-Commercial

Files: synapse/config/saml2.py
Copyright: 2015, Ericsson
@@ -20,8 +20,8 @@
# `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs
# in `poetry export` in the past.

ARG DEBIAN_VERSION=bookworm
ARG PYTHON_VERSION=3.12
ARG DEBIAN_VERSION=trixie
ARG PYTHON_VERSION=3.13
ARG POETRY_VERSION=2.1.1

###
@@ -142,10 +142,10 @@ RUN \
    libwebp7 \
    xmlsec1 \
    libjemalloc2 \
    libicu \
  | grep '^\w' > /tmp/pkg-list && \
  for arch in arm64 amd64; do \
    mkdir -p /tmp/debs-${arch} && \
    chown _apt:root /tmp/debs-${arch} && \
    cd /tmp/debs-${arch} && \
    apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \
  done
@@ -171,16 +171,11 @@ FROM docker.io/library/python:${PYTHON_VERSION}-slim-${DEBIAN_VERSION}

ARG TARGETARCH

LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
LABEL org.opencontainers.image.url='https://github.com/element-hq/synapse'
LABEL org.opencontainers.image.documentation='https://element-hq.github.io/synapse/latest/'
LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'
LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later OR LicenseRef-Element-Commercial'

# On the runtime image, /lib is a symlink to /usr/lib, so we need to copy the
# libraries to the right place, else the `COPY` won't work.
# On amd64, we'll also have a /lib64 folder with ld-linux-x86-64.so.2, which is
# already present in the runtime image.
COPY --from=runtime-deps /install-${TARGETARCH}/lib /usr/lib
COPY --from=runtime-deps /install-${TARGETARCH}/etc /etc
COPY --from=runtime-deps /install-${TARGETARCH}/usr /usr
COPY --from=runtime-deps /install-${TARGETARCH}/var /var
@@ -1,9 +1,10 @@
# syntax=docker/dockerfile:1
# syntax=docker/dockerfile:1-labs

ARG SYNAPSE_VERSION=latest
ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
ARG DEBIAN_VERSION=bookworm
ARG PYTHON_VERSION=3.12
ARG DEBIAN_VERSION=trixie
ARG PYTHON_VERSION=3.13
ARG REDIS_VERSION=7.2

# first of all, we create a base image with dependencies which we can copy into the
# target image. For repeated rebuilds, this is much faster than apt installing
@@ -11,15 +12,27 @@ ARG PYTHON_VERSION=3.12

FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base

ARG DEBIAN_VERSION
ARG REDIS_VERSION

# Tell apt to keep downloaded package files, as we're using cache mounts.
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache

# The upstream redis-server deb has fewer dynamic libraries than Debian's package, which makes it easier to copy later on
RUN \
  curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \
  chmod 644 /usr/share/keyrings/redis-archive-keyring.gpg && \
  echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb ${DEBIAN_VERSION} main" | tee /etc/apt/sources.list.d/redis.list

RUN \
  --mount=type=cache,target=/var/cache/apt,sharing=locked \
  --mount=type=cache,target=/var/lib/apt,sharing=locked \
  apt-get update -qq && \
  DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
    nginx-light
    nginx-light \
    redis-server="6:${REDIS_VERSION}.*" redis-tools="6:${REDIS_VERSION}.*" \
    # libicu is required by postgres, see `docker/complement/Dockerfile`
    libicu76

RUN \
  # remove default page
@@ -35,19 +48,12 @@ FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base

RUN mkdir -p /uv/etc/supervisor/conf.d

# Similarly, a base to copy the redis server from.
#
# The redis docker image has fewer dynamic libraries than the debian package,
# which makes it much easier to copy (but we need to make sure we use an image
# based on the same debian version as the synapse image, to make sure we get
# the expected version of libc.
FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base

# now build the final image, based on the regular Synapse docker image
FROM $FROM

# Copy over dependencies
COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin
COPY --from=deps_base --parents /usr/lib/*-linux-gnu/libicu* /
COPY --from=deps_base /usr/bin/redis-server /usr/local/bin
COPY --from=deps_base /uv /
COPY --from=deps_base /usr/sbin/nginx /usr/sbin
COPY --from=deps_base /usr/share/nginx /usr/share/nginx
@@ -9,7 +9,7 @@
ARG SYNAPSE_VERSION=latest
# This is an intermediate image, to be built locally (not pulled from a registry).
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
ARG DEBIAN_VERSION=bookworm
ARG DEBIAN_VERSION=trixie

FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base

@@ -18,10 +18,10 @@ FROM $FROM
# since for repeated rebuilds, this is much faster than apt installing
# postgres each time.

# This trick only works because (a) the Synapse image happens to have all the
# shared libraries that postgres wants, (b) we use a postgres image based on
# the same debian version as Synapse's docker image (so the versions of the
# shared libraries match).
# This trick only works because we use a postgres image based on the same
# debian version as Synapse's docker image (so the versions of the shared
# libraries match). Any missing libraries need to be added to either the
# Synapse image or docker/Dockerfile-workers.
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
@@ -54,7 +54,6 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
  export SYNAPSE_WORKER_TYPES="\
      event_persister:2, \
      background_worker, \
      frontend_proxy, \
      event_creator, \
      user_dir, \
      media_repository, \
@@ -65,6 +64,7 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
      client_reader, \
      appservice, \
      pusher, \
      device_lists:2, \
      stream_writers=account_data+presence+receipts+to_device+typing"

fi
@@ -98,6 +98,10 @@ rc_delayed_event_mgmt:
  per_second: 9999
  burst_count: 9999

rc_room_creation:
  per_second: 9999
  burst_count: 9999

federation_rr_transactions_per_room_per_second: 9999

allow_device_name_lookup_over_federation: true
@@ -129,6 +133,8 @@ experimental_features:
  msc3984_appservice_key_query: true
  # Invite filtering
  msc4155_enabled: true
  # Thread Subscriptions
  msc4306_enabled: true

server_notices:
  system_mxid_localpart: _server
@@ -77,6 +77,13 @@ loggers:
    #}
    synapse.visibility.filtered_event_debug:
        level: DEBUG

    {#
      If Synapse is under test, we don't care about seeing the "Applying schema" log
      lines at the INFO level every time we run the tests (it's 100 lines of bulk)
    #}
    synapse.storage.prepare_database:
        level: WARN
{% endif %}

root:
@@ -65,13 +65,10 @@ from itertools import chain
from pathlib import Path
from typing import (
    Any,
    Dict,
    List,
    Mapping,
    MutableMapping,
    NoReturn,
    Optional,
    Set,
    SupportsIndex,
)

@@ -96,7 +93,7 @@ WORKER_PLACEHOLDER_NAME = "placeholder_name"
# Watching /_matrix/media and related needs a "media" listener
# Stream Writers require "client" and "replication" listeners because they
# have to attach by instance_map to the master process and have client endpoints.
WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
WORKERS_CONFIG: dict[str, dict[str, Any]] = {
    "pusher": {
        "app": "synapse.app.generic_worker",
        "listener_resources": [],
@@ -178,6 +175,9 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
            "^/_matrix/client/(api/v1|r0|v3|unstable)/login$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/account/3pid$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/account/whoami$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/account/deactivate$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/devices(/|$)",
            "^/_matrix/client/(r0|v3)/delete_devices$",
            "^/_matrix/client/versions$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
            "^/_matrix/client/(r0|v3|unstable)/register$",
@@ -194,6 +194,9 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
            "^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$",
            "^/_matrix/client/(r0|v3|unstable)/capabilities$",
            "^/_matrix/client/(r0|v3|unstable)/notifications$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$",
        ],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
@@ -265,13 +268,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
    "frontend_proxy": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client", "replication"],
        "endpoint_patterns": ["^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload"],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
    "account_data": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client", "replication"],
@@ -306,6 +302,13 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
    "device_lists": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client", "replication"],
        "endpoint_patterns": [],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
    "typing": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client", "replication"],
@@ -322,6 +325,15 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
    "thread_subscriptions": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client", "replication"],
        "endpoint_patterns": [
            "^/_matrix/client/unstable/io.element.msc4306/.*",
        ],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
}

# Templates for sections that may be inserted multiple times in config files
@@ -393,7 +405,7 @@ def convert(src: str, dst: str, **template_vars: object) -> None:

def add_worker_roles_to_shared_config(
    shared_config: dict,
    worker_types_set: Set[str],
    worker_types_set: set[str],
    worker_name: str,
    worker_port: int,
) -> None:
@@ -412,16 +424,18 @@ def add_worker_roles_to_shared_config(
    # streams
    instance_map = shared_config.setdefault("instance_map", {})

    # This is a list of the stream_writers that there can be only one of. Events can be
    # sharded, and therefore doesn't belong here.
    singular_stream_writers = [
    # This is a list of the stream_writers.
    stream_writers = {
        "account_data",
        "events",
        "device_lists",
        "presence",
        "receipts",
        "to_device",
        "typing",
        "push_rules",
    ]
        "thread_subscriptions",
    }

    # Worker-type specific sharding config. Now a single worker can fulfill multiple
    # roles, check each.
@@ -431,28 +445,11 @@ def add_worker_roles_to_shared_config(
    if "federation_sender" in worker_types_set:
        shared_config.setdefault("federation_sender_instances", []).append(worker_name)

    if "event_persister" in worker_types_set:
        # Event persisters write to the events stream, so we need to update
        # the list of event stream writers
        shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
            worker_name
        )

    # Map of stream writer instance names to host/ports combos
    if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
        instance_map[worker_name] = {
            "path": f"/run/worker.{worker_port}",
        }
    else:
        instance_map[worker_name] = {
            "host": "localhost",
            "port": worker_port,
        }
    # Update the list of stream writers. It's convenient that the name of the worker
    # type is the same as the stream to write. Iterate over the whole list in case there
    # is more than one.
    for worker in worker_types_set:
        if worker in singular_stream_writers:
        if worker in stream_writers:
            shared_config.setdefault("stream_writers", {}).setdefault(
                worker, []
            ).append(worker_name)
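For orientation, here is a hedged illustration (worker name and port are invented; this is not code from the repo) of what the loop above contributes to the shared config for a worker serving the `typing` stream:

```python
# Hypothetical worker "typing1" whose worker_types_set is {"typing"}.
shared_config: dict = {}
worker_name = "typing1"
worker_types_set = {"typing"}
stream_writers = {
    "account_data", "events", "device_lists", "presence", "receipts",
    "to_device", "typing", "push_rules", "thread_subscriptions",
}

for worker in worker_types_set:
    if worker in stream_writers:
        shared_config.setdefault("stream_writers", {}).setdefault(
            worker, []
        ).append(worker_name)

assert shared_config == {"stream_writers": {"typing": ["typing1"]}}
```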
@@ -471,9 +468,9 @@ def add_worker_roles_to_shared_config(


def merge_worker_template_configs(
    existing_dict: Optional[Dict[str, Any]],
    to_be_merged_dict: Dict[str, Any],
) -> Dict[str, Any]:
    existing_dict: Optional[dict[str, Any]],
    to_be_merged_dict: dict[str, Any],
) -> dict[str, Any]:
    """When given an existing dict of worker template configuration consisting with both
    dicts and lists, merge new template data from WORKERS_CONFIG (or create) and
    return new dict.
@@ -484,7 +481,7 @@ def merge_worker_template_configs(
        existing_dict.
    Returns: The newly merged together dict values.
    """
    new_dict: Dict[str, Any] = {}
    new_dict: dict[str, Any] = {}
    if not existing_dict:
        # It doesn't exist yet, just use the new dict (but take a copy, not a reference)
        new_dict = to_be_merged_dict.copy()
@@ -509,8 +506,8 @@ def merge_worker_template_configs(


def insert_worker_name_for_worker_config(
    existing_dict: Dict[str, Any], worker_name: str
) -> Dict[str, Any]:
    existing_dict: dict[str, Any], worker_name: str
) -> dict[str, Any]:
    """Insert a given worker name into the worker's configuration dict.

    Args:
@@ -526,7 +523,7 @@ def insert_worker_name_for_worker_config(
    return dict_to_edit


def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]:
def apply_requested_multiplier_for_worker(worker_types: list[str]) -> list[str]:
    """
    Apply multiplier (if found) by returning a new expanded list with some basic error
    checking.
@@ -587,7 +584,7 @@ def is_sharding_allowed_for_worker_type(worker_type: str) -> bool:

def split_and_strip_string(
    given_string: str, split_char: str, max_split: SupportsIndex = -1
) -> List[str]:
) -> list[str]:
    """
    Helper to split a string on split_char and strip whitespace from each end of each
    element.
@@ -616,8 +613,8 @@ def generate_base_homeserver_config() -> None:


def parse_worker_types(
    requested_worker_types: List[str],
) -> Dict[str, Set[str]]:
    requested_worker_types: list[str],
) -> dict[str, set[str]]:
    """Read the desired list of requested workers and prepare the data for use in
    generating worker config files while also checking for potential gotchas.

@@ -633,14 +630,14 @@ def parse_worker_types(
    # A counter of worker_base_name -> int. Used for determining the name for a given
    # worker when generating its config file, as each worker's name is just
    # worker_base_name followed by instance number
    worker_base_name_counter: Dict[str, int] = defaultdict(int)
    worker_base_name_counter: dict[str, int] = defaultdict(int)

    # Similar to above, but more finely grained. This is used to determine we don't have
    # more than a single worker for cases where multiples would be bad (e.g. presence).
    worker_type_shard_counter: Dict[str, int] = defaultdict(int)
    worker_type_shard_counter: dict[str, int] = defaultdict(int)

    # The final result of all this processing
    dict_to_return: Dict[str, Set[str]] = {}
    dict_to_return: dict[str, set[str]] = {}

    # Handle any multipliers requested for given workers.
    multiple_processed_worker_types = apply_requested_multiplier_for_worker(
@@ -684,7 +681,7 @@ def parse_worker_types(

        # Split the worker_type_string on "+", remove whitespace from ends then make
        # the list a set so it's deduplicated.
        worker_types_set: Set[str] = set(
        worker_types_set: set[str] = set(
            split_and_strip_string(worker_type_string, "+")
        )

@@ -743,7 +740,7 @@ def generate_worker_files(
    environ: Mapping[str, str],
    config_path: str,
    data_dir: str,
    requested_worker_types: Dict[str, Set[str]],
    requested_worker_types: dict[str, set[str]],
) -> None:
    """Read the desired workers (if any) that is passed in and generate shared
    homeserver, nginx and supervisord configs.
@@ -764,7 +761,7 @@ def generate_worker_files(
    # First read the original config file and extract the listeners block. Then we'll
    # add another listener for replication. Later we'll write out the result to the
    # shared config file.
    listeners: List[Any]
    listeners: list[Any]
    if using_unix_sockets:
        listeners = [
            {
@@ -792,12 +789,12 @@ def generate_worker_files(
    # base shared worker jinja2 template. This config file will be passed to all
    # workers, included Synapse's main process. It is intended mainly for disabling
    # functionality when certain workers are spun up, and adding a replication listener.
    shared_config: Dict[str, Any] = {"listeners": listeners}
    shared_config: dict[str, Any] = {"listeners": listeners}

    # List of dicts that describe workers.
    # We pass this to the Supervisor template later to generate the appropriate
    # program blocks.
    worker_descriptors: List[Dict[str, Any]] = []
    worker_descriptors: list[dict[str, Any]] = []

    # Upstreams for load-balancing purposes. This dict takes the form of the worker
    # type to the ports of each worker. For example:
@@ -805,14 +802,14 @@ def generate_worker_files(
    #   worker_type: {1234, 1235, ...}}
    # }
    # and will be used to construct 'upstream' nginx directives.
    nginx_upstreams: Dict[str, Set[int]] = {}
    nginx_upstreams: dict[str, set[int]] = {}

    # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what
    # will be placed after the proxy_pass directive. The main benefit to representing
    # this data as a dict over a str is that we can easily deduplicate endpoints
    # across multiple instances of the same worker. The final rendering will be combined
    # with nginx_upstreams and placed in /etc/nginx/conf.d.
    nginx_locations: Dict[str, str] = {}
    nginx_locations: dict[str, str] = {}

    # Create the worker configuration directory if it doesn't already exist
    os.makedirs("/conf/workers", exist_ok=True)
@@ -846,7 +843,7 @@ def generate_worker_files(
    # yaml config file
    for worker_name, worker_types_set in requested_worker_types.items():
        # The collected and processed data will live here.
        worker_config: Dict[str, Any] = {}
        worker_config: dict[str, Any] = {}

        # Merge all worker config templates for this worker into a single config
        for worker_type in worker_types_set:
@@ -876,6 +873,13 @@ def generate_worker_files(
        else:
            healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))

        # Special case for event_persister: those are just workers that write to
        # the `events` stream. For other workers, the worker name is the same
        # name of the stream they write to, but for some reason it is not the
        # case for event_persister.
        if "event_persister" in worker_types_set:
            worker_types_set.add("events")

        # Update the shared config with sharding-related options if necessary
        add_worker_roles_to_shared_config(
            shared_config, worker_types_set, worker_name, worker_port
@@ -1022,7 +1026,7 @@ def generate_worker_log_config(
    Returns: the path to the generated file
    """
    # Check whether we should write worker logs to disk, in addition to the console
    extra_log_template_args: Dict[str, Optional[str]] = {}
    extra_log_template_args: dict[str, Optional[str]] = {}
    if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
        extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log"

@@ -1046,7 +1050,7 @@ def generate_worker_log_config(
    return log_config_filepath


def main(args: List[str], environ: MutableMapping[str, str]) -> None:
def main(args: list[str], environ: MutableMapping[str, str]) -> None:
    parser = ArgumentParser()
    parser.add_argument(
        "--generate-only",
@@ -1080,7 +1084,7 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
    if not worker_types_env:
        # No workers, just the main process
        worker_types = []
        requested_worker_types: Dict[str, Any] = {}
        requested_worker_types: dict[str, Any] = {}
    else:
        # Split type names by comma, ignoring whitespace.
        worker_types = split_and_strip_string(worker_types_env, ",")
@@ -8,9 +8,9 @@ ARG PYTHON_VERSION=3.9
###
### Stage 0: generate requirements.txt
###
# We hardcode the use of Debian bookworm here because this could change upstream
# and other Dockerfiles used for testing are expecting bookworm.
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
# We hardcode the use of Debian trixie here because this could change upstream
# and other Dockerfiles used for testing are expecting trixie.
FROM docker.io/library/python:${PYTHON_VERSION}-slim-trixie

# Install Rust and other dependencies (stolen from normal Dockerfile)
# install the OS build deps
@@ -6,7 +6,7 @@ import os
import platform
import subprocess
import sys
from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional
from typing import Any, Mapping, MutableMapping, NoReturn, Optional

import jinja2

@@ -69,7 +69,7 @@ def generate_config_from_template(
    )

    # populate some params from data files (if they exist, else create new ones)
    environ: Dict[str, Any] = dict(os_environ)
    environ: dict[str, Any] = dict(os_environ)
    secrets = {
        "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
        "macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
@@ -200,7 +200,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
    subprocess.run(args, check=True)


def main(args: List[str], environ: MutableMapping[str, str]) -> None:
def main(args: list[str], environ: MutableMapping[str, str]) -> None:
    mode = args[1] if len(args) > 1 else "run"

    # if we were given an explicit user to switch to, do so
@@ -60,6 +60,7 @@
  - [Admin API](usage/administration/admin_api/README.md)
    - [Account Validity](admin_api/account_validity.md)
    - [Background Updates](usage/administration/admin_api/background_updates.md)
    - [Fetch Event](admin_api/fetch_event.md)
    - [Event Reports](admin_api/event_reports.md)
    - [Experimental Features](admin_api/experimental_features.md)
    - [Media](admin_api/media_admin_api.md)
@@ -74,6 +75,7 @@
    - [Users](admin_api/user_admin_api.md)
    - [Server Version](admin_api/version_api.md)
    - [Federation](usage/administration/admin_api/federation.md)
  - [Client-Server API Extensions](admin_api/client_server_api_extensions.md)
  - [Manhole](manhole.md)
  - [Monitoring](metrics-howto.md)
  - [Reporting Homeserver Usage Statistics](usage/administration/monitoring/reporting_homeserver_usage_statistics.md)
docs/admin_api/client_server_api_extensions.md (new file)
@@ -0,0 +1,67 @@
# Client-Server API Extensions

Server administrators can set special account data to change how the Client-Server API behaves for
their clients. Setting the account data, or having it already set, as a non-admin has no effect.

All configuration options can be set through the `io.element.synapse.admin_client_config` global
account data on the admin's user account.

Example:
```
PUT /_matrix/client/v3/user/{adminUserId}/account_data/io.element.synapse.admin_client_config
{
  "return_soft_failed_events": true
}
```

## See soft failed events

Learn more about soft failure from [the spec](https://spec.matrix.org/v1.14/server-server-api/#soft-failure).

To receive soft failed events in APIs like `/sync` and `/messages`, set `return_soft_failed_events`
to `true` in the admin client config. When `false`, the normal behaviour of these endpoints is to
exclude soft failed events.

**Note**: If the policy server flagged the event as spam and that caused soft failure, that will be indicated
in the event's `unsigned` content like so:

```json
{
  "type": "m.room.message",
  "other": "event_fields_go_here",
  "unsigned": {
    "io.element.synapse.soft_failed": true,
    "io.element.synapse.policy_server_spammy": true
  }
}
```

Default: `false`

## See events marked spammy by policy servers

Learn more about policy servers from [MSC4284](https://github.com/matrix-org/matrix-spec-proposals/pull/4284).

Similar to `return_soft_failed_events`, clients logged in with admin accounts can see events which were
flagged by the policy server as spammy (and thus soft failed) by setting `return_policy_server_spammy_events`
to `true`.

`return_policy_server_spammy_events` may be `true` while `return_soft_failed_events` is `false` to only see
policy server-flagged events. When `return_soft_failed_events` is `true`, however, `return_policy_server_spammy_events`
is always `true`.

Events which were flagged by the policy server will be flagged as `io.element.synapse.policy_server_spammy` in the
event's `unsigned` content, like so:

```json
{
  "type": "m.room.message",
  "other": "event_fields_go_here",
  "unsigned": {
    "io.element.synapse.soft_failed": true,
    "io.element.synapse.policy_server_spammy": true
  }
}
```

Default: `true` if `return_soft_failed_events` is `true`, otherwise `false`
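As a hedged sketch of the `PUT` above using Python's `requests` library (the homeserver URL, admin user ID, and token are placeholders, not values from this repo):

```python
import requests

HOMESERVER = "https://synapse.example.com"  # placeholder
ADMIN_USER = "@admin:example.com"           # placeholder
TOKEN = "<admin access token>"              # placeholder

resp = requests.put(
    f"{HOMESERVER}/_matrix/client/v3/user/{ADMIN_USER}/account_data/"
    "io.element.synapse.admin_client_config",
    headers={"Authorization": f"Bearer {TOKEN}"},
    json={"return_soft_failed_events": True},
)
resp.raise_for_status()
```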
docs/admin_api/fetch_event.md (new file)
@@ -0,0 +1,53 @@
# Fetch Event API

The fetch event API allows admins to fetch an event regardless of their membership in the room it
originated in.

To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).

Request:
```http
GET /_synapse/admin/v1/fetch_event/<event_id>
```

The API returns a JSON body like the following:

Response:
```json
{
  "event": {
    "auth_events": [
      "$WhLChbYg6atHuFRP7cUd95naUtc8L0f7fqeizlsUVvc",
      "$9Wj8dt02lrNEWweeq-KjRABUYKba0K9DL2liRvsAdtQ",
      "$qJxBFxBt8_ODd9b3pgOL_jXP98S_igc1_kizuPSZFi4"
    ],
    "content": {
      "body": "Hey now",
      "msgtype": "m.text"
    },
    "depth": 6,
    "event_id": "$hJ_kcXbVMcI82JDrbqfUJIHu61tJD86uIFJ_8hNHi7s",
    "hashes": {
      "sha256": "LiNw8DtrRVf55EgAH8R42Wz7WCJUqGsPt2We6qZO5Rg"
    },
    "origin_server_ts": 799,
    "prev_events": [
      "$cnSUrNMnC3Ywh9_W7EquFxYQjC_sT3BAAVzcUVxZq1g"
    ],
    "room_id": "!aIhKToCqgPTBloWMpf:test",
    "sender": "@user:test",
    "signatures": {
      "test": {
        "ed25519:a_lPym": "7mqSDwK1k7rnw34Dd8Fahu0rhPW7jPmcWPRtRDoEN9Yuv+BCM2+Rfdpv2MjxNKy3AYDEBwUwYEuaKMBaEMiKAQ"
      }
    },
    "type": "m.room.message",
    "unsigned": {
      "age_ts": 799
    }
  }
}
```
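A hedged illustration of calling this endpoint from Python (placeholder URL and token; the event ID is the example from the response above):

```python
import requests

HOMESERVER = "https://synapse.example.com"  # placeholder
EVENT_ID = "$hJ_kcXbVMcI82JDrbqfUJIHu61tJD86uIFJ_8hNHi7s"  # example ID from above

resp = requests.get(
    f"{HOMESERVER}/_synapse/admin/v1/fetch_event/{EVENT_ID}",
    headers={"Authorization": "Bearer <admin access token>"},
)
resp.raise_for_status()
event = resp.json()["event"]
print(event["type"], event["sender"])
```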
@@ -39,6 +39,40 @@ the use of the
[List media uploaded by a user](user_admin_api.md#list-media-uploaded-by-a-user)
Admin API.

## Query a piece of media by ID

This API returns information about a piece of local or cached remote media given the origin server name and media id. If
information is requested for remote media which is not cached, the endpoint will return 404.

Request:
```http
GET /_synapse/admin/v1/media/<origin>/<media_id>
```

The API returns a JSON body with media info like the following:

Response:
```json
{
  "media_info": {
    "media_origin": "remote.com",
    "user_id": null,
    "media_id": "sdginwegWEG",
    "media_type": "img/png",
    "media_length": 67,
    "upload_name": "test.png",
    "created_ts": 300,
    "filesystem_id": "wgeweg",
    "url_cache": null,
    "last_access_ts": 400,
    "quarantined_by": null,
    "authenticated": false,
    "safe_from_quarantine": null,
    "sha256": "ebf4f635a17d10d6eb46ba680b70142419aa3220f228001a036d311a22ee9d2a"
  }
}
```

# Quarantine media

Quarantining media means that it is marked as inaccessible by users. It applies
@@ -1115,3 +1115,76 @@ Example response:
  ]
}
```

# Admin Space Hierarchy Endpoint

This API allows an admin to fetch the space/room hierarchy for a given space,
returning details about that room and any children the room may have, paginating
over the space tree in a depth-first manner to locate child rooms. This is
functionally similar to the [CS Hierarchy](https://spec.matrix.org/v1.16/client-server-api/#get_matrixclientv1roomsroomidhierarchy) endpoint but does not check for
room membership when returning room summaries.

The endpoint does not query other servers over federation about remote rooms
that the server has not joined. This is a deliberate trade-off: while this
means it will leave some holes in the hierarchy that we could otherwise
sometimes fill in, it significantly improves the endpoint's response time, and
the admin endpoint is designed for managing rooms local to the homeserver
anyway.

**Parameters**

The following query parameters are available:

* `from` - An optional pagination token, provided when there are more rooms to
  return than the limit.
* `limit` - Maximum number of rooms to return. Must be a non-negative integer;
  defaults to `50`.
* `max_depth` - The maximum depth in the tree to explore; must be a non-negative
  integer. 0 would correspond to just the root room, 1 would include just the
  root room's children, etc. If not provided, it will recurse into the space tree without limit.

Request:

```http
GET /_synapse/admin/v1/rooms/<room_id>/hierarchy
```

Response:

```json
{
  "rooms": [
    {
      "children_state": [
        {
          "content": {
            "via": ["local_test_server"]
          },
          "origin_server_ts": 1500,
          "sender": "@user:test",
          "state_key": "!QrMkkqBSwYRIFNFCso:test",
          "type": "m.space.child"
        }
      ],
      "name": "space room",
      "guest_can_join": false,
      "join_rule": "public",
      "num_joined_members": 1,
      "room_id": "!sPOpNyMHbZAoAOsOFL:test",
      "room_type": "m.space",
      "world_readable": false
    },
    {
      "children_state": [],
      "guest_can_join": true,
      "join_rule": "invite",
      "name": "nefarious",
      "num_joined_members": 1,
      "room_id": "!QrMkkqBSwYRIFNFCso:test",
      "topic": "being bad",
      "world_readable": false
    }
  ],
  "next_batch": "KUYmRbeSpAoaAIgOKGgyaCEn"
}
```
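Because responses are paginated via `from`/`next_batch`, a caller is expected to loop until no `next_batch` is returned. A hedged sketch (placeholder URL and token; the room ID is the example space above):

```python
import requests

HOMESERVER = "https://synapse.example.com"  # placeholder
ROOM_ID = "!sPOpNyMHbZAoAOsOFL:test"        # the example space above
HEADERS = {"Authorization": "Bearer <admin access token>"}

rooms = []
params = {"limit": 50}
while True:
    resp = requests.get(
        f"{HOMESERVER}/_synapse/admin/v1/rooms/{ROOM_ID}/hierarchy",
        headers=HEADERS,
        params=params,
    )
    resp.raise_for_status()
    body = resp.json()
    rooms.extend(body["rooms"])
    if "next_batch" not in body:
        break  # no more pages
    params["from"] = body["next_batch"]

print(f"fetched {len(rooms)} rooms")
```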
@@ -1227,7 +1227,7 @@ See also the

## Controlling whether a user is shadow-banned

Shadow-banning is a useful tool for moderating malicious or egregiously abusive users.
A shadow-banned user receives successful responses to their client-server API requests,
but the events are not propagated into rooms. This can be an effective tool as it
(hopefully) takes longer for the user to realise they are being moderated before
@@ -1464,8 +1464,11 @@ _Added in Synapse 1.72.0._

## Redact all the events of a user

This endpoint allows an admin to redact the events of a given user. There are no restrictions on redactions for a
local user. By default, we puppet the user who sent the message to redact it themselves. Redactions for non-local users are issued using the admin user, and will fail in rooms where the admin user is not admin/does not have the specified power level to issue redactions.
This endpoint allows an admin to redact the events of a given user. There are no restrictions on
redactions for a local user. By default, we puppet the user who sent the message to redact it themselves.
Redactions for non-local users are issued using the admin user, and will fail in rooms where the
admin user is not admin/does not have the specified power level to issue redactions. An option
is provided to override the default and allow the admin to issue the redactions in all cases.

The API is
```
@@ -1475,7 +1478,7 @@ POST /_synapse/admin/v1/user/$user_id/redact
  "rooms": ["!roomid1", "!roomid2"]
}
```
If an empty list is provided as the key for `rooms`, all events in all the rooms the user is a member of will be redacted;
otherwise all the events in the rooms provided in the request will be redacted.

The API starts the redaction process running, and returns immediately with a JSON body with
@@ -1501,7 +1504,10 @@ The following JSON body parameter must be provided:
The following JSON body parameters are optional:

- `reason` - Reason the redaction is being requested, i.e. "spam", "abuse", etc. This will be included in each redaction event, and be visible to users.
- `limit` - a limit on the number of the user's events to search for ones that can be redacted (events are redacted newest to oldest) in each room, defaults to 1000 if not provided
- `limit` - a limit on the number of the user's events to search for ones that can be redacted (events are redacted newest to oldest) in each room, defaults to 1000 if not provided.
- `use_admin` - If set to `true`, the admin user is used to issue the redactions, rather than puppeting the user. Useful
  when the admin is also the moderator of the rooms that require redactions. Note that the redactions will fail in rooms
  where the admin does not have sufficient power level to issue the redactions.

_Added in Synapse 1.116.0._
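A hedged sketch of issuing the redaction request described above, combining the optional body parameters (the URL, user ID, and token are placeholders):

```python
import requests

HOMESERVER = "https://synapse.example.com"  # placeholder
USER_ID = "@spammer:example.com"            # placeholder

resp = requests.post(
    f"{HOMESERVER}/_synapse/admin/v1/user/{USER_ID}/redact",
    headers={"Authorization": "Bearer <admin access token>"},
    json={
        "rooms": [],        # empty list: redact in every room the user is in
        "reason": "spam",   # included in each redaction event, visible to users
        "limit": 1000,
        "use_admin": True,  # issue the redactions as the admin user
    },
)
resp.raise_for_status()
print(resp.json())  # returns immediately; redaction runs in the background
```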
@@ -1,13 +1,11 @@
Deprecation Policy for Platform Dependencies
============================================
# Deprecation Policy

Synapse has a number of platform dependencies, including Python, Rust,
PostgreSQL and SQLite. This document outlines the policy towards which versions
we support, and when we drop support for versions in the future.
Synapse has a number of **platform dependencies** (Python, Rust, PostgreSQL, and SQLite)
and **application dependencies** (Python and Rust packages). This document outlines the
policy towards which versions we support, and when we drop support for versions in the
future.


Policy
------
## Platform Dependencies

Synapse follows the upstream support life cycles for Python and PostgreSQL,
i.e. when a version reaches End of Life Synapse will withdraw support for that
@@ -23,11 +21,11 @@ people building from source should ensure they can fetch recent versions of Rust
(e.g. by using [rustup](https://rustup.rs/)).

The oldest supported version of SQLite is the version
[provided](https://packages.debian.org/bullseye/libsqlite3-0) by
[provided](https://packages.debian.org/oldstable/libsqlite3-0) by
[Debian oldstable](https://wiki.debian.org/DebianOldStable).

Context
-------

### Context

It is important for system admins to have a clear understanding of the platform
requirements of Synapse and its deprecation policies so that they can
@@ -50,4 +48,42 @@ the ecosystem.
On a similar note, SQLite does not generally have a concept of "supported
release"; bugfixes are published for the latest minor release only. We chose to
track Debian's oldstable as this is relatively conservative, predictably updated
and is consistent with the `.deb` packages released by Matrix.org.


## Application dependencies

For application-level Python dependencies, we often specify loose version constraints
(e.g. `>=X.Y.Z`) to be forwards compatible with any new versions. Upper bounds (`<A.B.C`)
are only added when necessary to prevent known incompatibilities.

When selecting a minimum version, while we are mindful of the impact on downstream
package maintainers, our primary focus is on the maintainability and progress of Synapse
itself.

For developers, a Python dependency version can be considered a "no-brainer" upgrade once it is
available in both the latest [Debian Stable](https://packages.debian.org/stable/) and
[Ubuntu LTS](https://launchpad.net/ubuntu) repositories. No need to burden yourself with
extra scrutiny or consideration at this point.

We aggressively update Rust dependencies. Since these are statically linked and managed
entirely by `cargo` during build, they *can* pose no ongoing maintenance burden on others.
This allows us to freely upgrade to leverage the latest ecosystem advancements, assuming
they don't have their own system-level dependencies.


### Context

Because Python dependencies can easily be managed in a virtual environment, we are less
concerned about the criteria for selecting minimum versions. The only thing of concern
is making sure we're not making it unnecessarily difficult for downstream package
maintainers. Generally, this just means avoiding the bleeding edge for a few months.

The situation for Rust dependencies is fundamentally different. For packagers, the
concerns around Python dependency versions do not apply. The `cargo` tool handles
downloading and building all libraries to satisfy dependencies, and these libraries are
statically linked into the final binary. This means that from a packager's perspective,
the Rust dependency versions are an internal build detail, not a runtime dependency to
be managed on the target system. Consequently, we have even greater flexibility to
upgrade Rust dependencies as needed for the project. Some distros (e.g. Fedora) do
package Rust libraries, but this appears to be the outlier rather than the norm.
@@ -320,7 +320,7 @@ The following command will let you run the integration test with the most common
configuration:

```sh
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bullseye
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bookworm
```
(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)
@@ -59,6 +59,28 @@ def do_request_handling():
    logger.debug("phew")
```

### The `sentinel` context

The default logcontext is `synapse.logging.context.SENTINEL_CONTEXT`, which is an empty
sentinel value to represent the root logcontext. This is what is used when there is no
other logcontext set. The phrase "clear/reset the logcontext" means to set the current
logcontext to the `sentinel` logcontext.

No CPU/database usage metrics are recorded against the `sentinel` logcontext.

Ideally, nothing from the Synapse homeserver would be logged against the `sentinel`
logcontext, as we want to know which server the logs came from. In practice, this is not
yet always the case, especially outside of request handling.

Global things outside of Synapse (e.g. Twisted reactor code) should run in the
`sentinel` logcontext. It's only when it calls into application code that a logcontext
gets activated. This means the reactor should be started in the `sentinel` logcontext,
and any time an awaitable yields control back to the reactor, it should reset the
logcontext to be the `sentinel` logcontext. This is important to avoid leaking the
current logcontext to the reactor (which would then get picked up and associated with
the next thing the reactor does).
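To make "clearing the logcontext" concrete, here is a small, hedged sketch (not taken from the docs or tests; it assumes only the `LoggingContext`, `PreserveLoggingContext`, `current_context`, and `SENTINEL_CONTEXT` helpers named in this document):

```python
from synapse.logging.context import (
    SENTINEL_CONTEXT,
    LoggingContext,
    PreserveLoggingContext,
    current_context,
)

with LoggingContext("request-1"):
    # Inside the block, "request-1" is the current logcontext.
    assert current_context().name == "request-1"

    with PreserveLoggingContext():
        # With no argument, PreserveLoggingContext swaps in the sentinel for
        # the duration of the block, so any work handed to the reactor here
        # is not mis-attributed to "request-1".
        assert current_context() is SENTINEL_CONTEXT

    # ...and restores "request-1" on exit.
    assert current_context().name == "request-1"
```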
## Using logcontexts with awaitables
|
||||
|
||||
Awaitables break the linear flow of code so that there is no longer a single entry point
|
||||
@@ -121,8 +143,7 @@ cares about.
|
||||
The following sections describe pitfalls and helpful patterns when
|
||||
implementing these rules.
|
||||
|
||||
Always await your awaitables
|
||||
----------------------------
|
||||
## Always await your awaitables
|
||||
|
||||
Whenever you get an awaitable back from a function, you should `await` on
|
||||
it as soon as possible. Do not pass go; do not do any logging; do not
|
||||
@@ -181,6 +202,171 @@ async def sleep(seconds):
|
||||
return await context.make_deferred_yieldable(get_sleep_deferred(seconds))
|
||||
```
|
||||
|
||||
## Deferred callbacks
|
||||
|
||||
When a deferred callback is called, it inherits the current logcontext. The deferred
|
||||
callback chain can resume a coroutine, which if following our logcontext rules, will
|
||||
restore its own logcontext, then run:
|
||||
|
||||
- until it yields control back to the reactor, setting the sentinel logcontext
|
||||
- or until it finishes, restoring the logcontext it was started with (calling context)
|
||||
|
||||
This behavior creates two specific issues:
|
||||
|
||||
**Issue 1:** The first issue is that the callback may have reset the logcontext to the
|
||||
sentinel before returning. This means our calling function will continue with the
|
||||
sentinel logcontext instead of the logcontext it was started with (bad).
|
||||
|
||||
**Issue 2:** The second issue is that the current logcontext that called the deferred
|
||||
callback could finish before the callback finishes (bad).
|
||||
|
||||
In the following example, the deferred callback is called with the "main" logcontext and
|
||||
runs until we yield control back to the reactor in the `await` inside `clock.sleep(0)`.
|
||||
Since `clock.sleep(0)` follows our logcontext rules, it sets the logcontext to the
|
||||
sentinel before yielding control back to the reactor. Our `main` function continues with
|
||||
the sentinel logcontext (first bad thing) instead of the "main" logcontext. Then the
|
||||
`with LoggingContext("main")` block exits, finishing the "main" logcontext and yielding
|
||||
control back to the reactor again. Finally, later on when `clock.sleep(0)` completes,
|
||||
our `with LoggingContext("competing")` block exits, and restores the previous "main"
|
||||
logcontext which has already finished, resulting in `WARNING: Re-starting finished log
|
||||
context main` and leaking the `main` logcontext into the reactor which will then
|
||||
erronously be associated with the next task the reactor picks up.
|
||||
|
||||
```python
async def competing_callback():
    # Since this is run with the "main" logcontext, when the "competing"
    # logcontext exits, it will restore the previous "main" logcontext which has
    # already finished and results in "WARNING: Re-starting finished log context main"
    # and leaking the `main` logcontext into the reactor.
    with LoggingContext("competing"):
        await clock.sleep(0)


def main():
    with LoggingContext("main"):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
        # Call the callback within the "main" logcontext.
        d.callback(None)
        # Bad: This will be logged against sentinel logcontext
        logger.debug("ugh")


main()
```

**Solution 1:** We could of course fix this by following the general rule of "always
await your awaitables":

```python
async def main():
    with LoggingContext("main"):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
        d.callback(None)
        # Wait for `d` to finish before continuing so the "main" logcontext is
        # still active. This works because `d` already follows our logcontext
        # rules. If not, we would also have to use `make_deferred_yieldable(d)`.
        await d
        # Good: This will be logged against the "main" logcontext
        logger.debug("phew")
```

**Solution 2:** We could also fix this by surrounding the call to `d.callback` with a
`PreserveLoggingContext`, which will reset the logcontext to the sentinel before calling
the callback, and restore the "main" logcontext afterwards before continuing the `main`
function. This solves the problem because when the "competing" logcontext exits, it will
restore the sentinel logcontext, which by its nature is never finished, so there is no
warning and no leakage into the reactor.

```python
async def main():
    with LoggingContext("main"):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
        with PreserveLoggingContext():
            # Call the callback with the sentinel logcontext.
            d.callback(None)
        # Good: This will be logged against the "main" logcontext
        logger.debug("phew")
```

**Solution 3:** But let's say you *do* want to run (fire-and-forget) the deferred
callback in the current context without running into issues:

We can solve the first issue by using `run_in_background(...)` to run the callback in
the current logcontext; it handles the magic behind the scenes of a) restoring the
calling logcontext before returning to the caller and b) resetting the logcontext to the
sentinel after the deferred completes and we yield control back to the reactor, to avoid
leaking the logcontext into the reactor.

To solve the second issue, we can extend the lifetime of the "main" logcontext by
avoiding the `LoggingContext`'s context manager lifetime methods
(`__enter__`/`__exit__`). We can still set "main" as the current logcontext by using
`PreserveLoggingContext` and passing in the "main" logcontext.

```python
async def main():
    main_context = LoggingContext("main")
    with PreserveLoggingContext(main_context):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
        # The whole lambda will be run in the "main" logcontext. But we're using
        # a trick to return the deferred `d` itself so that `run_in_background`
        # will wait on that to complete and reset the logcontext to the sentinel
        # when it does, to avoid leaking the "main" logcontext into the reactor.
        run_in_background(lambda: (d.callback(None), d)[1])
        # Good: This will be logged against the "main" logcontext
        logger.debug("phew")

...

# Wherever possible, it's best to finish the logcontext by calling `__exit__` at some
# point. This allows us to catch bugs if we later try to erroneously restart a finished
# logcontext.
#
# Since the "main" logcontext stores the `LoggingContext.previous_context` when it is
# created, we can wrap this call in `PreserveLoggingContext()` to restore the correct
# previous logcontext. Our goal is to have the calling context remain unchanged after
# finishing the "main" logcontext.
with PreserveLoggingContext():
    # Finish the "main" logcontext
    with main_context:
        # Empty block - We're just trying to call `__exit__` on the "main" context
        # manager to finish it. We can't call `__exit__` directly as the code expects us
        # to `__enter__` before calling `__exit__` to `start`/`stop` things
        # appropriately. And in any case, it's probably best not to call the internal
        # methods directly.
        pass
```

The same thing applies if you have some deferreds stored somewhere which you want to
call back in the current logcontext.


### Deferred errbacks and cancellations

The same care should be taken when calling errbacks on deferreds. An errback and a
callback act the same in this regard (see the section above).

```python
d = defer.Deferred()
d.addErrback(some_other_function)
d.errback(failure)
```

Additionally, cancellation is the same as directly calling the errback with a
`twisted.internet.defer.CancelledError`:

```python
d = defer.Deferred()
d.addErrback(some_other_function)
d.cancel()
```
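
If the cancellation happens while a logcontext is active, the same
`PreserveLoggingContext` pattern from Solution 2 should apply. A minimal, hypothetical
sketch:

```python
from twisted.internet import defer

from synapse.logging.context import PreserveLoggingContext


def cancel_in_sentinel_context(d: "defer.Deferred") -> None:
    # Reset to the sentinel logcontext before firing the errback chain, so a
    # coroutine resumed by it cannot restore (and later finish with) our
    # calling logcontext.
    with PreserveLoggingContext():
        d.cancel()
```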


## Fire-and-forget

Sometimes you want to fire off a chain of execution, but not wait for

@@ -362,3 +548,19 @@ chain are dropped. Dropping the reference to an awaitable you're
supposed to be awaiting is bad practice, so this doesn't
actually happen too much. Unfortunately, when it does happen, it will
lead to leaked logcontexts which are incredibly hard to track down.

## Debugging logcontext issues

Debugging logcontext issues can be tricky as leaking or losing a logcontext will surface
downstream and can point to an unrelated part of the codebase. It's best to enable debug
logging for `synapse.logging.context.debug` (needs to be explicitly configured) and go
backwards in the logs from the point where the issue is observed to find the root cause.

`log.config.yaml`
```yaml
loggers:
  # Unlike other loggers, this one needs to be explicitly configured to see debug logs.
  synapse.logging.context.debug:
    level: DEBUG
```
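
For ad-hoc debugging (e.g. in a unit test), the same logger can also be enabled
programmatically with the standard `logging` module:

```python
import logging

# Equivalent to the YAML above: opt in to the logcontext debug logs.
logging.getLogger("synapse.logging.context.debug").setLevel(logging.DEBUG)
```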

@@ -64,3 +64,68 @@ If multiple modules implement this callback, they will be considered in order. I
returns `True`, Synapse falls through to the next one. The value of the first callback that
returns `False` will be used. If this happens, Synapse will not call any of the subsequent
implementations of this callback.

### `get_media_upload_limits_for_user`

_First introduced in Synapse v1.139.0_

```python
async def get_media_upload_limits_for_user(user_id: str, size: int) -> Optional[List[synapse.module_api.MediaUploadLimit]]
```

**<span style="color:red">
Caution: This callback is currently experimental. The method signature or behaviour
may change without notice.
</span>**

Called when processing a request to store content in the media repository. This can be used to dynamically override
the [media upload limits configuration](../usage/configuration/config_documentation.html#media_upload_limits).

The arguments passed to this callback are:

* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.

If the callback returns a list then it will be used as the limits instead of those in the configuration (if any).

If an empty list is returned then no limits are applied (**warning:** users will be able
to upload as much data as they desire).

If multiple modules implement this callback, they will be considered in order. If a
callback returns `None`, Synapse falls through to the next one. The value of the first
callback that does not return `None` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

If there are no registered modules, or if all modules return `None`, then the default
[media upload limits configuration](../usage/configuration/config_documentation.html#media_upload_limits)
will be used.
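
For illustration, a module might use this callback to lift the limits for a configured
set of trusted users. This is a hypothetical sketch: the `trusted_users` config key is
invented for the example, and registration is assumed to go through the module API's
`register_media_repository_callbacks` method:

```python
from typing import Any, Dict, List, Optional

from synapse.module_api import MediaUploadLimit, ModuleApi


class MediaLimitsExample:
    def __init__(self, config: Dict[str, Any], api: ModuleApi) -> None:
        # Hypothetical config value listing users exempt from upload limits.
        self._trusted_users = set(config.get("trusted_users", []))
        api.register_media_repository_callbacks(
            get_media_upload_limits_for_user=self.get_media_upload_limits_for_user,
        )

    async def get_media_upload_limits_for_user(
        self, user_id: str, size: int
    ) -> Optional[List[MediaUploadLimit]]:
        if user_id in self._trusted_users:
            # An empty list means no limits are applied for this user.
            return []
        # Returning None falls through to other modules or the configuration.
        return None
```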

### `on_media_upload_limit_exceeded`

_First introduced in Synapse v1.139.0_

```python
async def on_media_upload_limit_exceeded(user_id: str, limit: synapse.module_api.MediaUploadLimit, sent_bytes: int, attempted_bytes: int) -> None
```

**<span style="color:red">
Caution: This callback is currently experimental. The method signature or behaviour
may change without notice.
</span>**

Called when a user attempts to upload media that would exceed a
[configured media upload limit](../usage/configuration/config_documentation.html#media_upload_limits).

This callback will only be called on workers which handle
[POST /_matrix/media/v3/upload](https://spec.matrix.org/v1.15/client-server-api/#post_matrixmediav3upload)
requests.

This could be used to inform the user that they have reached a media upload limit through
some external method.

The arguments passed to this callback are:

* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
* `limit`: The `synapse.module_api.MediaUploadLimit` representing the limit that was reached.
* `sent_bytes`: The number of bytes already sent during the period of the limit.
* `attempted_bytes`: The number of bytes that the user attempted to send.
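
A module could, for example, record these events. A minimal sketch (logging only;
notifying the user "through some external method" is left to the deployment, and the
registration method is the same assumption as in the previous example):

```python
import logging
from typing import Any, Dict

from synapse.module_api import MediaUploadLimit, ModuleApi

logger = logging.getLogger(__name__)


class UploadLimitNotifier:
    def __init__(self, config: Dict[str, Any], api: ModuleApi) -> None:
        api.register_media_repository_callbacks(
            on_media_upload_limit_exceeded=self.on_media_upload_limit_exceeded,
        )

    async def on_media_upload_limit_exceeded(
        self,
        user_id: str,
        limit: MediaUploadLimit,
        sent_bytes: int,
        attempted_bytes: int,
    ) -> None:
        # Only log here; a real module might call a webhook or send an email.
        logger.info(
            "%s exceeded an upload limit: %d bytes already sent, %d attempted",
            user_id,
            sent_bytes,
            attempted_bytes,
        )
```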

@@ -195,12 +195,15 @@ _Changed in Synapse v1.132.0: Added the `room_config` argument. Callbacks that o

```python
async def user_may_create_room(user_id: str, room_config: synapse.module_api.JsonDict) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
```

Called when processing a room creation request.
Called when processing a room creation or room upgrade request.

The arguments passed to this callback are:

* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`).
* `room_config`: The contents of the body of a [/createRoom request](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3createroom) as a dictionary.
* `room_config`: The contents of the body of the [`/createRoom` request](https://spec.matrix.org/v1.15/client-server-api/#post_matrixclientv3createroom) as a dictionary.
  For a [room upgrade request](https://spec.matrix.org/v1.15/client-server-api/#post_matrixclientv3roomsroomidupgrade) it is a synthesised subset of what an equivalent
  `/createRoom` request would have looked like. Specifically, it contains the `creation_content` (linking to the previous room) and `initial_state` (containing a
  subset of the state of the previous room).
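
For example, a callback can tell the two cases apart by looking for the `predecessor`
link in `creation_content` — a hypothetical sketch (see the return values listed next):

```python
from typing import Union

from synapse.module_api import NOT_SPAM, JsonDict
from synapse.module_api.errors import Codes


async def user_may_create_room(
    user_id: str, room_config: JsonDict
) -> Union[str, Codes, bool]:
    # A synthesised room upgrade config links to the previous room via
    # `creation_content["predecessor"]`.
    if room_config.get("creation_content", {}).get("predecessor") is not None:
        # In this sketch, always allow upgrades of existing rooms.
        return NOT_SPAM
    # Fall back to whatever policy applies to brand-new rooms.
    return NOT_SPAM
```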

The callback must return one of:
- `synapse.module_api.NOT_SPAM`, to allow the operation. Other callbacks may still

@@ -186,6 +186,7 @@ oidc_providers:
4. Note the slug of your application, Client ID and Client Secret.

Note: RSA keys must be used for signing for Authentik, ECC keys do not work.
Note: The provider must have a signing key set and must not use an encryption key.

Synapse config:
```yaml
@@ -204,6 +205,12 @@ oidc_providers:
    config:
      localpart_template: "{{ user.preferred_username }}"
      display_name_template: "{{ user.preferred_username|capitalize }}" # TO BE FILLED: If your users have names in Authentik and you want those in Synapse, this should be replaced with user.name|capitalize.
[...]
jwt_config:
  enabled: true
  secret: "your client secret" # TO BE FILLED (same as `client_secret` above)
  algorithm: "RS256"
# (...other fields)
```

### Dex

@@ -7,8 +7,23 @@ proxy is supported, not SOCKS proxy or anything else.

## Configure

The `http_proxy`, `https_proxy`, `no_proxy` environment variables are used to
specify proxy settings. The environment variable is not case sensitive.
The proxy settings can be configured in the homeserver configuration file via
[`http_proxy`](../usage/configuration/config_documentation.md#http_proxy),
[`https_proxy`](../usage/configuration/config_documentation.md#https_proxy), and
[`no_proxy_hosts`](../usage/configuration/config_documentation.md#no_proxy_hosts).

`homeserver.yaml` example:
```yaml
http_proxy: http://USERNAME:PASSWORD@10.0.1.1:8080/
https_proxy: http://USERNAME:PASSWORD@proxy.example.com:8080/
no_proxy_hosts:
  - master.hostname.example.com
  - 10.1.0.0/16
  - 172.30.0.0/16
```

The proxy settings can also be configured via the `http_proxy`, `https_proxy`,
`no_proxy` environment variables. The environment variables are not case sensitive.
- `http_proxy`: Proxy server to use for HTTP requests.
- `https_proxy`: Proxy server to use for HTTPS requests.
- `no_proxy`: Comma-separated list of hosts, IP addresses, or IP ranges in CIDR

@@ -44,7 +59,7 @@ The proxy will be **used** for:

- phone-home stats
- recaptcha validation
- CAS auth validation
- OpenID Connect
- OpenID Connect (OIDC)
- Outbound federation
- Federation (checking public key revocation)
- Fetching public keys of other servers

@@ -53,7 +68,7 @@ The proxy will be **used** for:

It will **not be used** for:

- Application Services
- Identity servers
- Matrix Identity servers
- In worker configurations
  - connections between workers
  - connections from workers to Redis

@@ -88,7 +88,8 @@ This will install and start a systemd service called `coturn`.
    denied-peer-ip=172.16.0.0-172.31.255.255

    # recommended additional local peers to block, to mitigate external access to internal services.
    # https://www.rtcsec.com/article/slack-webrtc-turn-compromise-and-bug-bounty/#how-to-fix-an-open-turn-relay-to-address-this-vulnerability
    # https://www.enablesecurity.com/blog/slack-webrtc-turn-compromise-and-bug-bounty/#how-to-fix-an-open-turn-relay-to-address-this-vulnerability
    # https://www.enablesecurity.com/blog/cve-2020-26262-bypass-of-coturns-access-control-protection/#further-concerns-what-else
    no-multicast-peers
    denied-peer-ip=0.0.0.0-0.255.255.255
    denied-peer-ip=100.64.0.0-100.127.255.255
@@ -101,6 +102,14 @@ This will install and start a systemd service called `coturn`.
    denied-peer-ip=198.51.100.0-198.51.100.255
    denied-peer-ip=203.0.113.0-203.0.113.255
    denied-peer-ip=240.0.0.0-255.255.255.255
    denied-peer-ip=::1
    denied-peer-ip=64:ff9b::-64:ff9b::ffff:ffff
    denied-peer-ip=::ffff:0.0.0.0-::ffff:255.255.255.255
    denied-peer-ip=100::-100::ffff:ffff:ffff:ffff
    denied-peer-ip=2001::-2001:1ff:ffff:ffff:ffff:ffff:ffff:ffff
    denied-peer-ip=2002::-2002:ffff:ffff:ffff:ffff:ffff:ffff:ffff
    denied-peer-ip=fc00::-fdff:ffff:ffff:ffff:ffff:ffff:ffff:ffff
    denied-peer-ip=fe80::-febf:ffff:ffff:ffff:ffff:ffff:ffff:ffff

    # special case the turn server itself so that client->TURN->TURN->client flows work
    # this should be one of the turn server's listening IPs

@@ -35,7 +35,7 @@ handlers:
loggers:
    synapse:
        level: INFO
        handlers: [remote]
        handlers: [file]
    synapse.storage.SQL:
        level: WARNING
```

docs/upgrade.md
@@ -117,6 +117,130 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).

# Upgrading to v1.141.0

## Docker images now based on Debian `trixie` with Python 3.13

The Docker images are now based on Debian `trixie` and use Python 3.13. If you
are using the Docker images as a base image you may need to e.g. adjust the
paths you mount any additional Python packages at.

# Upgrading to v1.140.0

## Users of `synapse-s3-storage-provider` must update the module to `v1.6.0`

Deployments that make use of the
[synapse-s3-storage-provider](https://github.com/matrix-org/synapse-s3-storage-provider/)
module must update it to
[v1.6.0](https://github.com/matrix-org/synapse-s3-storage-provider/releases/tag/v1.6.0),
otherwise users will be unable to upload or download media.

# Upgrading to v1.139.0

## `/register` requests from old application service implementations may break when using MAS

Application Services that do not set `inhibit_login=true` when calling `POST
/_matrix/client/v3/register` will receive the error
`IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED` in response. This is a
result of [MSC4190: Device management for application
services](https://github.com/matrix-org/matrix-spec-proposals/pull/4190), which
adds new endpoints for application services to create encryption-ready devices,
rather than using `/login` or calling `/register` without `inhibit_login=true`.

If an application service you use starts to fail with the mentioned error,
ensure it is up to date. If it is, then kindly let the author know that they
need to update their implementation to call `/register` with
`inhibit_login=true`.

# Upgrading to v1.138.2

## Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin

Ubuntu 24.10 Oracular Oriole [has been end-of-life since 10 Jul
2025](https://endoflife.date/ubuntu). This release drops support for Ubuntu
24.10, and in its place adds support for Ubuntu 25.04 Plucky Puffin.

This notice also applies to the v1.139.0 release.

# Upgrading to v1.136.0

## Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process`

The `run_as_background_process` function is now a method of the `ModuleApi` class. If
you were using the function directly from the module API, it will continue to work fine,
but the background process metrics will not include an accurate `server_name` label.
This kind of metric labeling isn't relevant for many use cases; it is used to
differentiate Synapse instances running in the same Python process (relevant to Synapse
Pro: Small Hosts). We recommend updating your usage to the new
`ModuleApi.run_as_background_process` method to stay on top of future changes.

<details>
<summary>Example <code>run_as_background_process</code> upgrade</summary>

Before:
```python
class MyModule:
    def __init__(self, module_api: ModuleApi) -> None:
        run_as_background_process(__name__ + ":setup_database", self.setup_database)
```

After:
```python
class MyModule:
    def __init__(self, module_api: ModuleApi) -> None:
        module_api.run_as_background_process(__name__ + ":setup_database", self.setup_database)
```

</details>

## Metric labels have changed on `synapse_federation_last_received_pdu_time` and `synapse_federation_last_sent_pdu_time`

Previously, the `synapse_federation_last_received_pdu_time` and
`synapse_federation_last_sent_pdu_time` metrics both used the `server_name` label to
differentiate between different servers that we send and receive events from.

Since we're now using the `server_name` label to differentiate between different Synapse
homeserver instances running in the same process, these metrics have been changed as follows:

- `synapse_federation_last_received_pdu_time` now uses the `origin_server_name` label
- `synapse_federation_last_sent_pdu_time` now uses the `destination_server_name` label

The Grafana dashboard JSON in `contrib/grafana/synapse.json` has been updated to reflect
this change, but you will need to manually update your own existing Grafana dashboards
using these metrics.

## Stable integration with Matrix Authentication Service

Support for [Matrix Authentication Service (MAS)](https://github.com/element-hq/matrix-authentication-service) is now stable, with a simplified configuration.
This stable integration requires MAS 0.20.0 or later.

The existing `experimental_features.msc3861` configuration option is now deprecated and will be removed in Synapse v1.137.0.

Synapse deployments already using MAS should now use the new configuration options:

```yaml
matrix_authentication_service:
  # Enable the MAS integration
  enabled: true
  # The base URL where Synapse will contact MAS
  endpoint: http://localhost:8080
  # The shared secret used to authenticate MAS requests; must be the same as `matrix.secret` in the MAS configuration
  # See https://element-hq.github.io/matrix-authentication-service/reference/configuration.html#matrix
  secret: "asecurerandomsecretstring"
```

They must remove the `experimental_features.msc3861` configuration option from their configuration.

They can also remove the client previously used by Synapse [in the MAS configuration](https://element-hq.github.io/matrix-authentication-service/reference/configuration.html#clients) as it is no longer in use.

# Upgrading to v1.135.0

## `on_user_registration` module API callback may now run on any worker

Previously, the `on_user_registration` callback would only run on the main
process. Modules relying on this callback must assume that they may now be
called from any worker, not just the main process.

# Upgrading to v1.134.0

## ICU bundled with Synapse
@@ -129,10 +253,10 @@ native ICU library on your system is no longer required.

## Documented endpoint which can be delegated to a federation worker

The endpoint `^/_matrix/federation/v1/version$` can be delegated to a federation
worker. This is not new behaviour, but had not been documented yet. The
[list of delegatable endpoints](workers.md#synapseappgeneric_worker) has
been updated to include it. Make sure to check your reverse proxy rules if you
are using workers.

# Upgrading to v1.126.0

@@ -610,6 +610,61 @@ manhole_settings:
  ssh_pub_key_path: CONFDIR/id_rsa.pub
```
---
### `http_proxy`

*(string|null)* Proxy server to use for HTTP requests.
For more details, see the [forward proxy documentation](../../setup/forward_proxy.md). There is no default for this option.

Example configuration:
```yaml
http_proxy: http://USERNAME:PASSWORD@10.0.1.1:8080/
```
---
### `https_proxy`

*(string|null)* Proxy server to use for HTTPS requests.
For more details, see the [forward proxy documentation](../../setup/forward_proxy.md). There is no default for this option.

Example configuration:
```yaml
https_proxy: http://USERNAME:PASSWORD@proxy.example.com:8080/
```
---
### `no_proxy_hosts`

*(array)* List of hosts, IP addresses, or IP ranges in CIDR format which should not use the proxy. Synapse will directly connect to these hosts.
For more details, see the [forward proxy documentation](../../setup/forward_proxy.md). There is no default for this option.

Example configuration:
```yaml
no_proxy_hosts:
  - master.hostname.example.com
  - 10.1.0.0/16
  - 172.30.0.0/16
```
---
### `matrix_authentication_service`

*(object)* The `matrix_authentication_service` setting configures integration with [Matrix Authentication Service (MAS)](https://github.com/element-hq/matrix-authentication-service).

This setting has the following sub-options:

* `enabled` (boolean): Whether or not to enable the MAS integration. If this is set to `false`, Synapse will use its legacy internal authentication API. Defaults to `false`.

* `endpoint` (string): The URL where Synapse can reach MAS. This *must* have the `discovery` and `oauth` resources mounted. Defaults to `"http://localhost:8080"`.

* `secret` (string|null): A shared secret that will be used to authenticate requests from and to MAS.

* `secret_path` (string|null): An alternative to `secret`, reading the shared secret from a file. The file should be a plain text file, containing only the secret. Synapse reads the secret from the given file once at startup.

Example configuration:
```yaml
matrix_authentication_service:
  enabled: true
  secret: someverysecuresecret
  endpoint: http://localhost:8080
```
---
### `dummy_events_threshold`

*(integer)* Forward extremities can build up in a room due to networking delays between homeservers. Once this happens in a large room, calculation of the state of that room can become quite expensive. To mitigate this, once the number of forward extremities reaches a given threshold, Synapse will send an `org.matrix.dummy_event` event, which will reduce the forward extremities in the room.
@@ -1925,9 +1980,8 @@ This setting has the following sub-options:

Default configuration:
```yaml
rc_delayed_event_mgmt:
  per_user:
    per_second: 1.0
    burst_count: 5.0
  per_second: 1.0
  burst_count: 5.0
```

Example configuration:
@@ -1952,9 +2006,8 @@ This setting has the following sub-options:

Default configuration:
```yaml
rc_reports:
  per_user:
    per_second: 1.0
    burst_count: 5.0
  per_second: 1.0
  burst_count: 5.0
```

Example configuration:
@@ -1964,6 +2017,30 @@ rc_reports:
  burst_count: 20.0
```
---
### `rc_room_creation`

*(object)* Sets rate limits for how often users are able to create rooms.

This setting has the following sub-options:

* `per_second` (number): Maximum number of requests a client can send per second.

* `burst_count` (number): Maximum number of requests a client can send before being throttled.

Default configuration:
```yaml
rc_room_creation:
  per_second: 0.016
  burst_count: 10.0
```

Example configuration:
```yaml
rc_room_creation:
  per_second: 1.0
  burst_count: 5.0
```
---
### `federation_rr_transactions_per_room_per_second`

*(integer)* Sets outgoing federation transaction frequency for sending read-receipts, per-room.

@@ -2086,6 +2163,26 @@ Example configuration:
max_upload_size: 60M
```
---
### `media_upload_limits`

*(array)* A list of media upload limits defining how much data a given user can upload in a given time period.
These limits are applied in addition to the `max_upload_size` limit above (which applies to individual uploads).

An empty list means no limits are applied.

These settings can be overridden using the `get_media_upload_limits_for_user` module API [callback](../../modules/media_repository_callbacks.md#get_media_upload_limits_for_user).

Defaults to `[]`.

Example configuration:
```yaml
media_upload_limits:
  - time_period: 1h
    max_size: 100M
  - time_period: 1w
    max_size: 500M
```
---
### `max_image_pixels`

*(byte size)* Maximum number of pixels that will be thumbnailed. Defaults to `"32M"`.

@@ -2340,6 +2437,21 @@ Example configuration:
recaptcha_public_key: YOUR_PUBLIC_KEY
```
---
### `recaptcha_public_key_path`

*(string|null)* An alternative to [`recaptcha_public_key`](#recaptcha_public_key): allows the public key to be specified in an external file.

The file should be a plain text file, containing only the public key. Synapse reads the public key from the given file once at startup.

_Added in Synapse 1.135.0._

Defaults to `null`.

Example configuration:
```yaml
recaptcha_public_key_path: /path/to/key/file
```
---
### `recaptcha_private_key`

*(string|null)* This homeserver's ReCAPTCHA private key. Must be specified if [`enable_registration_captcha`](#enable_registration_captcha) is enabled. Defaults to `null`.
@@ -2349,6 +2461,21 @@ Example configuration:
recaptcha_private_key: YOUR_PRIVATE_KEY
```
---
### `recaptcha_private_key_path`

*(string|null)* An alternative to [`recaptcha_private_key`](#recaptcha_private_key): allows the private key to be specified in an external file.

The file should be a plain text file, containing only the private key. Synapse reads the private key from the given file once at startup.

_Added in Synapse 1.135.0._

Defaults to `null`.

Example configuration:
```yaml
recaptcha_private_key_path: /path/to/key/file
```
---
### `enable_registration_captcha`

*(boolean)* Set to `true` to require users to complete a CAPTCHA test when registering an account. Requires a valid ReCaptcha public/private key.
@@ -2446,6 +2573,28 @@ Example configuration:
turn_allow_guests: false
```
---
### `matrix_rtc`

*(object)* Options related to MatrixRTC. Defaults to `{}`.

This setting has the following sub-options:

* `transports` (array): A list of transport types and arguments to use for MatrixRTC connections. Defaults to `[]`.

  Options for each entry include:

  * `type` (string): The type of transport to use to connect to the selective forwarding unit (SFU).

  * `livekit_service_url` (string): The base URL of the LiveKit service. Should only be used with LiveKit-based transports.

Example configuration:
```yaml
matrix_rtc:
  transports:
    - type: livekit
      livekit_service_url: https://matrix-rtc.example.com/livekit/jwt
```
---
## Registration

Registration can be rate-limited using the parameters in the [Ratelimiting](#ratelimiting) section of this manual.

@@ -3761,7 +3910,11 @@ encryption_enabled_by_default_for_room_type: invite

This setting has the following sub-options:

* `enabled` (boolean): Defines whether users can search the user directory. If false then empty responses are returned to all queries. Defaults to `true`.
* `enabled` (boolean): Defines whether users can search the user directory. If `false` then empty responses are returned to all queries.

  *Warning: While the homeserver may determine which subset of users are searched, the Matrix specification requires homeservers to include (at minimum) users visible in public rooms and users sharing a room with the requester. Using `false` improves performance but violates this requirement.*

  Defaults to `true`.

* `search_all_users` (boolean): Defines whether to search all users visible to your homeserver at the time the search is performed. If set to true, will return all users known to the homeserver matching the search query. If false, search results will only contain users visible in public rooms and users sharing a room with the requester.

@@ -4044,7 +4197,7 @@ The default power levels for each preset are:
"m.room.history_visibility": 100
"m.room.canonical_alias": 50
"m.room.avatar": 50
"m.room.tombstone": 100
"m.room.tombstone": 100 (150 if MSC4289 is used)
"m.room.server_acl": 100
"m.room.encryption": 100
```
@@ -4291,6 +4444,8 @@ This setting has the following sub-options:

* `push_rules` (string): Name of a worker assigned to the `push_rules` stream.

* `device_lists` (string): Name of a worker assigned to the `device_lists` stream.

Example configuration:
```yaml
stream_writers:

@@ -120,6 +120,9 @@ worker_replication_secret: ""

redis:
  enabled: true
  # For additional Redis configuration options (TLS, authentication, etc.),
  # see the Synapse configuration documentation:
  # https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#redis

instance_map:
  main:
@@ -238,7 +241,9 @@ information.
    ^/_matrix/client/unstable/im.nheko.summary/summary/.*$
    ^/_matrix/client/(r0|v3|unstable)/account/3pid$
    ^/_matrix/client/(r0|v3|unstable)/account/whoami$
    ^/_matrix/client/(r0|v3|unstable)/devices$
    ^/_matrix/client/(r0|v3|unstable)/account/deactivate$
    ^/_matrix/client/(r0|v3)/delete_devices$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/devices(/|$)
    ^/_matrix/client/versions$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/event/
@@ -250,14 +255,16 @@ information.
    ^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$
    ^/_matrix/client/(r0|v3|unstable)/capabilities$
    ^/_matrix/client/(r0|v3|unstable)/notifications$
    ^/_synapse/admin/v1/rooms/
    ^/_synapse/admin/v1/rooms/[^/]+$

    # Encryption requests
    ^/_matrix/client/(r0|v3|unstable)/keys/query$
    ^/_matrix/client/(r0|v3|unstable)/keys/changes$
    ^/_matrix/client/(r0|v3|unstable)/keys/claim$
    ^/_matrix/client/(r0|v3|unstable)/room_keys/
    ^/_matrix/client/(r0|v3|unstable)/keys/upload$
    ^/_matrix/client/(r0|v3|unstable)/keys/upload
    ^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$

    # Registration/login requests
    ^/_matrix/client/(api/v1|r0|v3|unstable)/login$
@@ -282,7 +289,6 @@ Additionally, the following REST endpoints can be handled for GET requests:

    ^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/
    ^/_matrix/client/unstable/org.matrix.msc4140/delayed_events
    ^/_matrix/client/(api/v1|r0|v3|unstable)/devices/

    # Account data requests
    ^/_matrix/client/(r0|v3|unstable)/.*/tags
@@ -329,7 +335,6 @@ set to `true`), the following endpoints can be handled by the worker:

    ^/_synapse/admin/v2/users/[^/]+$
    ^/_synapse/admin/v1/username_available$
    ^/_synapse/admin/v1/users/[^/]+/_allow_cross_signing_replacement_without_uia$
    # Only the GET method:
    ^/_synapse/admin/v1/users/[^/]+/devices$

Note that an [HTTP listener](usage/configuration/config_documentation.md#listeners)
@@ -530,8 +535,9 @@ the stream writer for the `account_data` stream:

##### The `receipts` stream

The following endpoints should be routed directly to the worker configured as
the stream writer for the `receipts` stream:
The `receipts` stream supports multiple writers. The following endpoints
can be handled by any worker, but should be routed directly to one of the workers
configured as stream writer for the `receipts` stream:

    ^/_matrix/client/(r0|v3|unstable)/rooms/.*/receipt
    ^/_matrix/client/(r0|v3|unstable)/rooms/.*/read_markers
@@ -550,6 +556,18 @@ the stream writer for the `push_rules` stream:

    ^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/

##### The `device_lists` stream

The `device_lists` stream supports multiple writers. The following endpoints
can be handled by any worker, but should be routed directly to one of the workers
configured as stream writer for the `device_lists` stream:

    ^/_matrix/client/(r0|v3)/delete_devices$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/devices(/|$)
    ^/_matrix/client/(r0|v3|unstable)/keys/upload
    ^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$

#### Restrict outbound federation traffic to a specific set of workers

The

mypy.ini
@@ -1,6 +1,17 @@
[mypy]
namespace_packages = True
plugins = pydantic.mypy, mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py
# Our custom mypy plugin should remain first in this list.
#
# mypy has a limitation where it only chooses the first plugin that returns a non-None
# value for each hook (known limitation, c.f.
# https://github.com/python/mypy/issues/19524). We work around this by putting our custom
# plugin first in the plugin order and then manually calling any other conflicting
# plugin hooks in our own plugin, followed by our own checks.
#
# If you add a new plugin, make sure to check whether the hooks being used conflict with
# our custom plugin hooks and, if so, manually call the other plugin's hooks in our
# custom plugin. (This also applies if the plugin is updated in the future.)
plugins = scripts-dev/mypy_synapse_plugin.py, pydantic.mypy, mypy_zope:plugin
follow_imports = normal
show_error_codes = True
show_traceback = True
@@ -58,7 +69,7 @@ warn_unused_ignores = False
;; https://github.com/python/typeshed/tree/master/stubs
;; and for each package `foo` there's a corresponding `types-foo` package on PyPI,
;; which we can pull in as a dev dependency by adding to `pyproject.toml`'s
;; `[tool.poetry.dev-dependencies]` list.
;; `[tool.poetry.group.dev.dependencies]` list.

# https://github.com/lepture/authlib/issues/460
[mypy-authlib.*]
@@ -99,3 +110,6 @@ ignore_missing_imports = True

[mypy-multipart.*]
ignore_missing_imports = True

[mypy-mypy_zope.*]
ignore_missing_imports = True

poetry.lock (generated)
File diff suppressed because it is too large.
@@ -78,6 +78,12 @@ select = [
    "LOG",
    # flake8-logging-format
    "G",
    # pyupgrade
    "UP006",
]
extend-safe-fixes = [
    # pyupgrade
    "UP006"
]

[tool.ruff.lint.isort]
@@ -101,10 +107,10 @@ module-name = "synapse.synapse_rust"

[tool.poetry]
name = "matrix-synapse"
version = "1.134.0"
version = "1.141.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later"
license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial"
readme = "README.rst"
repository = "https://github.com/element-hq/synapse"
packages = [
@@ -178,8 +184,13 @@ signedjson = "^1.1.0"
service-identity = ">=18.1.0"
# Twisted 18.9 introduces some logger improvements that the structured
# logger utilises
Twisted = {extras = ["tls"], version = ">=18.9.0"}
treq = ">=15.1"
# Twisted 19.7.0 moves test helpers to a new module and deprecates the old location.
# Twisted 21.2.0 introduces contextvar support.
# We could likely bump this to 22.1 without making distro packagers'
# lives hard (as of 2025-07, distro support is Ubuntu LTS: 22.1, Debian stable: 22.4,
# RHEL 9: 22.10)
Twisted = {extras = ["tls"], version = ">=21.2.0"}
treq = ">=21.5.0"
# Twisted has required pyopenssl 16.0 since about Twisted 16.6.
pyOpenSSL = ">=16.0.0"
PyYAML = ">=5.3"
@@ -195,7 +206,9 @@ pymacaroons = ">=0.13.0"
msgpack = ">=0.5.2"
phonenumbers = ">=8.2.0"
# we use GaugeHistogramMetric, which was added in prom-client 0.4.0.
prometheus-client = ">=0.4.0"
# `prometheus_client.metrics` was added in 0.5.0, so we require that too.
# We chose 0.6.0 as that is the current version in Debian Buster (oldstable).
prometheus-client = ">=0.6.0"
# we use `order`, which arrived in attrs 19.2.0.
# Note: 21.1.0 broke `/sync`, see https://github.com/matrix-org/synapse/issues/9936
attrs = ">=19.2.0,!=21.1.0"
@@ -224,7 +237,7 @@ pydantic = ">=1.7.4, <3"
# https://github.com/python-poetry/poetry/issues/6154). Both `pip install` and
# `poetry build` do the right thing without this explicit dependency.
#
# This isn't really a dev-dependency, as `poetry install --no-dev` will fail,
# This isn't really a dev-dependency, as `poetry install --without dev` will fail,
# but the alternative is to add it to the main list of deps where it isn't
# needed.
setuptools_rust = ">=1.3"
@@ -312,12 +325,12 @@ all = [
    # - systemd: this is a system-based requirement
]

[tool.poetry.dev-dependencies]
[tool.poetry.group.dev.dependencies]
# We pin development dependencies in poetry.lock so that our tests don't start
# failing on new releases. Keeping lower bounds loose here means that dependabot
# can bump versions without having to update the content-hash in the lockfile.
# This helps prevent merge conflicts when running a batch of dependabot updates.
ruff = "0.12.2"
ruff = "0.12.10"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"

@@ -326,7 +339,6 @@ lxml-stubs = ">=0.4.0"
mypy = "*"
mypy-zope = "*"
types-bleach = ">=4.1.0"
types-commonmark = ">=0.9.2"
types-jsonschema = ">=3.2.0"
types-netaddr = ">=0.8.0.6"
types-opentracing = ">=2.4.2"
@@ -349,8 +361,8 @@ idna = ">=2.5"
click = ">=8.1.3"
# GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints.
GitPython = ">=3.1.20"
commonmark = ">=0.9.1"
pygithub = ">=1.55"
markdown-it-py = ">=3.0.0"
pygithub = ">=1.59"
# The following are executed as commands by the release script.
twine = "*"
# Towncrier min version comes from https://github.com/matrix-org/synapse/pull/3425. Rationale unclear.
@@ -375,10 +387,10 @@ build-backend = "poetry.core.masonry.api"
# Skip unsupported platforms (by us or by Rust).
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
# We skip:
# - CPython and PyPy 3.8: EOLed
# - CPython 3.8: EOLed
# - musllinux i686: excluded to reduce number of wheels we build.
#   c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
skip = "cp38* pp38* *-musllinux_i686"
skip = "cp38* *-musllinux_i686"
# Enable non-default builds.
# "pypy" used to be included by default up until cibuildwheel 3.
enable = "pypy"

@@ -7,7 +7,7 @@ name = "synapse"
version = "0.1.0"

edition = "2021"
rust-version = "1.81.0"
rust-version = "1.82.0"

[lib]
name = "synapse"
@@ -23,7 +23,7 @@ name = "synapse.synapse_rust"

[dependencies]
anyhow = "1.0.63"
base64 = "0.21.7"
base64 = "0.22.1"
bytes = "1.6.0"
headers = "0.4.0"
http = "1.1.0"

@@ -61,6 +61,7 @@ fn bench_match_exact(b: &mut Bencher) {
        vec![],
        false,
        false,
        false,
    )
    .unwrap();

@@ -71,10 +72,10 @@ fn bench_match_exact(b: &mut Bencher) {
        },
    ));

    let matched = eval.match_condition(&condition, None, None).unwrap();
    let matched = eval.match_condition(&condition, None, None, None).unwrap();
    assert!(matched, "Didn't match");

    b.iter(|| eval.match_condition(&condition, None, None).unwrap());
    b.iter(|| eval.match_condition(&condition, None, None, None).unwrap());
}

#[bench]
@@ -107,6 +108,7 @@ fn bench_match_word(b: &mut Bencher) {
        vec![],
        false,
        false,
        false,
    )
    .unwrap();

@@ -117,10 +119,10 @@ fn bench_match_word(b: &mut Bencher) {
        },
    ));

    let matched = eval.match_condition(&condition, None, None).unwrap();
    let matched = eval.match_condition(&condition, None, None, None).unwrap();
    assert!(matched, "Didn't match");

    b.iter(|| eval.match_condition(&condition, None, None).unwrap());
    b.iter(|| eval.match_condition(&condition, None, None, None).unwrap());
}

#[bench]
@@ -153,6 +155,7 @@ fn bench_match_word_miss(b: &mut Bencher) {
        vec![],
        false,
        false,
        false,
    )
    .unwrap();

@@ -163,10 +166,10 @@ fn bench_match_word_miss(b: &mut Bencher) {
        },
    ));

    let matched = eval.match_condition(&condition, None, None).unwrap();
    let matched = eval.match_condition(&condition, None, None, None).unwrap();
    assert!(!matched, "Didn't match");

    b.iter(|| eval.match_condition(&condition, None, None).unwrap());
    b.iter(|| eval.match_condition(&condition, None, None, None).unwrap());
}

#[bench]
@@ -199,6 +202,7 @@ fn bench_eval_message(b: &mut Bencher) {
        vec![],
        false,
        false,
        false,
    )
    .unwrap();

@@ -210,7 +214,8 @@ fn bench_eval_message(b: &mut Bencher) {
        false,
        false,
        false,
        false,
    );

    b.iter(|| eval.run(&rules, Some("bob"), Some("person")));
    b.iter(|| eval.run(&rules, Some("bob"), Some("person"), None));
}

@@ -54,6 +54,7 @@ enum EventInternalMetadataData {
    RecheckRedaction(bool),
    SoftFailed(bool),
    ProactivelySend(bool),
    PolicyServerSpammy(bool),
    Redacted(bool),
    TxnId(Box<str>),
    TokenId(i64),
@@ -96,6 +97,13 @@ impl EventInternalMetadataData {
                .to_owned()
                .into_any(),
            ),
            EventInternalMetadataData::PolicyServerSpammy(o) => (
                pyo3::intern!(py, "policy_server_spammy"),
                o.into_pyobject(py)
                    .unwrap_infallible()
                    .to_owned()
                    .into_any(),
            ),
            EventInternalMetadataData::Redacted(o) => (
                pyo3::intern!(py, "redacted"),
                o.into_pyobject(py)
@@ -155,6 +163,11 @@ impl EventInternalMetadataData {
                    .extract()
                    .with_context(|| format!("'{key_str}' has invalid type"))?,
            ),
            "policy_server_spammy" => EventInternalMetadataData::PolicyServerSpammy(
                value
                    .extract()
                    .with_context(|| format!("'{key_str}' has invalid type"))?,
            ),
            "redacted" => EventInternalMetadataData::Redacted(
                value
                    .extract()
@@ -427,6 +440,17 @@ impl EventInternalMetadata {
        set_property!(self, ProactivelySend, obj);
    }

    #[getter]
    fn get_policy_server_spammy(&self) -> PyResult<bool> {
        Ok(get_property_opt!(self, PolicyServerSpammy)
            .copied()
            .unwrap_or(false))
    }
    #[setter]
    fn set_policy_server_spammy(&mut self, obj: bool) {
        set_property!(self, PolicyServerSpammy, obj);
    }

    #[getter]
    fn get_redacted(&self) -> PyResult<bool> {
        let bool = get_property!(self, Redacted)?;

@@ -12,12 +12,12 @@
|
||||
* <https://www.gnu.org/licenses/agpl-3.0.html>.
|
||||
*/
|
||||
|
||||
use std::{collections::HashMap, future::Future};
|
||||
use std::{collections::HashMap, future::Future, sync::OnceLock};
|
||||
|
||||
use anyhow::Context;
|
||||
use futures::TryStreamExt;
|
||||
use once_cell::sync::OnceCell;
|
||||
use pyo3::{create_exception, exceptions::PyException, prelude::*, types::PyString};
|
||||
use pyo3::{create_exception, exceptions::PyException, prelude::*};
|
||||
use reqwest::RequestBuilder;
|
||||
use tokio::runtime::Runtime;
|
||||
|
||||
@@ -30,25 +30,112 @@ create_exception!(
|
||||
"A panic which happened in a Rust future"
|
||||
);
|
||||
|
||||
/// The tokio runtime that we're using to run async Rust libs.
|
||||
static RUNTIME: OnceCell<Runtime> = OnceCell::new();
|
||||
impl RustPanicError {
|
||||
fn from_panic(panic_err: &(dyn std::any::Any + Send + 'static)) -> PyErr {
|
||||
// Apparently this is how you extract the panic message from a panic
|
||||
let panic_message = if let Some(str_slice) = panic_err.downcast_ref::<&str>() {
|
||||
str_slice
|
||||
} else if let Some(string) = panic_err.downcast_ref::<String>() {
|
||||
string
|
||||
} else {
|
||||
"unknown error"
|
||||
};
|
||||
Self::new_err(panic_message.to_owned())
|
||||
}
|
||||
}
|
||||
|
||||
/// A reference to the `twisted.internet.defer` module.
|
||||
static DEFER: OnceCell<PyObject> = OnceCell::new();
|
||||
/// This is the name of the attribute where we store the runtime on the reactor
|
||||
static TOKIO_RUNTIME_ATTR: &str = "__synapse_rust_tokio_runtime";
|
||||
|
||||
/// Access the tokio runtime.
|
||||
fn runtime() -> PyResult<&'static Runtime> {
|
||||
RUNTIME.get_or_try_init(|| {
|
||||
/// A Python wrapper around a Tokio runtime.
|
||||
///
|
||||
/// This allows us to 'store' the runtime on the reactor instance, starting it
|
||||
/// when the reactor starts, and stopping it when the reactor shuts down.
|
||||
#[pyclass]
|
||||
struct PyTokioRuntime {
|
||||
runtime: Option<Runtime>,
|
||||
}
|
||||
|
||||
#[pymethods]
|
||||
impl PyTokioRuntime {
|
||||
fn start(&mut self) -> PyResult<()> {
|
||||
// TODO: allow customization of the runtime like the number of threads
|
||||
let runtime = tokio::runtime::Builder::new_multi_thread()
|
||||
.worker_threads(4)
|
||||
.enable_all()
|
||||
.build()
|
||||
.context("building tokio runtime")?;
|
||||
.build()?;
|
||||
|
||||
Ok(runtime)
|
||||
})
|
||||
self.runtime = Some(runtime);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn shutdown(&mut self) -> PyResult<()> {
|
||||
let runtime = self
|
||||
.runtime
|
||||
.take()
|
||||
.context("Runtime was already shutdown")?;
|
||||
|
||||
// Dropping the runtime will shut it down
|
||||
drop(runtime);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl PyTokioRuntime {
|
||||
/// Get the handle to the Tokio runtime, if it is running.
|
||||
fn handle(&self) -> PyResult<&tokio::runtime::Handle> {
|
||||
let handle = self
|
||||
.runtime
|
||||
.as_ref()
|
||||
.context("Tokio runtime is not running")?
|
||||
.handle();
|
||||
|
||||
Ok(handle)
|
||||
}
|
||||
}
|
||||
|
||||
/// Get a handle to the Tokio runtime stored on the reactor instance, or create
|
||||
/// a new one.
|
||||
fn runtime<'a>(reactor: &Bound<'a, PyAny>) -> PyResult<PyRef<'a, PyTokioRuntime>> {
|
||||
if !reactor.hasattr(TOKIO_RUNTIME_ATTR)? {
|
||||
install_runtime(reactor)?;
|
||||
}
|
||||
|
||||
get_runtime(reactor)
|
||||
}
|
||||
|
||||
/// Install a new Tokio runtime on the reactor instance.
|
||||
fn install_runtime(reactor: &Bound<PyAny>) -> PyResult<()> {
|
||||
let py = reactor.py();
|
||||
let runtime = PyTokioRuntime { runtime: None };
|
||||
let runtime = runtime.into_pyobject(py)?;
|
||||
|
||||
// Attach the runtime to the reactor, starting it when the reactor is
|
||||
// running, stopping it when the reactor is shutting down
|
||||
reactor.call_method1("callWhenRunning", (runtime.getattr("start")?,))?;
|
||||
reactor.call_method1(
|
||||
"addSystemEventTrigger",
|
||||
("after", "shutdown", runtime.getattr("shutdown")?),
|
||||
)?;
|
||||
reactor.setattr(TOKIO_RUNTIME_ATTR, runtime)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get a reference to a Tokio runtime handle stored on the reactor instance.
|
||||
fn get_runtime<'a>(reactor: &Bound<'a, PyAny>) -> PyResult<PyRef<'a, PyTokioRuntime>> {
|
||||
// This will raise if `TOKIO_RUNTIME_ATTR` is not set or if it is
|
||||
// not a `Runtime`. Careful that this could happen if the user sets it
|
||||
// manually, or if multiple versions of `pyo3-twisted` are used!
|
||||
let runtime: Bound<PyTokioRuntime> = reactor.getattr(TOKIO_RUNTIME_ATTR)?.extract()?;
|
||||
Ok(runtime.borrow())
|
||||
}
|
||||
|
||||
/// A reference to the `twisted.internet.defer` module.
|
||||
static DEFER: OnceCell<PyObject> = OnceCell::new();
|
||||
|
||||
/// Access to the `twisted.internet.defer` module.
|
||||
fn defer(py: Python<'_>) -> PyResult<&Bound<PyAny>> {
|
||||
Ok(DEFER
|
||||
@@ -61,14 +148,13 @@ pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()>
|
||||
let child_module: Bound<'_, PyModule> = PyModule::new(py, "http_client")?;
|
||||
child_module.add_class::<HttpClient>()?;
|
||||
|
||||
// Make sure we fail early if we can't build the lazy statics.
|
||||
runtime()?;
|
||||
// Make sure we fail early if we can't load some modules
|
||||
defer(py)?;
|
||||
|
||||
m.add_submodule(&child_module)?;
|
||||
|
||||
// We need to manually add the module to sys.modules to make `from
|
||||
// synapse.synapse_rust import acl` work.
|
||||
// synapse.synapse_rust import http_client` work.
|
||||
py.import("sys")?
|
||||
.getattr("modules")?
|
||||
.set_item("synapse.synapse_rust.http_client", child_module)?;
|
||||
@@ -85,13 +171,16 @@ struct HttpClient {
#[pymethods]
impl HttpClient {
    #[new]
    pub fn py_new(reactor: PyObject, user_agent: &str) -> PyResult<HttpClient> {
    pub fn py_new(reactor: Bound<PyAny>, user_agent: &str) -> PyResult<HttpClient> {
        // Make sure the runtime gets installed
        let _ = runtime(&reactor)?;

        Ok(HttpClient {
            client: reqwest::Client::builder()
                .user_agent(user_agent)
                .build()
                .context("building reqwest client")?,
            reactor,
            reactor: reactor.unbind(),
        })
    }

@@ -129,7 +218,7 @@ impl HttpClient {
        builder: RequestBuilder,
        response_limit: usize,
    ) -> PyResult<Bound<'a, PyAny>> {
        create_deferred(py, self.reactor.clone_ref(py), async move {
        create_deferred(py, self.reactor.bind(py), async move {
            let response = builder.send().await.context("sending request")?;

            let status = response.status();
@@ -159,7 +248,11 @@ impl HttpClient {
/// tokio runtime.
///
/// Does not handle deferred cancellation or contextvars.
fn create_deferred<F, O>(py: Python, reactor: PyObject, fut: F) -> PyResult<Bound<'_, PyAny>>
fn create_deferred<'py, F, O>(
    py: Python<'py>,
    reactor: &Bound<'py, PyAny>,
    fut: F,
) -> PyResult<Bound<'py, PyAny>>
where
    F: Future<Output = PyResult<O>> + Send + 'static,
    for<'a> O: IntoPyObject<'a> + Send + 'static,
@@ -168,10 +261,13 @@ where
    let deferred_callback = deferred.getattr("callback")?.unbind();
    let deferred_errback = deferred.getattr("errback")?.unbind();

    let rt = runtime()?;
    let task = rt.spawn(fut);
    let rt = runtime(reactor)?;
    let handle = rt.handle()?;
    let task = handle.spawn(fut);

    rt.spawn(async move {
    // Unbind the reactor so that we can pass it to the task
    let reactor = reactor.clone().unbind();
    handle.spawn(async move {
        let res = task.await;

        Python::with_gil(move |py| {
@@ -179,16 +275,12 @@ where
            let res = match res {
                Ok(r) => r,
                Err(join_err) => match join_err.try_into_panic() {
                    Ok(panic_err) => {
                        let panic_message = get_panic_message(&panic_err);
                        Err(RustPanicError::new_err(
                            PyString::new(py, panic_message).unbind(),
                        ))
                    }
                    Ok(panic_err) => Err(RustPanicError::from_panic(&panic_err)),
                    Err(err) => Err(PyException::new_err(format!("Task cancelled: {err}"))),
                },
            };

            // Re-bind the reactor
            let reactor = reactor.bind(py);

            // Send the result to the deferred, via `.callback(..)` or `.errback(..)`
@@ -207,17 +299,22 @@ where
        });
    });

    Ok(deferred)
    // Make the deferred follow the Synapse logcontext rules
    make_deferred_yieldable(py, &deferred)
}

/// Try and get the panic message out of the panic
fn get_panic_message<'a>(panic_err: &'a (dyn std::any::Any + Send + 'static)) -> &'a str {
    // Apparently this is how you extract the panic message from a panic
    if let Some(str_slice) = panic_err.downcast_ref::<&str>() {
        str_slice
    } else if let Some(string) = panic_err.downcast_ref::<String>() {
        string
    } else {
        "unknown error"
    }
static MAKE_DEFERRED_YIELDABLE: OnceLock<pyo3::Py<pyo3::PyAny>> = OnceLock::new();

/// Given a deferred, make it follow the Synapse logcontext rules
fn make_deferred_yieldable<'py>(
    py: Python<'py>,
    deferred: &Bound<'py, PyAny>,
) -> PyResult<Bound<'py, PyAny>> {
    let make_deferred_yieldable = MAKE_DEFERRED_YIELDABLE.get_or_init(|| {
        let sys = PyModule::import(py, "synapse.logging.context").unwrap();
        let func = sys.getattr("make_deferred_yieldable").unwrap().unbind();
        func
    });

    make_deferred_yieldable.call1(py, (deferred,))?.extract(py)
}

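Because `create_deferred` now wraps its result with `make_deferred_yieldable`, the deferred handed back to Python already follows the Synapse logcontext rules and can be awaited directly. A hedged sketch (the `get` method name and its arguments are illustrative, not taken from this diff):

    from synapse.synapse_rust.http_client import HttpClient

    async def fetch_body(client: HttpClient) -> bytes:
        # No manual make_deferred_yieldable() call is needed on the Python
        # side; the Rust code has already applied it.
        return await client.get("https://example.com", response_limit=1024 * 1024)
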
@@ -289,6 +289,29 @@ pub const BASE_APPEND_CONTENT_RULES: &[PushRule] = &[PushRule {
    default_enabled: true,
}];

pub const BASE_APPEND_POSTCONTENT_RULES: &[PushRule] = &[
    PushRule {
        rule_id: Cow::Borrowed("global/postcontent/.io.element.msc4306.rule.unsubscribed_thread"),
        priority_class: 6,
        conditions: Cow::Borrowed(&[Condition::Known(
            KnownCondition::Msc4306ThreadSubscription { subscribed: false },
        )]),
        actions: Cow::Borrowed(&[]),
        default: true,
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/postcontent/.io.element.msc4306.rule.subscribed_thread"),
        priority_class: 6,
        conditions: Cow::Borrowed(&[Condition::Known(
            KnownCondition::Msc4306ThreadSubscription { subscribed: true },
        )]),
        actions: Cow::Borrowed(&[Action::Notify, SOUND_ACTION]),
        default: true,
        default_enabled: true,
    },
];

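For reference, the first of these rules would be expressed to clients roughly as follows (a sketch of the expected JSON shape written as a Python literal, not output captured from Synapse):

    unsubscribed_thread_rule = {
        "rule_id": "global/postcontent/.io.element.msc4306.rule.unsubscribed_thread",
        "default": True,
        "enabled": True,
        "conditions": [
            {"kind": "io.element.msc4306.thread_subscription", "subscribed": False}
        ],
        # No actions: events in threads the user has not subscribed to
        # produce no notification.
        "actions": [],
    }
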
pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
    PushRule {
        rule_id: Cow::Borrowed("global/underride/.m.rule.call"),
@@ -706,6 +729,7 @@ lazy_static! {
        .iter()
        .chain(BASE_APPEND_OVERRIDE_RULES.iter())
        .chain(BASE_APPEND_CONTENT_RULES.iter())
        .chain(BASE_APPEND_POSTCONTENT_RULES.iter())
        .chain(BASE_APPEND_UNDERRIDE_RULES.iter())
        .map(|rule| { (&*rule.rule_id, rule) })
        .collect();

@@ -106,8 +106,11 @@ pub struct PushRuleEvaluator {
    /// flag as MSC1767 (extensible events core).
    msc3931_enabled: bool,

    // If MSC4210 (remove legacy mentions) is enabled.
    /// If MSC4210 (remove legacy mentions) is enabled.
    msc4210_enabled: bool,

    /// If MSC4306 (thread subscriptions) is enabled.
    msc4306_enabled: bool,
}

#[pymethods]
@@ -126,6 +129,7 @@ impl PushRuleEvaluator {
        room_version_feature_flags,
        msc3931_enabled,
        msc4210_enabled,
        msc4306_enabled,
    ))]
    pub fn py_new(
        flattened_keys: BTreeMap<String, JsonValue>,
@@ -138,6 +142,7 @@ impl PushRuleEvaluator {
        room_version_feature_flags: Vec<String>,
        msc3931_enabled: bool,
        msc4210_enabled: bool,
        msc4306_enabled: bool,
    ) -> Result<Self, Error> {
        let body = match flattened_keys.get("content.body") {
            Some(JsonValue::Value(SimpleJsonValue::Str(s))) => s.clone().into_owned(),
@@ -156,6 +161,7 @@ impl PushRuleEvaluator {
            room_version_feature_flags,
            msc3931_enabled,
            msc4210_enabled,
            msc4306_enabled,
        })
    }

@@ -167,12 +173,19 @@ impl PushRuleEvaluator {
    ///
    /// Returns the set of actions, if any, that match (filtering out any
    /// `dont_notify` and `coalesce` actions).
    #[pyo3(signature = (push_rules, user_id=None, display_name=None))]
    ///
    /// msc4306_thread_subscription_state: (Only populated if MSC4306 is enabled)
    /// The thread subscription state corresponding to the thread containing this event.
    /// - `None` if the event is not in a thread, or if MSC4306 is disabled.
    /// - `Some(true)` if the event is in a thread and the user has a subscription for that thread
    /// - `Some(false)` if the event is in a thread and the user does NOT have a subscription for that thread
    #[pyo3(signature = (push_rules, user_id=None, display_name=None, msc4306_thread_subscription_state=None))]
    pub fn run(
        &self,
        push_rules: &FilteredPushRules,
        user_id: Option<&str>,
        display_name: Option<&str>,
        msc4306_thread_subscription_state: Option<bool>,
    ) -> Vec<Action> {
        'outer: for (push_rule, enabled) in push_rules.iter() {
            if !enabled {
@@ -204,7 +217,12 @@ impl PushRuleEvaluator {
                Condition::Known(KnownCondition::RoomVersionSupports { feature: _ }),
            );

            match self.match_condition(condition, user_id, display_name) {
            match self.match_condition(
                condition,
                user_id,
                display_name,
                msc4306_thread_subscription_state,
            ) {
                Ok(true) => {}
                Ok(false) => continue 'outer,
                Err(err) => {
@@ -237,14 +255,20 @@ impl PushRuleEvaluator {
    }

    /// Check if the given condition matches.
    #[pyo3(signature = (condition, user_id=None, display_name=None))]
    #[pyo3(signature = (condition, user_id=None, display_name=None, msc4306_thread_subscription_state=None))]
    fn matches(
        &self,
        condition: Condition,
        user_id: Option<&str>,
        display_name: Option<&str>,
        msc4306_thread_subscription_state: Option<bool>,
    ) -> bool {
        match self.match_condition(&condition, user_id, display_name) {
        match self.match_condition(
            &condition,
            user_id,
            display_name,
            msc4306_thread_subscription_state,
        ) {
            Ok(true) => true,
            Ok(false) => false,
            Err(err) => {
@@ -262,6 +286,7 @@ impl PushRuleEvaluator {
        condition: &Condition,
        user_id: Option<&str>,
        display_name: Option<&str>,
        msc4306_thread_subscription_state: Option<bool>,
    ) -> Result<bool, Error> {
        let known_condition = match condition {
            Condition::Known(known) => known,
@@ -393,6 +418,13 @@ impl PushRuleEvaluator {
                    && self.room_version_feature_flags.contains(&flag)
                }
            }
            KnownCondition::Msc4306ThreadSubscription { subscribed } => {
                if !self.msc4306_enabled {
                    false
                } else {
                    msc4306_thread_subscription_state == Some(*subscribed)
                }
            }
        };

        Ok(result)
@@ -536,10 +568,11 @@ fn push_rule_evaluator() {
        vec![],
        true,
        false,
        false,
    )
    .unwrap();

    let result = evaluator.run(&FilteredPushRules::default(), None, Some("bob"));
    let result = evaluator.run(&FilteredPushRules::default(), None, Some("bob"), None);
    assert_eq!(result.len(), 3);
}

@@ -566,6 +599,7 @@ fn test_requires_room_version_supports_condition() {
        flags,
        true,
        false,
        false,
    )
    .unwrap();

@@ -575,6 +609,7 @@ fn test_requires_room_version_supports_condition() {
        &FilteredPushRules::default(),
        Some("@bob:example.org"),
        None,
        None,
    );
    assert_eq!(result.len(), 3);

@@ -593,7 +628,17 @@ fn test_requires_room_version_supports_condition() {
    };
    let rules = PushRules::new(vec![custom_rule]);
    result = evaluator.run(
        &FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false, false),
        &FilteredPushRules::py_new(
            rules,
            BTreeMap::new(),
            true,
            false,
            true,
            false,
            false,
            false,
        ),
        None,
        None,
        None,
    );

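On the Python side, the new argument slots in at the end of `run`. A minimal sketch (the `evaluator` and `filtered_push_rules` values are assumed to be constructed as in the tests above):

    # Returns the list of actions for the matching rule, if any.
    actions = evaluator.run(
        filtered_push_rules,
        user_id="@bob:example.org",
        display_name=None,
        # None when the event is not in a thread (or MSC4306 is disabled);
        # True/False is the user's subscription state for the containing thread.
        msc4306_thread_subscription_state=True,
    )
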
@@ -369,6 +369,10 @@ pub enum KnownCondition {
    RoomVersionSupports {
        feature: Cow<'static, str>,
    },
    #[serde(rename = "io.element.msc4306.thread_subscription")]
    Msc4306ThreadSubscription {
        subscribed: bool,
    },
}

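The `serde` rename means the condition round-trips through Python as a plain dict. A hedged sketch of testing it in isolation via `matches` (using an evaluator constructed with `msc4306_enabled` set to true):

    condition = {
        "kind": "io.element.msc4306.thread_subscription",
        "subscribed": True,
    }
    # Matches only when the supplied subscription state equals `subscribed`.
    assert evaluator.matches(condition, None, None, True)
    assert not evaluator.matches(condition, None, None, False)
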
impl<'source> IntoPyObject<'source> for Condition {
@@ -523,6 +527,7 @@ impl PushRules {
            .chain(base_rules::BASE_APPEND_OVERRIDE_RULES.iter())
            .chain(self.content.iter())
            .chain(base_rules::BASE_APPEND_CONTENT_RULES.iter())
            .chain(base_rules::BASE_APPEND_POSTCONTENT_RULES.iter())
            .chain(self.room.iter())
            .chain(self.sender.iter())
            .chain(self.underride.iter())
@@ -547,11 +552,13 @@ pub struct FilteredPushRules {
    msc3664_enabled: bool,
    msc4028_push_encrypted_events: bool,
    msc4210_enabled: bool,
    msc4306_enabled: bool,
}

#[pymethods]
impl FilteredPushRules {
    #[new]
    #[allow(clippy::too_many_arguments)]
    pub fn py_new(
        push_rules: PushRules,
        enabled_map: BTreeMap<String, bool>,
@@ -560,6 +567,7 @@ impl FilteredPushRules {
        msc3664_enabled: bool,
        msc4028_push_encrypted_events: bool,
        msc4210_enabled: bool,
        msc4306_enabled: bool,
    ) -> Self {
        Self {
            push_rules,
@@ -569,6 +577,7 @@ impl FilteredPushRules {
            msc3664_enabled,
            msc4028_push_encrypted_events,
            msc4210_enabled,
            msc4306_enabled,
        }
    }

@@ -619,6 +628,10 @@ impl FilteredPushRules {
                    return false;
                }

                if !self.msc4306_enabled && rule.rule_id.contains("/.io.element.msc4306.rule.") {
                    return false;
                }

                true
            })
            .map(|r| {

@@ -1,5 +1,5 @@
$schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.134/synapse-config.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.141/synapse-config.schema.json
type: object
properties:
  modules:
@@ -629,6 +629,70 @@ properties:
        password: mypassword
        ssh_priv_key_path: CONFDIR/id_rsa
        ssh_pub_key_path: CONFDIR/id_rsa.pub
  http_proxy:
    type: ["string", "null"]
    description: >-
      Proxy server to use for HTTP requests.

      For more details, see the [forward proxy documentation](../../setup/forward_proxy.md).
    examples:
      - "http://USERNAME:PASSWORD@10.0.1.1:8080/"
  https_proxy:
    type: ["string", "null"]
    description: >-
      Proxy server to use for HTTPS requests.

      For more details, see the [forward proxy documentation](../../setup/forward_proxy.md).
    examples:
      - "http://USERNAME:PASSWORD@proxy.example.com:8080/"
  no_proxy_hosts:
    type: array
    description: >-
      List of hosts, IP addresses, or IP ranges in CIDR format which should not use the
      proxy. Synapse will directly connect to these hosts.

      For more details, see the [forward proxy documentation](../../setup/forward_proxy.md).
    examples:
      - - master.hostname.example.com
        - 10.1.0.0/16
        - 172.30.0.0/16
  matrix_authentication_service:
    type: object
    description: >-
      The `matrix_authentication_service` setting configures integration with
      [Matrix Authentication Service (MAS)](https://github.com/element-hq/matrix-authentication-service).
    properties:
      enabled:
        type: boolean
        description: >-
          Whether or not to enable the MAS integration. If this is set to
          `false`, Synapse will use its legacy internal authentication API.
        default: false

      endpoint:
        type: string
        format: uri
        description: >-
          The URL where Synapse can reach MAS. This *must* have the `discovery`
          and `oauth` resources mounted.
        default: http://localhost:8080

      secret:
        type: ["string", "null"]
        description: >-
          A shared secret that will be used to authenticate requests from and to MAS.

      secret_path:
        type: ["string", "null"]
        description: >-
          Alternative to `secret`, reading the shared secret from a file.
          The file should be a plain text file, containing only the secret.
          Synapse reads the secret from the given file once at startup.

    examples:
      - enabled: true
        secret: someverysecuresecret
        endpoint: http://localhost:8080
  dummy_events_threshold:
    type: integer
    description: >-
@@ -2179,9 +2243,8 @@ properties:
      with a short timeout, or restarting several different delayed events all
      at once) without the risk of being ratelimited.
    default:
      per_user:
        per_second: 1.0
        burst_count: 5.0
      per_second: 1.0
      burst_count: 5.0
    examples:
      - per_second: 2.0
        burst_count: 20.0
@@ -2196,12 +2259,21 @@ properties:
      Setting this to a high value allows users to report content quickly, possibly in
      duplicate. This can result in higher database usage.
    default:
      per_user:
        per_second: 1.0
        burst_count: 5.0
      per_second: 1.0
      burst_count: 5.0
    examples:
      - per_second: 2.0
        burst_count: 20.0
  rc_room_creation:
    $ref: "#/$defs/rc"
    description: >-
      Sets rate limits for how often users are able to create rooms.
    default:
      per_second: 0.016
      burst_count: 10.0
    examples:
      - per_second: 1.0
        burst_count: 5.0
  federation_rr_transactions_per_room_per_second:
    type: integer
    description: >-
@@ -2335,6 +2407,37 @@ properties:
    default: 50M
    examples:
      - 60M
  media_upload_limits:
    type: array
    description: >-
      A list of media upload limits defining how much data a given user can
      upload in a given time period.

      These limits are applied in addition to the `max_upload_size` limit above
      (which applies to individual uploads).


      An empty list means no limits are applied.


      These settings can be overridden using the `get_media_upload_limits_for_user`
      module API [callback](../../modules/media_repository_callbacks.md#get_media_upload_limits_for_user).
    default: []
    items:
      time_period:
        type: "#/$defs/duration"
        description: >-
          The time period over which the limit applies. Required.
      max_size:
        type: "#/$defs/bytes"
        description: >-
          Amount of data that can be uploaded in the time period by the user.
          Required.
    examples:
      - - time_period: 1h
          max_size: 100M
        - time_period: 1w
          max_size: 500M
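A hedged sketch of what a `get_media_upload_limits_for_user` module callback could look like (the precise signature and return type are defined in the linked module API docs; the shapes below are illustrative assumptions):

    async def get_media_upload_limits_for_user(user_id: str):
        # Returning None (illustrative) would fall through to the configured
        # `media_upload_limits`; returning a list overrides them per user.
        if user_id == "@trusted:example.com":
            return None
        return [
            {"time_period": "1h", "max_size": "100M"},
            {"time_period": "1w", "max_size": "500M"},
        ]
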
  max_image_pixels:
    $ref: "#/$defs/bytes"
    description: Maximum number of pixels that will be thumbnailed.
@@ -2668,6 +2771,21 @@ properties:
    default: null
    examples:
      - YOUR_PUBLIC_KEY
  recaptcha_public_key_path:
    type: ["string", "null"]
    description: >-
      An alternative to [`recaptcha_public_key`](#recaptcha_public_key): allows
      the public key to be specified in an external file.


      The file should be a plain text file, containing only the public key.
      Synapse reads the public key from the given file once at startup.


      _Added in Synapse 1.135.0._
    default: null
    examples:
      - /path/to/key/file
  recaptcha_private_key:
    type: ["string", "null"]
    description: >-
@@ -2676,6 +2794,21 @@ properties:
    default: null
    examples:
      - YOUR_PRIVATE_KEY
  recaptcha_private_key_path:
    type: ["string", "null"]
    description: >-
      An alternative to [`recaptcha_private_key`](#recaptcha_private_key):
      allows the private key to be specified in an external file.


      The file should be a plain text file, containing only the private key.
      Synapse reads the private key from the given file once at startup.


      _Added in Synapse 1.135.0._
    default: null
    examples:
      - /path/to/key/file
  enable_registration_captcha:
    type: boolean
    description: >-
@@ -2751,6 +2884,35 @@ properties:
    default: true
    examples:
      - false
  matrix_rtc:
    type: object
    description: >-
      Options related to MatrixRTC.
    properties:
      transports:
        type: array
        items:
          type: object
          required:
            - type
          properties:
            type:
              type: string
              description: The type of transport to use to connect to the selective forwarding unit (SFU).
              example: livekit
            livekit_service_url:
              type: string
              description: >-
                The base URL of the LiveKit service. Should only be used with LiveKit-based transports.
              example: https://matrix-rtc.example.com/livekit/jwt
        description:
          A list of transport types and arguments to use for MatrixRTC connections.
        default: []
    default: {}
    examples:
      - transports:
          - type: livekit
            livekit_service_url: https://matrix-rtc.example.com/livekit/jwt
  enable_registration:
    type: boolean
    description: >-
@@ -4665,8 +4827,15 @@ properties:
      enabled:
        type: boolean
        description: >-
          Defines whether users can search the user directory. If false then
          Defines whether users can search the user directory. If `false` then
          empty responses are returned to all queries.


          *Warning: While the homeserver may determine which subset of users are
          searched, the Matrix specification requires homeservers to include (at
          minimum) users visible in public rooms and users sharing a room with
          the requester. Using `false` improves performance but violates this
          requirement.*
        default: true
      search_all_users:
        type: boolean
@@ -5049,7 +5218,7 @@ properties:

      "m.room.avatar": 50

      "m.room.tombstone": 100
      "m.room.tombstone": 100 (150 if MSC4289 is used)

      "m.room.server_acl": 100

@@ -5323,6 +5492,9 @@ properties:
          push_rules:
            type: string
            description: Name of a worker assigned to the `push_rules` stream.
          device_lists:
            type: string
            description: Name of a worker assigned to the `device_lists` stream.
        default: {}
        examples:
          - events: worker1

@@ -18,7 +18,7 @@ import sys
import threading
from concurrent.futures import ThreadPoolExecutor
from types import FrameType
from typing import Collection, Optional, Sequence, Set
from typing import Collection, Optional, Sequence

# These are expanded inside the dockerfile to be a fully qualified image name.
# e.g. docker.io/library/debian:bullseye
@@ -32,7 +32,7 @@ DISTS = (
    "debian:sid",  # (rolling distro, no EOL)
    "ubuntu:jammy",  # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04)
    "ubuntu:noble",  # 24.04 LTS (EOL 2029-06)
    "ubuntu:oracular",  # 24.10 (EOL 2025-07)
    "ubuntu:plucky",  # 25.04 (EOL 2026-01)
    "debian:trixie",  # (EOL not specified yet)
)

@@ -54,7 +54,7 @@ class Builder:
    ):
        self.redirect_stdout = redirect_stdout
        self._docker_build_args = tuple(docker_build_args or ())
        self.active_containers: Set[str] = set()
        self.active_containers: set[str] = set()
        self._lock = threading.Lock()
        self._failed = False


@@ -21,7 +21,6 @@
#
import sys
from pathlib import Path
from typing import Dict, List

import tomli

@@ -33,7 +32,7 @@ def main() -> None:

    # Poetry 1.3+ lockfile format:
    # There's a `files` inline table in each [[package]]
    packages_to_assets: Dict[str, List[Dict[str, str]]] = {
    packages_to_assets: dict[str, list[dict[str, str]]] = {
        package["name"]: package["files"] for package in lockfile_content["package"]
    }


@@ -47,11 +47,7 @@ from contextlib import contextmanager
from typing import (
    Any,
    Callable,
    Dict,
    Generator,
    List,
    Set,
    Type,
    TypeVar,
)

@@ -69,7 +65,7 @@ from synapse._pydantic_compat import (

logger = logging.getLogger(__name__)

CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: List[Callable] = [
CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: list[Callable] = [
    constr,
    conbytes,
    conint,
@@ -145,7 +141,7 @@ class PatchedBaseModel(PydanticBaseModel):
    """

    @classmethod
    def __init_subclass__(cls: Type[PydanticBaseModel], **kwargs: object):
    def __init_subclass__(cls: type[PydanticBaseModel], **kwargs: object):
        for field in cls.__fields__.values():
            # Note that field.type_ and field.outer_type are computed based on the
            # annotation type, see pydantic.fields.ModelField._type_analysis
@@ -212,7 +208,7 @@ def lint() -> int:
    return os.EX_DATAERR if failures else os.EX_OK


def do_lint() -> Set[str]:
def do_lint() -> set[str]:
    """Try to import all of Synapse and see if we spot any Pydantic type coercions."""
    failures = set()

@@ -258,8 +254,8 @@ def run_test_snippet(source: str) -> None:
    # > Remember that at the module level, globals and locals are the same dictionary.
    # > If exec gets two separate objects as globals and locals, the code will be
    # > executed as if it were embedded in a class definition.
    globals_: Dict[str, object]
    locals_: Dict[str, object]
    globals_: dict[str, object]
    locals_: dict[str, object]
    globals_ = locals_ = {}
    exec(textwrap.dedent(source), globals_, locals_)

@@ -394,10 +390,10 @@ class TestFieldTypeInspection(unittest.TestCase):
            ("bool"),
            ("Optional[str]",),
            ("Union[None, str]",),
            ("List[str]",),
            ("List[List[str]]",),
            ("Dict[StrictStr, str]",),
            ("Dict[str, StrictStr]",),
            ("list[str]",),
            ("list[list[str]]",),
            ("dict[StrictStr, str]",),
            ("dict[str, StrictStr]",),
            ("TypedDict('D', x=int)",),
        ]
    )
@@ -425,9 +421,9 @@ class TestFieldTypeInspection(unittest.TestCase):
            ("constr(strict=True, min_length=10)",),
            ("Optional[StrictStr]",),
            ("Union[None, StrictStr]",),
            ("List[StrictStr]",),
            ("List[List[StrictStr]]",),
            ("Dict[StrictStr, StrictStr]",),
            ("list[StrictStr]",),
            ("list[list[StrictStr]]",),
            ("dict[StrictStr, StrictStr]",),
            ("TypedDict('D', x=StrictInt)",),
        ]
    )

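For context, the coercion this linter hunts for looks like the following (a minimal sketch assuming Pydantic v1 semantics, where plain `str` fields silently coerce):

    from pydantic import BaseModel, StrictStr

    class Plain(BaseModel):
        name: str  # Plain(name=1) coerces the int, yielding name == "1"

    class Strict(BaseModel):
        name: StrictStr  # Strict(name=1) raises a ValidationError instead
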
@@ -5,7 +5,7 @@
# Also checks that schema deltas do not try and create or drop indices.

import re
from typing import Any, Dict, List
from typing import Any

import click
import git
@@ -48,16 +48,16 @@ def main(force_colors: bool) -> None:

    r = repo.git.show(f"origin/{DEVELOP_BRANCH}:synapse/storage/schema/__init__.py")

    locals: Dict[str, Any] = {}
    locals: dict[str, Any] = {}
    exec(r, locals)
    current_schema_version = locals["SCHEMA_VERSION"]

    diffs: List[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None)
    diffs: list[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None)

    # Get the schema version of the local file to check against current schema on develop
    with open("synapse/storage/schema/__init__.py") as file:
        local_schema = file.read()
    new_locals: Dict[str, Any] = {}
    new_locals: dict[str, Any] = {}
    exec(local_schema, new_locals)
    local_schema_version = new_locals["SCHEMA_VERSION"]


@@ -230,6 +230,7 @@ test_packages=(
    ./tests/msc3967
    ./tests/msc4140
    ./tests/msc4155
    ./tests/msc4306
)

# Enable dirty runs, so tests will reuse the same container where possible.

@@ -43,7 +43,7 @@ import argparse
import base64
import json
import sys
from typing import Any, Dict, Mapping, Optional, Tuple, Union
from typing import Any, Mapping, Optional, Union
from urllib import parse as urlparse

import requests
@@ -147,7 +147,7 @@ def request(
    s = requests.Session()
    s.mount("matrix-federation://", MatrixConnectionAdapter())

    headers: Dict[str, str] = {
    headers: dict[str, str] = {
        "Authorization": authorization_headers[0],
    }

@@ -303,7 +303,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
        request: PreparedRequest,
        verify: Optional[Union[bool, str]],
        proxies: Optional[Mapping[str, str]] = None,
        cert: Optional[Union[Tuple[str, str], str]] = None,
        cert: Optional[Union[tuple[str, str], str]] = None,
    ) -> HTTPConnectionPool:
        # overrides the get_connection_with_tls_context() method in the base class
        parsed = urlparse.urlsplit(request.url)
@@ -326,7 +326,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
        )

    @staticmethod
    def _lookup(server_name: str) -> Tuple[str, int, str]:
    def _lookup(server_name: str) -> tuple[str, int, str]:
        """
        Do an SRV lookup on a server name and return the host:port to connect to
        Given the server_name (after any .well-known lookup), return the host, port and

@@ -473,6 +473,10 @@ def section(prop: str, values: dict) -> str:


def main() -> None:
    # For Windows: reconfigure the terminal to be UTF-8 for `print()` calls.
    if sys.platform == "win32":
        sys.stdout.reconfigure(encoding="utf-8")

    def usage(err_msg: str) -> int:
        script_name = (sys.argv[:1] or ["__main__.py"])[0]
        print(err_msg, file=sys.stderr)
@@ -485,7 +489,10 @@ def main() -> None:
            exit(usage("Too many arguments."))
        if not (filepath := (sys.argv[1:] or [""])[0]):
            exit(usage("No schema file provided."))
        with open(filepath) as f:
        with open(filepath, "r", encoding="utf-8") as f:
            # Note: Windows requires that we specify the encoding, otherwise it uses
            # things like CP-1251, which can cause explosions.
            # See https://github.com/yaml/pyyaml/issues/123 for more info.
            return yaml.safe_load(f)

    schema = read_json_file_arg()

@@ -23,28 +23,243 @@
can crop up, e.g. the cache descriptors.
"""

from typing import Callable, Optional, Tuple, Type, Union
import enum
from typing import Callable, Mapping, Optional, Union

import attr
import mypy.types
from mypy.erasetype import remove_instance_last_known_values
from mypy.errorcodes import ErrorCode
from mypy.nodes import ARG_NAMED_OPT, TempNode, Var
from mypy.plugin import FunctionSigContext, MethodSigContext, Plugin
from mypy.nodes import ARG_NAMED_OPT, ListExpr, NameExpr, TempNode, TupleExpr, Var
from mypy.plugin import (
    ClassDefContext,
    Context,
    FunctionLike,
    FunctionSigContext,
    MethodSigContext,
    MypyFile,
    Plugin,
)
from mypy.typeops import bind_self
from mypy.types import (
    AnyType,
    CallableType,
    Instance,
    NoneType,
    Options,
    TupleType,
    TypeAliasType,
    TypeVarType,
    UninhabitedType,
    UnionType,
)
from mypy_zope import plugin as mypy_zope_plugin
from pydantic.mypy import plugin as mypy_pydantic_plugin

PROMETHEUS_METRIC_MISSING_SERVER_NAME_LABEL = ErrorCode(
    "missing-server-name-label",
    "`SERVER_NAME_LABEL` required in metric",
    category="per-homeserver-tenant-metrics",
)

PROMETHEUS_METRIC_MISSING_FROM_LIST_TO_CHECK = ErrorCode(
    "metric-type-missing-from-list",
    "Every Prometheus metric type must be included in the `prometheus_metric_fullname_to_label_arg_map`.",
    category="per-homeserver-tenant-metrics",
)

PREFER_SYNAPSE_CLOCK_CALL_LATER = ErrorCode(
    "call-later-not-tracked",
    "Prefer using `synapse.util.Clock.call_later` instead of `reactor.callLater`",
    category="synapse-reactor-clock",
)

PREFER_SYNAPSE_CLOCK_LOOPING_CALL = ErrorCode(
    "prefer-synapse-clock-looping-call",
    "Prefer using `synapse.util.Clock.looping_call` instead of `task.LoopingCall`",
    category="synapse-reactor-clock",
)

PREFER_SYNAPSE_CLOCK_CALL_WHEN_RUNNING = ErrorCode(
    "prefer-synapse-clock-call-when-running",
    "Prefer using `synapse.util.Clock.call_when_running` instead of `reactor.callWhenRunning`",
    category="synapse-reactor-clock",
)

PREFER_SYNAPSE_CLOCK_ADD_SYSTEM_EVENT_TRIGGER = ErrorCode(
    "prefer-synapse-clock-add-system-event-trigger",
    "Prefer using `synapse.util.Clock.add_system_event_trigger` instead of `reactor.addSystemEventTrigger`",
    category="synapse-reactor-clock",
)

MULTIPLE_INTERNAL_CLOCKS_CREATED = ErrorCode(
    "multiple-internal-clocks",
    "Only one instance of `clock.Clock` should be created",
    category="synapse-reactor-clock",
)

UNTRACKED_BACKGROUND_PROCESS = ErrorCode(
    "untracked-background-process",
    "Prefer using `HomeServer.run_as_background_process` method over the bare `run_as_background_process`",
    category="synapse-tracked-calls",
)


class Sentinel(enum.Enum):
    # Defining a sentinel in this way allows mypy to correctly handle the
    # type of a dictionary lookup and subsequent type narrowing.
    UNSET_SENTINEL = object()


@attr.s(auto_attribs=True)
class ArgLocation:
    keyword_name: str
    """
    The keyword argument name for this argument
    """
    position: int
    """
    The 0-based positional index of this argument
    """


prometheus_metric_fullname_to_label_arg_map: Mapping[str, Optional[ArgLocation]] = {
    # `Collector` subclasses:
    "prometheus_client.metrics.MetricWrapperBase": ArgLocation("labelnames", 2),
    "prometheus_client.metrics.Counter": ArgLocation("labelnames", 2),
    "prometheus_client.metrics.Histogram": ArgLocation("labelnames", 2),
    "prometheus_client.metrics.Gauge": ArgLocation("labelnames", 2),
    "prometheus_client.metrics.Summary": ArgLocation("labelnames", 2),
    "prometheus_client.metrics.Info": ArgLocation("labelnames", 2),
    "prometheus_client.metrics.Enum": ArgLocation("labelnames", 2),
    "synapse.metrics.LaterGauge": ArgLocation("labelnames", 2),
    "synapse.metrics.InFlightGauge": ArgLocation("labels", 2),
    "synapse.metrics.GaugeBucketCollector": ArgLocation("labelnames", 2),
    "prometheus_client.registry.Collector": None,
    "prometheus_client.registry._EmptyCollector": None,
    "prometheus_client.registry.CollectorRegistry": None,
    "prometheus_client.process_collector.ProcessCollector": None,
    "prometheus_client.platform_collector.PlatformCollector": None,
    "prometheus_client.gc_collector.GCCollector": None,
    "synapse.metrics._gc.GCCounts": None,
    "synapse.metrics._gc.PyPyGCStats": None,
    "synapse.metrics._reactor_metrics.ReactorLastSeenMetric": None,
    "synapse.metrics.CPUMetrics": None,
    "synapse.metrics.jemalloc.JemallocCollector": None,
    "synapse.util.metrics.DynamicCollectorRegistry": None,
    "synapse.metrics.background_process_metrics._Collector": None,
    #
    # `Metric` subclasses:
    "prometheus_client.metrics_core.Metric": None,
    "prometheus_client.metrics_core.UnknownMetricFamily": ArgLocation("labels", 3),
    "prometheus_client.metrics_core.CounterMetricFamily": ArgLocation("labels", 3),
    "prometheus_client.metrics_core.GaugeMetricFamily": ArgLocation("labels", 3),
    "prometheus_client.metrics_core.SummaryMetricFamily": ArgLocation("labels", 3),
    "prometheus_client.metrics_core.InfoMetricFamily": ArgLocation("labels", 3),
    "prometheus_client.metrics_core.HistogramMetricFamily": ArgLocation("labels", 3),
    "prometheus_client.metrics_core.GaugeHistogramMetricFamily": ArgLocation(
        "labels", 4
    ),
    "prometheus_client.metrics_core.StateSetMetricFamily": ArgLocation("labels", 3),
    "synapse.metrics.GaugeHistogramMetricFamilyWithLabels": ArgLocation(
        "labelnames", 4
    ),
}
"""
Map from the fullname of the Prometheus `Metric`/`Collector` classes to the keyword
argument name and positional index of the label names. This map is useful because
different metrics have different signatures for passing in label names and we just need
to know where to look.

This map should include any metrics that we collect with Prometheus, which corresponds
to anything that inherits from `prometheus_client.registry.Collector`
(`synapse.metrics._types.Collector`) or `prometheus_client.metrics_core.Metric`. The
exhaustiveness of this list is enforced by `analyze_prometheus_metric_classes`.

The entries with `None` always fail the lint because they don't have a `labelnames`
argument (and therefore no `SERVER_NAME_LABEL`), but we include them here so that people
can notice and manually allow them via a type ignore comment, since the source of truth
should live in the source code.
"""

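As an illustration of what each `ArgLocation` encodes: for `prometheus_client.metrics.Counter`, the label names are the third constructor argument (0-based position 2, keyword `labelnames`). A sketch (the metric name here is made up; `SERVER_NAME_LABEL` is the real constant the lint looks for):

    from prometheus_client import Counter

    from synapse.metrics import SERVER_NAME_LABEL

    example_counter = Counter(
        "synapse_example_total",              # position 0: name
        "An example per-homeserver counter",  # position 1: documentation
        labelnames=[SERVER_NAME_LABEL, "type"],  # position 2: labelnames
    )
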
# Unbound at this point because we don't know the mypy version yet.
# This is set in the `plugin(...)` function below.
MypyPydanticPluginClass: type[Plugin]
MypyZopePluginClass: type[Plugin]


class SynapsePlugin(Plugin):
    def __init__(self, options: Options):
        super().__init__(options)
        self.mypy_pydantic_plugin = MypyPydanticPluginClass(options)
        self.mypy_zope_plugin = MypyZopePluginClass(options)

    def set_modules(self, modules: dict[str, MypyFile]) -> None:
        """
        This is called by mypy internals. We have to override this to ensure it's also
        called for any other plugins that we're manually handling.

        Here is how mypy describes it:

        > [`self._modules`] can't be set in `__init__` because it is executed too soon
        > in `build.py`. Therefore, `build.py` *must* set it later before graph processing
        > starts by calling `set_modules()`.
        """
        super().set_modules(modules)
        self.mypy_pydantic_plugin.set_modules(modules)
        self.mypy_zope_plugin.set_modules(modules)

    def get_base_class_hook(
        self, fullname: str
    ) -> Optional[Callable[[ClassDefContext], None]]:
        def _get_base_class_hook(ctx: ClassDefContext) -> None:
            # Run any `get_base_class_hook` checks from other plugins first.
            #
            # Unfortunately, because mypy only chooses the first plugin that returns a
            # non-None value (a known limitation, cf.
            # https://github.com/python/mypy/issues/19524), we work around this by
            # putting our custom plugin first in the plugin order and then calling the
            # other plugin's hook manually followed by our own checks.
            if callback := self.mypy_pydantic_plugin.get_base_class_hook(fullname):
                callback(ctx)
            if callback := self.mypy_zope_plugin.get_base_class_hook(fullname):
                callback(ctx)

            # Now run our own checks
            analyze_prometheus_metric_classes(ctx)

        return _get_base_class_hook

    def get_function_signature_hook(
        self, fullname: str
    ) -> Optional[Callable[[FunctionSigContext], FunctionLike]]:
        # Strip off the unique identifier for classes that are dynamically created inside
        # functions, e.g. `synapse.metrics.jemalloc.JemallocCollector@185` (this is the line
        # number)
        if "@" in fullname:
            fullname = fullname.split("@", 1)[0]

        # Look for any Prometheus metrics to make sure they have the `SERVER_NAME_LABEL`
        # label.
        if fullname in prometheus_metric_fullname_to_label_arg_map.keys():
            # Because it's difficult to determine the `fullname` of the function in the
            # callback, let's just pass it in while we have it.
            return lambda ctx: check_prometheus_metric_instantiation(ctx, fullname)

        if fullname == "twisted.internet.task.LoopingCall":
            return check_looping_call

        if fullname == "synapse.util.clock.Clock":
            return check_clock_creation

        if (
            fullname
            == "synapse.metrics.background_process_metrics.run_as_background_process"
        ):
            return check_background_process

        return None

    def get_method_signature_hook(
        self, fullname: str
    ) -> Optional[Callable[[MethodSigContext], CallableType]]:
@@ -62,9 +277,328 @@ class SynapsePlugin(Plugin):
        ):
            return check_is_cacheable_wrapper

        if fullname in (
            "twisted.internet.interfaces.IReactorTime.callLater",
            "synapse.types.ISynapseThreadlessReactor.callLater",
            "synapse.types.ISynapseReactor.callLater",
        ):
            return check_call_later

        if fullname in (
            "twisted.internet.interfaces.IReactorCore.callWhenRunning",
            "synapse.types.ISynapseThreadlessReactor.callWhenRunning",
            "synapse.types.ISynapseReactor.callWhenRunning",
        ):
            return check_call_when_running

        if fullname in (
            "twisted.internet.interfaces.IReactorCore.addSystemEventTrigger",
            "synapse.types.ISynapseThreadlessReactor.addSystemEventTrigger",
            "synapse.types.ISynapseReactor.addSystemEventTrigger",
        ):
            return check_add_system_event_trigger

        return None


def check_clock_creation(ctx: FunctionSigContext) -> CallableType:
    """
    Ensure that the only `clock.Clock` instance is the one used by the `HomeServer`.
    This is so that the `HomeServer` can cancel any tracked delayed or looping calls
    during server shutdown.

    Args:
        ctx: The `FunctionSigContext` from mypy.
    """
    signature: CallableType = ctx.default_signature
    ctx.api.fail(
        "Expected the only `clock.Clock` instance to be the one used by the `HomeServer`. "
        "This is so that the `HomeServer` can cancel any tracked delayed or looping calls "
        "during server shutdown",
        ctx.context,
        code=MULTIPLE_INTERNAL_CLOCKS_CREATED,
    )

    return signature


def check_call_later(ctx: MethodSigContext) -> CallableType:
    """
    Ensure that `reactor.callLater` callsites aren't used.

    `synapse.util.Clock.call_later` should always be used instead of `reactor.callLater`.
    This is because the `synapse.util.Clock` tracks delayed calls in order to cancel any
    outstanding calls during server shutdown. Delayed calls which are either short lived
    (<~60s) or frequently called and can be tracked via other means could be candidates for
    using `synapse.util.Clock.call_later` with `call_later_cancel_on_shutdown` set to
    `False`. There shouldn't be a need to use `reactor.callLater` outside of tests or the
    `Clock` class itself. If a need arises, you can use a type ignore comment to disable the
    check, e.g. `# type: ignore[call-later-not-tracked]`.

    Args:
        ctx: The `FunctionSigContext` from mypy.
    """
    signature: CallableType = ctx.default_signature
    ctx.api.fail(
        "Expected all `reactor.callLater` calls to use `synapse.util.Clock.call_later` "
        "instead. This is so that long lived calls can be tracked for cancellation during "
        "server shutdown",
        ctx.context,
        code=PREFER_SYNAPSE_CLOCK_CALL_LATER,
    )

    return signature

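The preferred pattern these checks steer toward, as a rough Python sketch (`hs` is a `HomeServer`; the `call_later_cancel_on_shutdown` keyword is the one named in the docstring above):

    from synapse.server import HomeServer

    def do_cleanup() -> None:
        ...

    def schedule_cleanup(hs: HomeServer) -> None:
        clock = hs.get_clock()
        # Tracked by the Synapse Clock, so it can be cancelled at shutdown.
        clock.call_later(30, do_cleanup)
        # Short-lived timers may opt out of shutdown tracking.
        clock.call_later(5, do_cleanup, call_later_cancel_on_shutdown=False)

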
def check_looping_call(ctx: FunctionSigContext) -> CallableType:
    """
    Ensure that `task.LoopingCall` callsites aren't used.

    `synapse.util.Clock.looping_call` should always be used instead of `task.LoopingCall`.
    `synapse.util.Clock` tracks looping calls in order to cancel any outstanding calls
    during server shutdown.

    Args:
        ctx: The `FunctionSigContext` from mypy.
    """
    signature: CallableType = ctx.default_signature
    ctx.api.fail(
        "Expected all `task.LoopingCall` instances to use `synapse.util.Clock.looping_call` "
        "instead. This is so that long lived calls can be tracked for cancellation during "
        "server shutdown",
        ctx.context,
        code=PREFER_SYNAPSE_CLOCK_LOOPING_CALL,
    )

    return signature


def check_call_when_running(ctx: MethodSigContext) -> CallableType:
    """
    Ensure that `reactor.callWhenRunning` callsites aren't used.

    `synapse.util.Clock.call_when_running` should always be used instead of
    `reactor.callWhenRunning`.

    Since `reactor.callWhenRunning` is a reactor callback, the callback will start out
    with the sentinel logcontext. `synapse.util.Clock` starts a default logcontext as we
    want to know which server the logs came from.

    Args:
        ctx: The `FunctionSigContext` from mypy.
    """
    signature: CallableType = ctx.default_signature
    ctx.api.fail(
        (
            "Expected all `reactor.callWhenRunning` calls to use `synapse.util.Clock.call_when_running` instead. "
            "This is so all Synapse code runs with a logcontext as we want to know which server the logs came from."
        ),
        ctx.context,
        code=PREFER_SYNAPSE_CLOCK_CALL_WHEN_RUNNING,
    )

    return signature


def check_add_system_event_trigger(ctx: MethodSigContext) -> CallableType:
    """
    Ensure that `reactor.addSystemEventTrigger` callsites aren't used.

    `synapse.util.Clock.add_system_event_trigger` should always be used instead of
    `reactor.addSystemEventTrigger`.

    Since `reactor.addSystemEventTrigger` is a reactor callback, the callback will start out
    with the sentinel logcontext. `synapse.util.Clock` starts a default logcontext as we
    want to know which server the logs came from.

    Args:
        ctx: The `FunctionSigContext` from mypy.
    """
    signature: CallableType = ctx.default_signature
    ctx.api.fail(
        (
            "Expected all `reactor.addSystemEventTrigger` calls to use `synapse.util.Clock.add_system_event_trigger` instead. "
            "This is so all Synapse code runs with a logcontext as we want to know which server the logs came from."
        ),
        ctx.context,
        code=PREFER_SYNAPSE_CLOCK_ADD_SYSTEM_EVENT_TRIGGER,
    )

    return signature


def check_background_process(ctx: FunctionSigContext) -> CallableType:
    """
    Ensure that calls to `run_as_background_process` use the `HomeServer` method.
    This is so that the `HomeServer` can cancel any running background processes during
    server shutdown.

    Args:
        ctx: The `FunctionSigContext` from mypy.
    """
    signature: CallableType = ctx.default_signature
    ctx.api.fail(
        "Prefer using the `HomeServer.run_as_background_process` method over the bare "
        "`run_as_background_process`. This is so that the `HomeServer` can cancel "
        "any background processes during server shutdown",
        ctx.context,
        code=UNTRACKED_BACKGROUND_PROCESS,
    )

    return signature


def analyze_prometheus_metric_classes(ctx: ClassDefContext) -> None:
    """
    Cross-check the list of Prometheus metric classes against the
    `prometheus_metric_fullname_to_label_arg_map` to ensure the list is exhaustive and
    up-to-date.
    """

    fullname = ctx.cls.fullname
    # Strip off the unique identifier for classes that are dynamically created inside
    # functions, e.g. `synapse.metrics.jemalloc.JemallocCollector@185` (this is the line
    # number)
    if "@" in fullname:
        fullname = fullname.split("@", 1)[0]

    if any(
        ancestor_type.fullname
        in (
            # All of the Prometheus metric classes inherit from `Collector`.
            "prometheus_client.registry.Collector",
            "synapse.metrics._types.Collector",
            # And custom metrics that inherit from `Metric`.
            "prometheus_client.metrics_core.Metric",
        )
        for ancestor_type in ctx.cls.info.mro
    ):
        if fullname not in prometheus_metric_fullname_to_label_arg_map:
            ctx.api.fail(
                f"Expected {fullname} to be in `prometheus_metric_fullname_to_label_arg_map`, "
                f"but it was not found. This is a problem with our custom mypy plugin. "
                f"Please add it to the map.",
                Context(),
                code=PROMETHEUS_METRIC_MISSING_FROM_LIST_TO_CHECK,
            )


def check_prometheus_metric_instantiation(
    ctx: FunctionSigContext, fullname: str
) -> CallableType:
    """
    Ensure that the `prometheus_client` metrics include the `SERVER_NAME_LABEL` label
    when instantiated.

    This is important because we support multiple Synapse instances running in the same
    process, where all metrics share a single global `REGISTRY`. The `server_name` label
    ensures metrics are correctly separated by homeserver.

    There are also some metrics that apply at the process level, such as CPU usage,
    Python garbage collection, and Twisted reactor tick time, which shouldn't have the
    `SERVER_NAME_LABEL`. In those cases, use a type ignore comment to disable the
    check, e.g. `# type: ignore[missing-server-name-label]`.

    Args:
        ctx: The `FunctionSigContext` from mypy.
        fullname: The fully qualified name of the function being called,
            e.g. `"prometheus_client.metrics.Counter"`
    """
    # The true signature; this isn't being modified, so this is what will be returned.
    signature = ctx.default_signature

    # Find where the label names argument is in the function signature.
    arg_location = prometheus_metric_fullname_to_label_arg_map.get(
        fullname, Sentinel.UNSET_SENTINEL
    )
    assert arg_location is not Sentinel.UNSET_SENTINEL, (
        f"Expected to find {fullname} in `prometheus_metric_fullname_to_label_arg_map`, "
        f"but it was not found. This is a problem with our custom mypy plugin. "
        f"Please add it to the map. Context: {ctx.context}"
    )
    # People should be using `# type: ignore[missing-server-name-label]` for
    # process-level metrics that should not have the `SERVER_NAME_LABEL`.
    if arg_location is None:
        ctx.api.fail(
            f"{signature.name} does not have a `labelnames`/`labels` argument "
            "(if this is untrue, update `prometheus_metric_fullname_to_label_arg_map` "
            "in our custom mypy plugin) and should probably have a type ignore comment, "
            "e.g. `# type: ignore[missing-server-name-label]`. The reason we don't "
            "automatically ignore this is that the source of truth should be in the source code.",
            ctx.context,
            code=PROMETHEUS_METRIC_MISSING_SERVER_NAME_LABEL,
        )
        return signature

    # Sanity-check that the arguments are still as expected in this version of
    # `prometheus_client`, e.g. `Counter(name, documentation, labelnames, ...)`
    #
    # `signature.arg_names` should be: ["name", "documentation", "labelnames", ...]
    if (
        len(signature.arg_names) < (arg_location.position + 1)
        or signature.arg_names[arg_location.position] != arg_location.keyword_name
    ):
        ctx.api.fail(
            f"Expected argument number {arg_location.position + 1} of {signature.name} to be `labelnames`/`labels`, "
            f"but got {signature.arg_names[arg_location.position]}",
            ctx.context,
        )
        return signature

    # Ensure mypy is passing the correct number of arguments because we are doing some
    # dirty indexing into `ctx.args` later on.
    assert len(ctx.args) == len(signature.arg_names), (
        f"Expected the list of arguments in the {signature.name} signature ({len(signature.arg_names)}) "
        f"to match the number of arguments from the function signature context ({len(ctx.args)})"
    )

    # Check if the `labelnames` argument includes `SERVER_NAME_LABEL`
    #
    # `ctx.args` should look like this:
    # ```
    # [
    #     [StrExpr("name")],
    #     [StrExpr("documentation")],
    #     [ListExpr([StrExpr("label1"), StrExpr("label2")])]
    #     ...
    # ]
    # ```
    labelnames_arg_expression = (
        ctx.args[arg_location.position][0]
        if len(ctx.args[arg_location.position]) > 0
        else None
    )
    if isinstance(labelnames_arg_expression, (ListExpr, TupleExpr)):
        # Check if the `labelnames` argument includes the `server_name` label (`SERVER_NAME_LABEL`).
        for labelname_expression in labelnames_arg_expression.items:
            if (
                isinstance(labelname_expression, NameExpr)
                and labelname_expression.fullname == "synapse.metrics.SERVER_NAME_LABEL"
            ):
                # Found the `SERVER_NAME_LABEL`, all good!
                break
        else:
            ctx.api.fail(
                f"Expected {signature.name} to include `SERVER_NAME_LABEL` in the list of labels. "
                "If this is a process-level metric (vs homeserver-level), use a type ignore comment "
                "to disable this check.",
                ctx.context,
                code=PROMETHEUS_METRIC_MISSING_SERVER_NAME_LABEL,
            )
    else:
        ctx.api.fail(
            f"Expected the `labelnames` argument of {signature.name} to be a list of label names "
            f"(including `SERVER_NAME_LABEL`), but got {labelnames_arg_expression}. "
            "If this is a process-level metric (vs homeserver-level), use a type ignore comment "
            "to disable this check.",
            ctx.context,
            code=PROMETHEUS_METRIC_MISSING_SERVER_NAME_LABEL,
        )
        return signature

    return signature

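Concretely, the instantiation check accepts and rejects along these lines (a sketch; the metric names are illustrative):

    from prometheus_client import Gauge

    from synapse.metrics import SERVER_NAME_LABEL

    # Fails the lint: no SERVER_NAME_LABEL among the label names.
    bad = Gauge("synapse_example_bad", "An example gauge", ["type"])

    # Passes: the per-homeserver label is present.
    good = Gauge("synapse_example_good", "An example gauge", [SERVER_NAME_LABEL, "type"])

    # Process-level metrics opt out explicitly:
    proc = Gauge("synapse_example_proc", "An example gauge")  # type: ignore[missing-server-name-label]
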

def _get_true_return_type(signature: CallableType) -> mypy.types.Type:
    """
    Get the "final" return type of a callable which might return an Awaitable/Deferred.
@@ -261,7 +795,7 @@ AT_CACHED_MUTABLE_RETURN = ErrorCode(

def is_cacheable(
    rt: mypy.types.Type, signature: CallableType, verbose: bool
) -> Tuple[bool, Optional[str]]:
) -> tuple[bool, Optional[str]]:
    """
    Check if a particular type is cacheable.

@@ -371,11 +905,14 @@ def is_cacheable(
    return False, f"Don't know how to handle {type(rt).__qualname__} return type"


def plugin(version: str) -> Type[SynapsePlugin]:
def plugin(version: str) -> type[SynapsePlugin]:
    global MypyPydanticPluginClass, MypyZopePluginClass
    # This is the entry point of the plugin, and lets us deal with the fact
    # that the mypy plugin interface is *not* stable by looking at the version
    # string.
    #
    # However, since we pin the version of mypy Synapse uses in CI, we don't
    # really care.
    MypyPydanticPluginClass = mypy_pydantic_plugin(version)
    MypyZopePluginClass = mypy_zope_plugin(version)
    return SynapsePlugin

@@ -32,15 +32,17 @@ import time
|
||||
import urllib.request
|
||||
from os import path
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import Any, List, Match, Optional, Union
|
||||
from typing import Any, Match, Optional, Union
|
||||
|
||||
import attr
|
||||
import click
|
||||
import commonmark
|
||||
import git
|
||||
import github
|
||||
import github.Auth
|
||||
from click.exceptions import ClickException
|
||||
from git import GitCommandError, Repo
|
||||
from github import BadCredentialsException, Github
|
||||
from markdown_it import MarkdownIt
|
||||
from packaging import version
|
||||
|
||||
|
||||
@@ -397,7 +399,7 @@ def _tag(gh_token: Optional[str]) -> None:
|
||||
return
|
||||
|
||||
# Create a new draft release
|
||||
gh = Github(gh_token)
|
||||
gh = Github(auth=github.Auth.Token(token=gh_token))
|
||||
gh_repo = gh.get_repo("element-hq/synapse")
|
||||
release = gh_repo.create_git_release(
|
||||
tag=tag_name,
|
||||
@@ -428,7 +430,7 @@ def _publish(gh_token: str) -> None:

    if gh_token:
        # Test that the GH Token is valid before continuing.
        gh = Github(gh_token)
        gh = Github(auth=github.Auth.Token(token=gh_token))
        gh.get_user()

    # Make sure we're in a git repo.
@@ -441,7 +443,7 @@ def _publish(gh_token: str) -> None:
        return

    # Publish the draft release
    gh = Github(gh_token)
    gh = Github(auth=github.Auth.Token(token=gh_token))
    gh_repo = gh.get_repo("element-hq/synapse")
    for release in gh_repo.get_releases():
        if release.title == tag_name:
@@ -486,8 +488,13 @@ def _upload(gh_token: Optional[str]) -> None:
        click.echo(f"Tag {tag_name} ({tag.commit}) is not currently checked out!")
        click.get_current_context().abort()

    if gh_token:
        gh = Github(auth=github.Auth.Token(token=gh_token))
    else:
        # Use github anonymously.
        gh = Github()

    # Query all the assets corresponding to this release.
    gh = Github(gh_token)
    gh_repo = gh.get_repo("element-hq/synapse")
    gh_release = gh_repo.get_release(tag_name)

@@ -639,7 +646,16 @@ def _notify(message: str) -> None:


@cli.command()
def merge_back() -> None:
# Although this option is not used, allow it anyway. Otherwise the user will
# receive an error when providing it, which is annoying as other commands accept
# it.
@click.option(
    "--gh-token",
    "_gh_token",
    envvar=["GH_TOKEN", "GITHUB_TOKEN"],
    required=False,
)
def merge_back(_gh_token: Optional[str]) -> None:
    _merge_back()

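click accepts a sequence for `envvar`, reading the first variable that is set, so `--gh-token` can be supplied via either `GH_TOKEN` or `GITHUB_TOKEN`. A small self-contained sketch of the same pattern (command name ours):

```python
from typing import Optional

import click

@click.command()
@click.option(
    "--gh-token",
    "_gh_token",
    envvar=["GH_TOKEN", "GITHUB_TOKEN"],  # the first set variable wins
    required=False,
)
def demo(_gh_token: Optional[str]) -> None:
    # The value is unused; accepting the option keeps the CLI consistent
    # with the commands that do need a token.
    click.echo(f"token provided: {_gh_token is not None}")

if __name__ == "__main__":
    demo()
```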
@@ -687,7 +703,16 @@ def _merge_back() -> None:


@cli.command()
def announce() -> None:
# Although this option is not used, allow it anyway. Otherwise the user will
# receive an error when providing it, which is annoying as other commands accept
# it.
@click.option(
    "--gh-token",
    "_gh_token",
    envvar=["GH_TOKEN", "GITHUB_TOKEN"],
    required=False,
)
def announce(_gh_token: Optional[str]) -> None:
    _announce()

@@ -696,18 +721,31 @@ def _announce() -> None:

    current_version = get_package_version()
    tag_name = f"v{current_version}"
    is_rc = "rc" in tag_name

    release_text = f"""
### Synapse {current_version} {"🧪" if is_rc else "🚀"}

    click.echo(
        f"""
Hi everyone. Synapse {current_version} has just been released.
"""

    if "rc" in tag_name:
        release_text += (
            "\nThis is a release candidate. Please help us test it out "
            "before the final release by deploying it to non-production environments, "
            "and reporting any issues you find to "
            "[the issue tracker](https://github.com/element-hq/synapse/issues). Thanks!\n"
        )

    release_text += f"""
[notes](https://github.com/element-hq/synapse/releases/tag/{tag_name}) | \
[docker](https://hub.docker.com/r/matrixdotorg/synapse/tags?name={tag_name}) | \
[debs](https://packages.matrix.org/debian/) | \
[pypi](https://pypi.org/project/matrix-synapse/{current_version}/)"""
    )

    if "rc" in tag_name:
        click.echo(release_text)

    if is_rc:
        click.echo(
            """
Announce the RC in
@@ -732,7 +770,7 @@ Ask the designated people to do the blog and tweets."""
def full(gh_token: str) -> None:
    if gh_token:
        # Test that the GH Token is valid before continuing.
        gh = Github(gh_token)
        gh = Github(auth=github.Auth.Token(token=gh_token))
        gh.get_user()

    click.echo("1. If this is a security release, read the security wiki page.")
@@ -801,8 +839,12 @@ def get_repo_and_check_clean_checkout(
        raise click.ClickException(
            f"{path} is not a git repository (expecting a {name} repository)."
        )
    if repo.is_dirty():
        raise click.ClickException(f"Uncommitted changes exist in {path}.")
    while repo.is_dirty():
        if not click.confirm(
            f"Uncommitted changes exist in {path}. Commit or stash them. Ready to continue?"
        ):
            raise click.ClickException("Aborted.")

    return repo


@@ -814,7 +856,7 @@ def check_valid_gh_token(gh_token: Optional[str]) -> None:
        return

    try:
        gh = Github(gh_token)
        gh = Github(auth=github.Auth.Token(token=gh_token))

        # We need to look up the name to trigger a request.
        _name = gh.get_user().name
@@ -851,7 +893,7 @@ def get_changes_for_version(wanted_version: version.Version) -> str:

    # First we parse the changelog so that we can split it into sections based
    # on the release headings.
    ast = commonmark.Parser().parse(changes)
    tokens = MarkdownIt().parse(changes)

    @attr.s(auto_attribs=True)
    class VersionSection:
@@ -861,20 +903,23 @@ def get_changes_for_version(wanted_version: version.Version) -> str:
        start_line: int
        end_line: Optional[int] = None  # Is None if it's the last entry

    headings: List[VersionSection] = []
    for node, _ in ast.walker():
        # We look for all text nodes that are in a level 1 heading.
        if node.t != "text":
    headings: list[VersionSection] = []
    for i, token in enumerate(tokens):
        # We look for level 1 headings (h1 tags).
        if token.type != "heading_open" or token.tag != "h1":
            continue

        if node.parent.t != "heading" or node.parent.level != 1:
            continue
        # The next token should be an inline token containing the heading text
        if i + 1 < len(tokens) and tokens[i + 1].type == "inline":
            heading_text = tokens[i + 1].content
            # The map property contains [line_begin, line_end] (0-based)
            start_line = token.map[0] if token.map else 0

        # If we have a previous heading then we update its `end_line`.
        if headings:
            headings[-1].end_line = node.parent.sourcepos[0][0] - 1
            # If we have a previous heading then we update its `end_line`.
            if headings:
                headings[-1].end_line = start_line

        headings.append(VersionSection(node.literal, node.parent.sourcepos[0][0] - 1))
            headings.append(VersionSection(heading_text, start_line))

    changes_by_line = changes.split("\n")

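For context on the commonmark-to-markdown-it switch above: markdown-it-py emits a flat token stream in which each `# heading` becomes a `heading_open` token (tag `h1`) followed by an `inline` token holding the text, and `token.map` carries the 0-based `[line_begin, line_end]` span. A quick illustration (sample changelog text ours):

```python
from markdown_it import MarkdownIt

sample = "# Synapse 1.2.3 (2025-01-01)\n\n- some change\n"
tokens = MarkdownIt().parse(sample)
for i, token in enumerate(tokens):
    if token.type == "heading_open" and token.tag == "h1":
        # The inline token that follows carries the heading text.
        print(tokens[i + 1].content, token.map)  # Synapse 1.2.3 (2025-01-01) [0, 1]
```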
@@ -38,7 +38,7 @@ import io
import json
import sys
from collections import defaultdict
from typing import Any, Dict, Iterator, Optional, Tuple
from typing import Any, Iterator, Optional

import git
from packaging import version
@@ -57,7 +57,7 @@ SCHEMA_VERSION_FILES = (
OLDEST_SHOWN_VERSION = version.parse("v1.0")


def get_schema_versions(tag: git.Tag) -> Tuple[Optional[int], Optional[int]]:
def get_schema_versions(tag: git.Tag) -> tuple[Optional[int], Optional[int]]:
    """Get the schema and schema compat versions for a tag."""
    schema_version = None
    schema_compat_version = None
@@ -81,7 +81,7 @@ def get_schema_versions(tag: git.Tag) -> Tuple[Optional[int], Optional[int]]:
            # SCHEMA_COMPAT_VERSION is sometimes across multiple lines, the easiest
            # thing to do is exec the code. Luckily it has only ever existed in
            # a file which imports nothing else from Synapse.
            locals: Dict[str, Any] = {}
            locals: dict[str, Any] = {}
            exec(schema_file.data_stream.read().decode("utf-8"), {}, locals)
            schema_version = locals["SCHEMA_VERSION"]
            schema_compat_version = locals.get("SCHEMA_COMPAT_VERSION")

@@ -30,7 +30,7 @@ from signedjson.sign import sign_json

from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.crypto.event_signing import add_hashes_and_signatures
from synapse.util import json_encoder
from synapse.util.json import json_encoder


def main() -> None:
@@ -7,18 +7,14 @@ from __future__ import annotations
from typing import (
    Any,
    Callable,
    Dict,
    Hashable,
    ItemsView,
    Iterable,
    Iterator,
    KeysView,
    List,
    Mapping,
    Optional,
    Sequence,
    Tuple,
    Type,
    TypeVar,
    Union,
    ValuesView,
@@ -35,14 +31,14 @@ _VT_co = TypeVar("_VT_co", covariant=True)
_SD = TypeVar("_SD", bound=SortedDict)
_Key = Callable[[_T], Any]

class SortedDict(Dict[_KT, _VT]):
class SortedDict(dict[_KT, _VT]):
    @overload
    def __init__(self, **kwargs: _VT) -> None: ...
    @overload
    def __init__(self, __map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
    @overload
    def __init__(
        self, __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT
        self, __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT
    ) -> None: ...
    @overload
    def __init__(self, __key: _Key[_KT], **kwargs: _VT) -> None: ...
@@ -52,7 +48,7 @@ class SortedDict(Dict[_KT, _VT]):
    ) -> None: ...
    @overload
    def __init__(
        self, __key: _Key[_KT], __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT
        self, __key: _Key[_KT], __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT
    ) -> None: ...
    @property
    def key(self) -> Optional[_Key[_KT]]: ...
@@ -84,8 +80,8 @@ class SortedDict(Dict[_KT, _VT]):
    def pop(self, key: _KT) -> _VT: ...
    @overload
    def pop(self, key: _KT, default: _T = ...) -> Union[_VT, _T]: ...
    def popitem(self, index: int = ...) -> Tuple[_KT, _VT]: ...
    def peekitem(self, index: int = ...) -> Tuple[_KT, _VT]: ...
    def popitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
    def peekitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
    def setdefault(self, key: _KT, default: Optional[_VT] = ...) -> _VT: ...
    # Mypy now reports the first overload as an error, because typeshed widened the type
    # of `__map` to its internal `_typeshed.SupportsKeysAndGetItem` type in
@@ -102,9 +98,9 @@ class SortedDict(Dict[_KT, _VT]):
    # def update(self, **kwargs: _VT) -> None: ...
    def __reduce__(
        self,
    ) -> Tuple[
        Type[SortedDict[_KT, _VT]],
        Tuple[Callable[[_KT], Any], List[Tuple[_KT, _VT]]],
    ) -> tuple[
        type[SortedDict[_KT, _VT]],
        tuple[Callable[[_KT], Any], list[tuple[_KT, _VT]]],
    ]: ...
    def __repr__(self) -> str: ...
    def _check(self) -> None: ...
@@ -121,20 +117,20 @@ class SortedKeysView(KeysView[_KT_co], Sequence[_KT_co]):
    @overload
    def __getitem__(self, index: int) -> _KT_co: ...
    @overload
    def __getitem__(self, index: slice) -> List[_KT_co]: ...
    def __getitem__(self, index: slice) -> list[_KT_co]: ...
    def __delitem__(self, index: Union[int, slice]) -> None: ...

class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]):
    def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ...
class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[tuple[_KT_co, _VT_co]]):
    def __iter__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
    @overload
    def __getitem__(self, index: int) -> Tuple[_KT_co, _VT_co]: ...
    def __getitem__(self, index: int) -> tuple[_KT_co, _VT_co]: ...
    @overload
    def __getitem__(self, index: slice) -> List[Tuple[_KT_co, _VT_co]]: ...
    def __getitem__(self, index: slice) -> list[tuple[_KT_co, _VT_co]]: ...
    def __delitem__(self, index: Union[int, slice]) -> None: ...

class SortedValuesView(ValuesView[_VT_co], Sequence[_VT_co]):
    @overload
    def __getitem__(self, index: int) -> _VT_co: ...
    @overload
    def __getitem__(self, index: slice) -> List[_VT_co]: ...
    def __getitem__(self, index: slice) -> list[_VT_co]: ...
    def __delitem__(self, index: Union[int, slice]) -> None: ...

@@ -9,12 +9,9 @@ from typing import (
    Callable,
    Iterable,
    Iterator,
    List,
    MutableSequence,
    Optional,
    Sequence,
    Tuple,
    Type,
    TypeVar,
    Union,
    overload,
@@ -37,11 +34,11 @@ class SortedList(MutableSequence[_T]):
    ): ...
    # NB: currently mypy does not honour return type, see mypy #3307
    @overload
    def __new__(cls: Type[_SL], iterable: None, key: None) -> _SL: ...
    def __new__(cls: type[_SL], iterable: None, key: None) -> _SL: ...
    @overload
    def __new__(cls: Type[_SL], iterable: None, key: _Key[_T]) -> SortedKeyList[_T]: ...
    def __new__(cls: type[_SL], iterable: None, key: _Key[_T]) -> SortedKeyList[_T]: ...
    @overload
    def __new__(cls: Type[_SL], iterable: Iterable[_T], key: None) -> _SL: ...
    def __new__(cls: type[_SL], iterable: Iterable[_T], key: None) -> _SL: ...
    @overload
    def __new__(cls, iterable: Iterable[_T], key: _Key[_T]) -> SortedKeyList[_T]: ...
    @property
@@ -64,11 +61,11 @@ class SortedList(MutableSequence[_T]):
    @overload
    def __getitem__(self, index: int) -> _T: ...
    @overload
    def __getitem__(self, index: slice) -> List[_T]: ...
    def __getitem__(self, index: slice) -> list[_T]: ...
    @overload
    def _getitem(self, index: int) -> _T: ...
    @overload
    def _getitem(self, index: slice) -> List[_T]: ...
    def _getitem(self, index: slice) -> list[_T]: ...
    @overload
    def __setitem__(self, index: int, value: _T) -> None: ...
    @overload
@@ -95,7 +92,7 @@ class SortedList(MutableSequence[_T]):
        self,
        minimum: Optional[int] = ...,
        maximum: Optional[int] = ...,
        inclusive: Tuple[bool, bool] = ...,
        inclusive: tuple[bool, bool] = ...,
        reverse: bool = ...,
    ) -> Iterator[_T]: ...
    def bisect_left(self, value: _T) -> int: ...
@@ -151,14 +148,14 @@ class SortedKeyList(SortedList[_T]):
        self,
        minimum: Optional[int] = ...,
        maximum: Optional[int] = ...,
        inclusive: Tuple[bool, bool] = ...,
        inclusive: tuple[bool, bool] = ...,
        reverse: bool = ...,
    ) -> Iterator[_T]: ...
    def irange_key(
        self,
        min_key: Optional[Any] = ...,
        max_key: Optional[Any] = ...,
        inclusive: Tuple[bool, bool] = ...,
        inclusive: tuple[bool, bool] = ...,
        reserve: bool = ...,
    ) -> Iterator[_T]: ...
    def bisect_left(self, value: _T) -> int: ...

@@ -10,13 +10,9 @@ from typing import (
    Hashable,
    Iterable,
    Iterator,
    List,
    MutableSet,
    Optional,
    Sequence,
    Set,
    Tuple,
    Type,
    TypeVar,
    Union,
    overload,
@@ -37,7 +33,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
    ) -> None: ...
    @classmethod
    def _fromset(
        cls, values: Set[_T], key: Optional[_Key[_T]] = ...
        cls, values: set[_T], key: Optional[_Key[_T]] = ...
    ) -> SortedSet[_T]: ...
    @property
    def key(self) -> Optional[_Key[_T]]: ...
@@ -45,7 +41,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
    @overload
    def __getitem__(self, index: int) -> _T: ...
    @overload
    def __getitem__(self, index: slice) -> List[_T]: ...
    def __getitem__(self, index: slice) -> list[_T]: ...
    def __delitem__(self, index: Union[int, slice]) -> None: ...
    def __eq__(self, other: Any) -> bool: ...
    def __ne__(self, other: Any) -> bool: ...
@@ -94,7 +90,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
    def _update(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def __reduce__(
        self,
    ) -> Tuple[Type[SortedSet[_T]], Set[_T], Callable[[_T], Any]]: ...
    ) -> tuple[type[SortedSet[_T]], set[_T], Callable[[_T], Any]]: ...
    def __repr__(self) -> str: ...
    def _check(self) -> None: ...
    def bisect_left(self, value: _T) -> int: ...
@@ -109,7 +105,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
        self,
        minimum: Optional[_T] = ...,
        maximum: Optional[_T] = ...,
        inclusive: Tuple[bool, bool] = ...,
        inclusive: tuple[bool, bool] = ...,
        reverse: bool = ...,
    ) -> Iterator[_T]: ...
    def index(
@@ -15,7 +15,7 @@

"""Contains *incomplete* type hints for txredisapi."""

from typing import Any, List, Optional, Type, Union
from typing import Any, Optional, Union

from twisted.internet import protocol
from twisted.internet.defer import Deferred
@@ -39,7 +39,7 @@ class RedisProtocol(protocol.Protocol):
class SubscriberProtocol(RedisProtocol):
    def __init__(self, *args: object, **kwargs: object): ...
    password: Optional[str]
    def subscribe(self, channels: Union[str, List[str]]) -> "Deferred[None]": ...
    def subscribe(self, channels: Union[str, list[str]]) -> "Deferred[None]": ...
    def connectionMade(self) -> None: ...
    # type-ignore: twisted.internet.protocol.Protocol provides a default argument for
    # `reason`. txredisapi's LineReceiver Protocol doesn't. But that's fine: it's what's
@@ -69,7 +69,7 @@ class UnixConnectionHandler(ConnectionHandler): ...
class RedisFactory(protocol.ReconnectingClientFactory):
    continueTrying: bool
    handler: ConnectionHandler
    pool: List[RedisProtocol]
    pool: list[RedisProtocol]
    replyTimeout: Optional[int]
    def __init__(
        self,
@@ -77,7 +77,7 @@ class RedisFactory(protocol.ReconnectingClientFactory):
        dbid: Optional[int],
        poolsize: int,
        isLazy: bool = False,
        handler: Type = ConnectionHandler,
        handler: type = ConnectionHandler,
        charset: str = "utf-8",
        password: Optional[str] = None,
        replyTimeout: Optional[int] = None,

@@ -24,7 +24,7 @@

import os
import sys
from typing import Any, Dict
from typing import Any

from PIL import ImageFile

@@ -45,16 +45,6 @@ if py_version < (3, 9):

# Allow using the asyncio reactor via env var.
if strtobool(os.environ.get("SYNAPSE_ASYNC_IO_REACTOR", "0")):
    from incremental import Version

    import twisted

    # We need a bugfix that is included in Twisted 21.2.0:
    # https://twistedmatrix.com/trac/ticket/9787
    if twisted.version < Version("Twisted", 21, 2, 0):
        print("Using asyncio reactor requires Twisted>=21.2.0")
        sys.exit(1)

    import asyncio

    from twisted.internet import asyncioreactor
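With the Twisted version guard removed, the surviving branch boils down to installing Twisted's asyncio reactor, which must happen before anything else imports `twisted.internet.reactor`. A minimal sketch (loop setup assumed):

```python
import asyncio

from twisted.internet import asyncioreactor

# Install the asyncio-backed reactor; any later
# `from twisted.internet import reactor` then resolves to it.
asyncioreactor.install(asyncio.new_event_loop())
```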
@@ -80,7 +70,7 @@ try:
    from canonicaljson import register_preserialisation_callback
    from immutabledict import immutabledict

    def _immutabledict_cb(d: immutabledict) -> Dict[str, Any]:
    def _immutabledict_cb(d: immutabledict) -> dict[str, Any]:
        try:
            return d._dict
        except Exception:
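The callback above lets canonicaljson serialise `immutabledict` values by handing back the wrapped plain dict. A hedged sketch of the registration API (under the assumption that `register_preserialisation_callback` takes the type and a converter, as used here):

```python
from canonicaljson import encode_canonical_json, register_preserialisation_callback
from immutabledict import immutabledict

# Teach the encoder to unwrap immutabledict before serialising.
register_preserialisation_callback(immutabledict, dict)
print(encode_canonical_json({"k": immutabledict({"a": 1})}))  # b'{"k":{"a":1}}'
```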
@@ -34,9 +34,11 @@ HAS_PYDANTIC_V2: bool = Version(pydantic_version).major == 2

if TYPE_CHECKING or HAS_PYDANTIC_V2:
    from pydantic.v1 import (
        AnyHttpUrl,
        BaseModel,
        Extra,
        Field,
        FilePath,
        MissingError,
        PydanticValueError,
        StrictBool,
@@ -48,15 +50,18 @@ if TYPE_CHECKING or HAS_PYDANTIC_V2:
        conint,
        constr,
        parse_obj_as,
        root_validator,
        validator,
    )
    from pydantic.v1.error_wrappers import ErrorWrapper
    from pydantic.v1.typing import get_args
else:
    from pydantic import (
        AnyHttpUrl,
        BaseModel,
        Extra,
        Field,
        FilePath,
        MissingError,
        PydanticValueError,
        StrictBool,
@@ -68,6 +73,7 @@ else:
        conint,
        constr,
        parse_obj_as,
        root_validator,
        validator,
    )
    from pydantic.error_wrappers import ErrorWrapper
@@ -75,6 +81,7 @@ else:

__all__ = (
    "HAS_PYDANTIC_V2",
    "AnyHttpUrl",
    "BaseModel",
    "constr",
    "conbytes",
@@ -83,6 +90,7 @@ __all__ = (
    "ErrorWrapper",
    "Extra",
    "Field",
    "FilePath",
    "get_args",
    "MissingError",
    "parse_obj_as",
@@ -92,4 +100,5 @@ __all__ = (
    "StrictStr",
    "ValidationError",
    "validator",
    "root_validator",
)

@@ -25,7 +25,7 @@ import logging
import re
from collections import defaultdict
from dataclasses import dataclass
from typing import Dict, Iterable, Optional, Pattern, Set, Tuple
from typing import Iterable, Optional, Pattern

import yaml

@@ -81,7 +81,7 @@ class EnumerationResource(HttpServer):
    """

    def __init__(self, is_worker: bool) -> None:
        self.registrations: Dict[Tuple[str, str], EndpointDescription] = {}
        self.registrations: dict[tuple[str, str], EndpointDescription] = {}
        self._is_worker = is_worker

    def register_paths(
@@ -115,7 +115,7 @@ class EnumerationResource(HttpServer):

def get_registered_paths_for_hs(
    hs: HomeServer,
) -> Dict[Tuple[str, str], EndpointDescription]:
) -> dict[tuple[str, str], EndpointDescription]:
    """
    Given a homeserver, get all registered endpoints and their descriptions.
    """
@@ -142,7 +142,7 @@ def get_registered_paths_for_hs(

def get_registered_paths_for_default(
    worker_app: Optional[str], base_config: HomeServerConfig
) -> Dict[Tuple[str, str], EndpointDescription]:
) -> dict[tuple[str, str], EndpointDescription]:
    """
    Given the name of a worker application and a base homeserver configuration,
    returns:
@@ -153,15 +153,24 @@ def get_registered_paths_for_default(
    """

    hs = MockHomeserver(base_config, worker_app)

    # TODO We only do this to avoid an error, but don't need the database etc
    hs.setup()
    return get_registered_paths_for_hs(hs)
    registered_paths = get_registered_paths_for_hs(hs)
    # NOTE: a more robust implementation would properly shutdown/cleanup each server
    # to avoid resource buildup.
    # However, the call to `shutdown` is `async` so it would require additional complexity here.
    # We are intentionally skipping this cleanup because this is a short-lived, one-off
    # utility script where the simpler approach is sufficient and we shouldn't run into
    # any resource buildup issues.

    return registered_paths


def elide_http_methods_if_unconflicting(
    registrations: Dict[Tuple[str, str], EndpointDescription],
    all_possible_registrations: Dict[Tuple[str, str], EndpointDescription],
) -> Dict[Tuple[str, str], EndpointDescription]:
    registrations: dict[tuple[str, str], EndpointDescription],
    all_possible_registrations: dict[tuple[str, str], EndpointDescription],
) -> dict[tuple[str, str], EndpointDescription]:
    """
    Elides HTTP methods (by replacing them with `*`) if all possible registered methods
    can be handled by the worker whose registration map is `registrations`.
@@ -171,13 +180,13 @@ def elide_http_methods_if_unconflicting(
    """

    def paths_to_methods_dict(
        methods_and_paths: Iterable[Tuple[str, str]],
    ) -> Dict[str, Set[str]]:
        methods_and_paths: Iterable[tuple[str, str]],
    ) -> dict[str, set[str]]:
        """
        Given (method, path) pairs, produces a dict from path to set of methods
        available at that path.
        """
        result: Dict[str, Set[str]] = {}
        result: dict[str, set[str]] = {}
        for method, path in methods_and_paths:
            result.setdefault(path, set()).add(method)
        return result
@@ -201,8 +210,8 @@ def elide_http_methods_if_unconflicting(


def simplify_path_regexes(
    registrations: Dict[Tuple[str, str], EndpointDescription],
) -> Dict[Tuple[str, str], EndpointDescription]:
    registrations: dict[tuple[str, str], EndpointDescription],
) -> dict[tuple[str, str], EndpointDescription]:
    """
    Simplify all the path regexes for the dict of endpoint descriptions,
    so that we don't use the Python-specific regex extensions
@@ -261,8 +270,8 @@ def main() -> None:

    # TODO SSO endpoints (pick_idp etc) NOT REGISTERED BY THIS SCRIPT

    categories_to_methods_and_paths: Dict[
        Optional[str], Dict[Tuple[str, str], EndpointDescription]
    categories_to_methods_and_paths: dict[
        Optional[str], dict[tuple[str, str], EndpointDescription]
    ] = defaultdict(dict)

    for (method, path), desc in elided_worker_paths.items():
@@ -274,7 +283,7 @@ def main() -> None:

def print_category(
    category_name: Optional[str],
    elided_worker_paths: Dict[Tuple[str, str], EndpointDescription],
    elided_worker_paths: dict[tuple[str, str], EndpointDescription],
) -> None:
    """
    Prints out a category, in documentation page style.
@@ -73,8 +73,18 @@ def main() -> None:

    pw = unicodedata.normalize("NFKC", password)

    bytes_to_hash = pw.encode("utf8") + password_pepper.encode("utf8")
    if len(bytes_to_hash) > 72:
        # bcrypt only looks at the first 72 bytes
        print(
            f"Password is too long ({len(bytes_to_hash)} bytes); truncating to 72 bytes for bcrypt. "
            "This is expected behaviour and will not affect a user's ability to log in. 72 bytes is "
            "sufficient entropy for a password."
        )
        bytes_to_hash = bytes_to_hash[:72]

    hashed = bcrypt.hashpw(
        pw.encode("utf8") + password_pepper.encode("utf8"),
        bytes_to_hash,
        bcrypt.gensalt(bcrypt_rounds),
    ).decode("ascii")

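A small sketch of the 72-byte behaviour the new branch handles (round count and password are placeholders): bcrypt's underlying algorithm only consumes the first 72 bytes, so truncating before hashing, as the script now does, does not change which passwords verify.

```python
import bcrypt

secret = ("p" * 100).encode("utf8")  # 100 bytes, past bcrypt's 72-byte window
truncated = secret[:72]              # mirror the script's explicit truncation

hashed = bcrypt.hashpw(truncated, bcrypt.gensalt(12))
# Bytes beyond the 72nd never influence the digest, so a login that supplies
# the full-length password behaves the same as one using this truncated form.
assert bcrypt.checkpw(truncated, hashed)
```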
@@ -26,10 +26,11 @@ import hashlib
import hmac
import logging
import sys
from typing import Any, Callable, Dict, Optional
from typing import Any, Callable, Optional

import requests
import yaml
from typing_extensions import Never

_CONFLICTING_SHARED_SECRET_OPTS_ERROR = """\
Conflicting options 'registration_shared_secret' and 'registration_shared_secret_path'
@@ -40,6 +41,10 @@ _NO_SHARED_SECRET_OPTS_ERROR = """\
No 'registration_shared_secret' or 'registration_shared_secret_path' defined in config.
"""

_EMPTY_SHARED_SECRET_PATH_OPTS_ERROR = """\
The secret given via `registration_shared_secret_path` must not be empty.
"""

_DEFAULT_SERVER_URL = "http://localhost:8008"


@@ -170,6 +175,12 @@ def register_new_user(
    )


def bail(err_msg: str) -> Never:
    """Prints the given message to stderr and exits."""
    print(err_msg, file=sys.stderr)
    sys.exit(1)


def main() -> None:
    logging.captureWarnings(True)

@@ -251,7 +262,7 @@ def main() -> None:

    args = parser.parse_args()

    config: Optional[Dict[str, Any]] = None
    config: Optional[dict[str, Any]] = None
    if "config" in args and args.config:
        config = yaml.safe_load(args.config)

@@ -262,15 +273,20 @@ def main() -> None:
        assert config is not None

        secret = config.get("registration_shared_secret")
        if not isinstance(secret, (str, type(None))):
            bail("registration_shared_secret is not a string.")
        secret_file = config.get("registration_shared_secret_path")
        if secret_file:
            if secret:
                print(_CONFLICTING_SHARED_SECRET_OPTS_ERROR, file=sys.stderr)
                sys.exit(1)
        if not isinstance(secret_file, (str, type(None))):
            bail("registration_shared_secret_path is not a string.")

        if not secret and not secret_file:
            bail(_NO_SHARED_SECRET_OPTS_ERROR)
        elif secret and secret_file:
            bail(_CONFLICTING_SHARED_SECRET_OPTS_ERROR)
        elif not secret and secret_file:
            secret = _read_file(secret_file, "registration_shared_secret_path").strip()
            if not secret:
                print(_NO_SHARED_SECRET_OPTS_ERROR, file=sys.stderr)
                sys.exit(1)
            if not secret:
                bail(_EMPTY_SHARED_SECRET_PATH_OPTS_ERROR)

    if args.password_file:
        password = _read_file(args.password_file, "password-file").strip()
@@ -334,7 +350,7 @@ def _read_file(file_path: Any, config_path: str) -> str:
    sys.exit(1)


def _find_client_listener(config: Dict[str, Any]) -> Optional[str]:
def _find_client_listener(config: dict[str, Any]) -> Optional[str]:
    # try to find a listener in the config. Returns a host:port pair
    for listener in config.get("listeners", []):
        if listener.get("type") != "http" or listener.get("tls", False):
@@ -23,40 +23,41 @@ import argparse
import sys
import time
from datetime import datetime
from typing import List

import attr

from synapse.config._base import (
    Config,
    ConfigError,
    RootConfig,
    find_config_files,
    read_config_files,
)
from synapse.config.database import DatabaseConfig
from synapse.config.server import ServerConfig
from synapse.storage.database import DatabasePool, LoggingTransaction, make_conn
from synapse.storage.engines import create_engine


class ReviewConfig(RootConfig):
    "A config class that just pulls out the database config"
    "A config class that just pulls out the server and database config"

    config_classes = [DatabaseConfig]
    config_classes = [ServerConfig, DatabaseConfig]


@attr.s(auto_attribs=True)
class UserInfo:
    user_id: str
    creation_ts: int
    emails: List[str] = attr.Factory(list)
    private_rooms: List[str] = attr.Factory(list)
    public_rooms: List[str] = attr.Factory(list)
    ips: List[str] = attr.Factory(list)
    emails: list[str] = attr.Factory(list)
    private_rooms: list[str] = attr.Factory(list)
    public_rooms: list[str] = attr.Factory(list)
    ips: list[str] = attr.Factory(list)


def get_recent_users(
    txn: LoggingTransaction, since_ms: int, exclude_app_service: bool
) -> List[UserInfo]:
) -> list[UserInfo]:
    """Fetches recently registered users and some info on them."""

    sql = """
@@ -148,6 +149,10 @@ def main() -> None:
    config_dict = read_config_files(config_files)
    config.parse_config_dict(config_dict, "", "")

    server_name = config.server.server_name
    if not isinstance(server_name, str):
        raise ConfigError("Must be a string", ("server_name",))

    since_ms = time.time() * 1000 - Config.parse_duration(config_args.since)
    exclude_users_with_email = config_args.exclude_emails
    exclude_users_with_appservice = config_args.exclude_app_service
@@ -159,7 +164,12 @@ def main() -> None:

    engine = create_engine(database_config.config)

    with make_conn(database_config, engine, "review_recent_signups") as db_conn:
    with make_conn(
        db_config=database_config,
        engine=engine,
        default_txn_name="review_recent_signups",
        server_name=server_name,
    ) as db_conn:
        # This generates a type of Cursor, not LoggingTransaction.
        user_infos = get_recent_users(
            db_conn.cursor(),
@@ -33,15 +33,10 @@ from typing import (
    Any,
    Awaitable,
    Callable,
    Dict,
    Generator,
    Iterable,
    List,
    NoReturn,
    Optional,
    Set,
    Tuple,
    Type,
    TypedDict,
    TypeVar,
    cast,
@@ -54,11 +49,11 @@ from twisted.internet import defer, reactor as reactor_
from synapse.config.database import DatabaseConnectionConfig
from synapse.config.homeserver import HomeServerConfig
from synapse.logging.context import (
    LoggingContext,
    make_deferred_yieldable,
    run_in_background,
)
from synapse.notifier import ReplicationNotifier
from synapse.server import HomeServer
from synapse.storage import DataStore
from synapse.storage.database import DatabasePool, LoggingTransaction, make_conn
from synapse.storage.databases.main import FilteringWorkerStore
from synapse.storage.databases.main.account_data import AccountDataWorkerStore
@@ -98,7 +93,6 @@ from synapse.storage.databases.state.bg_updates import StateBackgroundUpdateStor
from synapse.storage.engines import create_engine
from synapse.storage.prepare_database import prepare_database
from synapse.types import ISynapseReactor
from synapse.util import SYNAPSE_VERSION, Clock

# Cast safety: Twisted does some naughty magic which replaces the
# twisted.internet.reactor module with a Reactor instance at runtime.
@@ -136,6 +130,7 @@ BOOLEAN_COLUMNS = {
        "has_known_state",
        "is_encrypted",
    ],
    "thread_subscriptions": ["subscribed", "automatic"],
    "users": ["shadow_banned", "approved", "locked", "suspended"],
    "un_partial_stated_event_stream": ["rejection_status_changed"],
    "users_who_share_rooms": ["share_private"],
@@ -190,13 +185,18 @@ APPEND_ONLY_TABLES = [
    "users",
]

# These tables declare their id column with "PRIMARY KEY AUTOINCREMENT" on sqlite side
# and with "PRIMARY KEY GENERATED ALWAYS AS IDENTITY" on postgres side. This creates an
# implicit sequence that needs its value to be migrated separately. Additionally,
# inserting on postgres side needs to use the "OVERRIDING SYSTEM VALUE" modifier.
AUTOINCREMENT_TABLES = {
    "sliding_sync_connections",
    "sliding_sync_connection_positions",
    "sliding_sync_connection_required_state",
    "state_groups_pending_deletion",
}

IGNORED_TABLES = {
    # Porting the auto generated sequence in this table is non-trivial.
    # None of the entries in this list are mandatory for Synapse to keep working.
    # If state group disk space is an issue after the port, the
    # `mark_unreferenced_state_groups_for_deletion_bg_update` background task can be run again.
    "state_groups_pending_deletion",
    # We don't port these tables, as they're a faff and we can regenerate
    # them anyway.
    "user_directory",
@@ -239,7 +239,7 @@ end_error: Optional[str] = None
# not the error then the script will show nothing outside of what's printed in the run
# function. If both are defined, the script will print both the error and the stacktrace.
end_error_exec_info: Optional[
    Tuple[Type[BaseException], BaseException, TracebackType]
    tuple[type[BaseException], BaseException, TracebackType]
] = None

R = TypeVar("R")
@@ -276,19 +276,25 @@ class Store(
    def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]:
        return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)

    def execute_sql(self, sql: str, *args: object) -> Awaitable[List[Tuple]]:
        def r(txn: LoggingTransaction) -> List[Tuple]:
    def execute_sql(self, sql: str, *args: object) -> Awaitable[list[tuple]]:
        def r(txn: LoggingTransaction) -> list[tuple]:
            txn.execute(sql, args)
            return txn.fetchall()

        return self.db_pool.runInteraction("execute_sql", r)

    def insert_many_txn(
        self, txn: LoggingTransaction, table: str, headers: List[str], rows: List[Tuple]
        self,
        txn: LoggingTransaction,
        table: str,
        headers: list[str],
        rows: list[tuple],
        override_system_value: bool = False,
    ) -> None:
        sql = "INSERT INTO %s (%s) VALUES (%s)" % (
        sql = "INSERT INTO %s (%s) %s VALUES (%s)" % (
            table,
            ", ".join(k for k in headers),
            "OVERRIDING SYSTEM VALUE" if override_system_value else "",
            ", ".join("%s" for _ in headers),
        )
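The `%s`-placeholder construction above interpolates an optional `OVERRIDING SYSTEM VALUE` clause, which postgres requires before it will accept explicit ids for `GENERATED ALWAYS AS IDENTITY` columns. A standalone sketch of the same string construction (table and column names are examples):

```python
def build_insert_sql(table: str, headers: list[str], override_system_value: bool) -> str:
    # Same shape as insert_many_txn above: the clause is empty for ordinary
    # tables and "OVERRIDING SYSTEM VALUE" for identity-column tables.
    return "INSERT INTO %s (%s) %s VALUES (%s)" % (
        table,
        ", ".join(headers),
        "OVERRIDING SYSTEM VALUE" if override_system_value else "",
        ", ".join("%s" for _ in headers),
    )

print(build_insert_sql("sliding_sync_connections", ["connection_key", "user_id"], True))
# INSERT INTO sliding_sync_connections (connection_key, user_id) OVERRIDING SYSTEM VALUE VALUES (%s, %s)
```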
@@ -305,43 +311,31 @@ class Store(
        )


class MockHomeserver:
class MockHomeserver(HomeServer):
    DATASTORE_CLASS = DataStore

    def __init__(self, config: HomeServerConfig):
        self.clock = Clock(reactor)
        self.config = config
        self.hostname = config.server.server_name
        self.version_string = SYNAPSE_VERSION

    def get_clock(self) -> Clock:
        return self.clock

    def get_reactor(self) -> ISynapseReactor:
        return reactor

    def get_instance_name(self) -> str:
        return "master"

    def should_send_federation(self) -> bool:
        return False

    def get_replication_notifier(self) -> ReplicationNotifier:
        return ReplicationNotifier()
        super().__init__(
            hostname=config.server.server_name,
            config=config,
            reactor=reactor,
        )


class Porter:
    def __init__(
        self,
        sqlite_config: Dict[str, Any],
        sqlite_config: dict[str, Any],
        progress: "Progress",
        batch_size: int,
        hs_config: HomeServerConfig,
        hs: HomeServer,
    ):
        self.sqlite_config = sqlite_config
        self.progress = progress
        self.batch_size = batch_size
        self.hs_config = hs_config
        self.hs = hs

    async def setup_table(self, table: str) -> Tuple[str, int, int, int, int]:
    async def setup_table(self, table: str) -> tuple[str, int, int, int, int]:
        if table in APPEND_ONLY_TABLES:
            # It's safe to just carry on inserting.
            row = await self.postgres_store.db_pool.simple_select_one(
@@ -404,10 +398,10 @@ class Porter:

        return table, already_ported, total_to_port, forward_chunk, backward_chunk

    async def get_table_constraints(self) -> Dict[str, Set[str]]:
    async def get_table_constraints(self) -> dict[str, set[str]]:
        """Returns a map of tables that have foreign key constraints to tables they depend on."""

        def _get_constraints(txn: LoggingTransaction) -> Dict[str, Set[str]]:
        def _get_constraints(txn: LoggingTransaction) -> dict[str, set[str]]:
            # We can pull the information about foreign key constraints out from
            # the postgres schema tables.
            sql = """
@@ -423,7 +417,7 @@ class Porter:
            """
            txn.execute(sql)

            results: Dict[str, Set[str]] = {}
            results: dict[str, set[str]] = {}
            for table, foreign_table in txn:
                results.setdefault(table, set()).add(foreign_table)
            return results
@@ -491,7 +485,7 @@ class Porter:

            def r(
                txn: LoggingTransaction,
            ) -> Tuple[Optional[List[str]], List[Tuple], List[Tuple]]:
            ) -> tuple[Optional[list[str]], list[tuple], list[tuple]]:
                forward_rows = []
                backward_rows = []
                if do_forward[0]:
@@ -508,7 +502,7 @@ class Porter:

                if forward_rows or backward_rows:
                    assert txn.description is not None
                    headers: Optional[List[str]] = [
                    headers: Optional[list[str]] = [
                        column[0] for column in txn.description
                    ]
                else:
@@ -532,7 +526,13 @@ class Porter:

            def insert(txn: LoggingTransaction) -> None:
                assert headers is not None
                self.postgres_store.insert_many_txn(txn, table, headers[1:], rows)
                self.postgres_store.insert_many_txn(
                    txn,
                    table,
                    headers[1:],
                    rows,
                    override_system_value=table in AUTOINCREMENT_TABLES,
                )

                self.postgres_store.db_pool.simple_update_one_txn(
                    txn,
@@ -569,7 +569,7 @@ class Porter:

        while True:

            def r(txn: LoggingTransaction) -> Tuple[List[str], List[Tuple]]:
            def r(txn: LoggingTransaction) -> tuple[list[str], list[tuple]]:
                txn.execute(select, (forward_chunk, self.batch_size))
                rows = txn.fetchall()
                assert txn.description is not None
@@ -653,15 +653,28 @@ class Porter:

        engine = create_engine(db_config.config)

        hs = MockHomeserver(self.hs_config)
        server_name = self.hs.hostname

        with make_conn(db_config, engine, "portdb") as db_conn:
        with make_conn(
            db_config=db_config,
            engine=engine,
            default_txn_name="portdb",
            server_name=server_name,
        ) as db_conn:
            engine.check_database(
                db_conn, allow_outdated_version=allow_outdated_version
            )
            prepare_database(db_conn, engine, config=self.hs_config)
            prepare_database(db_conn, engine, config=self.hs.config)
            # Type safety: ignore that we're using Mock homeservers here.
            store = Store(DatabasePool(hs, db_config, engine), db_conn, hs)  # type: ignore[arg-type]
            store = Store(
                DatabasePool(
                    self.hs,
                    db_config,
                    engine,
                ),
                db_conn,
                self.hs,
            )
            db_conn.commit()

        return store
@@ -758,7 +771,7 @@ class Porter:
            return

        self.postgres_store = self.build_db_store(
            self.hs_config.database.get_single_database()
            self.hs.config.database.get_single_database()
        )

        await self.remove_ignored_background_updates_from_database()
@@ -884,6 +897,19 @@ class Porter:
            ],
        )

        await self._setup_autoincrement_sequence(
            "sliding_sync_connection_positions", "connection_position"
        )
        await self._setup_autoincrement_sequence(
            "sliding_sync_connection_required_state", "required_state_id"
        )
        await self._setup_autoincrement_sequence(
            "sliding_sync_connections", "connection_key"
        )
        await self._setup_autoincrement_sequence(
            "state_groups_pending_deletion", "sequence_number"
        )

        # Step 3. Get tables.
        self.progress.set_state("Fetching tables")
        sqlite_tables = await self.sqlite_store.db_pool.simple_select_onecol(
@@ -925,7 +951,7 @@ class Porter:
        self.progress.set_state("Copying to postgres")

        constraints = await self.get_table_constraints()
        tables_ported = set()  # type: Set[str]
        tables_ported = set()  # type: set[str]

        while tables_to_port_info_map:
            # Pulls out all tables that are still to be ported and which
@@ -964,8 +990,8 @@ class Porter:
            reactor.stop()

    def _convert_rows(
        self, table: str, headers: List[str], rows: List[Tuple]
    ) -> List[Tuple]:
        self, table: str, headers: list[str], rows: list[tuple]
    ) -> list[tuple]:
        bool_col_names = BOOLEAN_COLUMNS.get(table, [])

        bool_cols = [i for i, h in enumerate(headers) if h in bool_col_names]
@@ -999,7 +1025,7 @@ class Porter:

        return outrows

    async def _setup_sent_transactions(self) -> Tuple[int, int, int]:
    async def _setup_sent_transactions(self) -> tuple[int, int, int]:
        # Only save things from the last day
        yesterday = int(time.time() * 1000) - 86400000

@@ -1011,7 +1037,7 @@ class Porter:
            ")"
        )

        def r(txn: LoggingTransaction) -> Tuple[List[str], List[Tuple]]:
        def r(txn: LoggingTransaction) -> tuple[list[str], list[tuple]]:
            txn.execute(select)
            rows = txn.fetchall()
            assert txn.description is not None
@@ -1081,14 +1107,14 @@ class Porter:
        self, table: str, forward_chunk: int, backward_chunk: int
    ) -> int:
        frows = cast(
            List[Tuple[int]],
            list[tuple[int]],
            await self.sqlite_store.execute_sql(
                "SELECT count(*) FROM %s WHERE rowid >= ?" % (table,), forward_chunk
            ),
        )

        brows = cast(
            List[Tuple[int]],
            list[tuple[int]],
            await self.sqlite_store.execute_sql(
                "SELECT count(*) FROM %s WHERE rowid <= ?" % (table,), backward_chunk
            ),
@@ -1105,7 +1131,7 @@ class Porter:

    async def _get_total_count_to_port(
        self, table: str, forward_chunk: int, backward_chunk: int
    ) -> Tuple[int, int]:
    ) -> tuple[int, int]:
        remaining, done = await make_deferred_yieldable(
            defer.gatherResults(
                [
@@ -1190,7 +1216,7 @@ class Porter:
    async def _setup_sequence(
        self,
        sequence_name: str,
        stream_id_tables: Iterable[Tuple[str, str]],
        stream_id_tables: Iterable[tuple[str, str]],
    ) -> None:
        """Set a sequence to the correct value."""
        current_stream_ids = []
@@ -1216,6 +1242,49 @@ class Porter:
            "_setup_%s" % (sequence_name,), r
        )

    async def _setup_autoincrement_sequence(
        self,
        sqlite_table_name: str,
        sqlite_id_column_name: str,
    ) -> None:
        """Set a sequence to the correct value. Used where the id column was declared with PRIMARY KEY AUTOINCREMENT."""
        seq_name = await self._pg_get_serial_sequence(
            sqlite_table_name, sqlite_id_column_name
        )
        if seq_name is None:
            raise Exception(
                "implicit sequence not found for table " + sqlite_table_name
            )

        seq_value = await self.sqlite_store.db_pool.simple_select_one_onecol(
            table="sqlite_sequence",
            keyvalues={"name": sqlite_table_name},
            retcol="seq",
            allow_none=True,
        )
        if seq_value is None:
            return

        def r(txn: LoggingTransaction) -> None:
            sql = "ALTER SEQUENCE %s RESTART WITH" % (seq_name,)
            txn.execute(sql + " %s", (seq_value + 1,))

        await self.postgres_store.db_pool.runInteraction("_setup_%s" % (seq_name,), r)

    async def _pg_get_serial_sequence(self, table: str, column: str) -> Optional[str]:
        """Returns the name of the postgres sequence associated with a column, or NULL."""

        def r(txn: LoggingTransaction) -> Optional[str]:
            txn.execute("SELECT pg_get_serial_sequence('%s', '%s')" % (table, column))
            result = txn.fetchone()
            if not result:
                return None
            return result[0]

        return await self.postgres_store.db_pool.runInteraction(
            "_pg_get_serial_sequence", r
        )

    async def _setup_auth_chain_sequence(self) -> None:
        curr_chain_id: Optional[
            int
@@ -1257,7 +1326,7 @@ class Progress:
    """Used to report progress of the port"""

    def __init__(self) -> None:
        self.tables: Dict[str, TableProgress] = {}
        self.tables: dict[str, TableProgress] = {}

        self.start_time = int(time.time())

@@ -1491,6 +1560,8 @@ def main() -> None:
    config = HomeServerConfig()
    config.parse_config_dict(hs_config, "", "")

    hs = MockHomeserver(config)

    def start(stdscr: Optional["curses.window"] = None) -> None:
        progress: Progress
        if stdscr:
@@ -1502,15 +1573,14 @@ def main() -> None:
        sqlite_config=sqlite_config,
        progress=progress,
        batch_size=args.batch_size,
        hs_config=config,
        hs=hs,
    )

    @defer.inlineCallbacks
    def run() -> Generator["defer.Deferred[Any]", Any, None]:
        with LoggingContext("synapse_port_db_run"):
            yield defer.ensureDeferred(porter.run())
        yield defer.ensureDeferred(porter.run())

    reactor.callWhenRunning(run)
    hs.get_clock().call_when_running(run)

    reactor.run()

@@ -28,11 +28,9 @@ import yaml
from twisted.internet import defer, reactor as reactor_

from synapse.config.homeserver import HomeServerConfig
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.server import HomeServer
from synapse.storage import DataStore
from synapse.types import ISynapseReactor
from synapse.util import SYNAPSE_VERSION

# Cast safety: Twisted does some naughty magic which replaces the
# twisted.internet.reactor module with a Reactor instance at runtime.
@@ -48,7 +46,6 @@ class MockHomeserver(HomeServer):
        hostname=config.server.server_name,
        config=config,
        reactor=reactor,
        version_string=f"Synapse/{SYNAPSE_VERSION}",
    )


@@ -66,10 +63,13 @@ def run_background_updates(hs: HomeServer) -> None:
    def run() -> None:
        # Apply all background updates on the database.
        defer.ensureDeferred(
            run_as_background_process("background_updates", run_background_updates)
            hs.run_as_background_process(
                "background_updates",
                run_background_updates,
            )
        )

    reactor.callWhenRunning(run)
    hs.get_clock().call_when_running(run)

    reactor.run()

@@ -115,6 +115,13 @@ def main() -> None:
    # DB.
    hs.setup()

    # This will cause all of the relevant storage classes to be instantiated and call
    # `register_background_update_handler(...)`,
    # `register_background_index_update(...)`,
    # `register_background_validate_constraint(...)`, etc so they are available to use
    # if we are asked to run those background updates.
    hs.get_storage_controllers()

    if args.run_background_updates:
        run_background_updates(hs)

@@ -18,12 +18,15 @@
# [This file includes modifications made by New Vector Limited]
#
#
from typing import TYPE_CHECKING, Optional, Protocol, Tuple
from typing import TYPE_CHECKING, Optional, Protocol

from prometheus_client import Histogram

from twisted.web.server import Request

from synapse.appservice import ApplicationService
from synapse.http.site import SynapseRequest
from synapse.metrics import SERVER_NAME_LABEL
from synapse.types import Requester

if TYPE_CHECKING:
@@ -33,6 +36,13 @@ if TYPE_CHECKING:
GUEST_DEVICE_ID = "guest_device"


introspection_response_timer = Histogram(
    "synapse_api_auth_delegated_introspection_response",
    "Time taken to get a response for an introspection request",
    labelnames=["code", SERVER_NAME_LABEL],
)

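A hedged usage sketch for the timer defined just above (label values assumed; the names come from the hunk, not a call site shown here): prometheus_client histograms record one observation per labelled child.

```python
# Hypothetical call site: record how long one introspection request took,
# tagged by HTTP status code and the per-server label used above.
introspection_response_timer.labels(
    code="200", **{SERVER_NAME_LABEL: "example.com"}
).observe(0.032)  # seconds
```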
class Auth(Protocol):
    """The interface that an auth provider must implement."""

@@ -41,7 +51,7 @@ class Auth(Protocol):
        room_id: str,
        requester: Requester,
        allow_departed_users: bool = False,
    ) -> Tuple[str, Optional[str]]:
    ) -> tuple[str, Optional[str]]:
        """Check if the user is in the room, or was at some point.
        Args:
            room_id: The room to check.
@@ -180,7 +190,7 @@ class Auth(Protocol):

    async def check_user_in_room_or_world_readable(
        self, room_id: str, requester: Requester, allow_departed_users: bool = False
    ) -> Tuple[str, Optional[str]]:
    ) -> tuple[str, Optional[str]]:
        """Checks that the user is or was in the room or the room is world
        readable. If it isn't then an exception is raised.


@@ -19,7 +19,7 @@
#
#
import logging
from typing import TYPE_CHECKING, Optional, Tuple
from typing import TYPE_CHECKING, Optional

from netaddr import IPAddress

@@ -64,7 +64,7 @@ class BaseAuth:
        room_id: str,
        requester: Requester,
        allow_departed_users: bool = False,
    ) -> Tuple[str, Optional[str]]:
    ) -> tuple[str, Optional[str]]:
        """Check if the user is in the room, or was at some point.
        Args:
            room_id: The room to check.
@@ -114,7 +114,7 @@ class BaseAuth:
    @trace
    async def check_user_in_room_or_world_readable(
        self, room_id: str, requester: Requester, allow_departed_users: bool = False
    ) -> Tuple[str, Optional[str]]:
    ) -> tuple[str, Optional[str]]:
        """Checks that the user is or was in the room or the room is world
        readable. If it isn't then an exception is raised.

@@ -172,7 +172,7 @@ class BaseAuth:
        """

        # It's ok if the app service is trying to use the sender from their registration
        if app_service.sender == user_id:
        if app_service.sender.to_string() == user_id:
            pass
        # Check to make sure the app service is allowed to control the user
        elif not app_service.is_interested_in_user(user_id):
@@ -302,12 +302,9 @@ class BaseAuth:
          (the user_id URI parameter allows an application service to masquerade
          any applicable user in its namespace)
        - what device the application service should be treated as controlling
          (the device_id[^1] URI parameter allows an application service to masquerade
          (the device_id URI parameter allows an application service to masquerade
          as any device that exists for the relevant user)

        [^1] Unstable and provided by MSC3202.
             Must use `org.matrix.msc3202.device_id` in place of `device_id` for now.

        Returns:
            the application service `Requester` of that request
@@ -319,7 +316,8 @@ class BaseAuth:
        - The returned device ID, if present, has been checked to be a valid device ID
          for the returned user ID.
        """
        DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id"
        # TODO: We can drop unstable support after 2026-01-01 (couple months after stable support)
        UNSTABLE_DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id"

        app_service = self.store.get_app_service_by_token(access_token)
        if app_service is None:
@@ -341,26 +339,24 @@ class BaseAuth:
        else:
            effective_user_id = app_service.sender

        effective_device_id: Optional[str] = None

        if (
            self.hs.config.experimental.msc3202_device_masquerading_enabled
            and DEVICE_ID_ARG_NAME in request.args
        ):
            effective_device_id = request.args[DEVICE_ID_ARG_NAME][0].decode("utf8")
            effective_device_id_args = request.args.get(
                b"device_id", request.args.get(UNSTABLE_DEVICE_ID_ARG_NAME)
            )
            if effective_device_id_args:
                effective_device_id = effective_device_id_args[0].decode("utf8")
            # We only just set this so it can't be None!
            assert effective_device_id is not None
            device_opt = await self.store.get_device(
                effective_user_id, effective_device_id
            )
            if device_opt is None:
                # For now, use 400 M_EXCLUSIVE if the device doesn't exist.
                # This is an open thread of discussion on MSC3202 as of 2021-12-09.
                raise AuthError(
                    400,
                    f"Application service trying to use a device that doesn't exist ('{effective_device_id}' for {effective_user_id})",
                    Codes.EXCLUSIVE,
                    Codes.UNKNOWN_DEVICE,
                )
        else:
            effective_device_id = None

        return create_requester(
            effective_user_id, app_service=app_service, device_id=effective_device_id
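The stable-then-unstable lookup above is just a nested `dict.get` fallback; a tiny self-contained illustration (request args mocked as a plain dict):

```python
UNSTABLE_DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id"

# A client still using the unstable MSC3202 parameter name:
args = {UNSTABLE_DEVICE_ID_ARG_NAME: [b"DEVICE1"]}

# Prefer the stable name, fall back to the unstable one.
device_id_args = args.get(b"device_id", args.get(UNSTABLE_DEVICE_ID_ARG_NAME))
assert device_id_args is not None
print(device_id_args[0].decode("utf8"))  # DEVICE1
```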