Mirror of https://github.com/element-hq/synapse.git, synced 2025-12-05 01:10:13 +00:00.

Compare commits: anoa/updat...anoa/valid (11 commits)
| SHA1 |
|---|
| c13b80d6da |
| e049a65e9e |
| a2ef624153 |
| 34d6eba694 |
| 938536186c |
| 9c2d8fd6dd |
| 75a45e9ce6 |
| f4c17c5a38 |
| 9d2cd9fe63 |
| 67d516d2a4 |
| 3c0c30ad7d |
changelog.d/17097.misc (new file, 1 line)

@@ -0,0 +1 @@
+Extend validation of uploaded device keys.
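The one-line changelog summarises the rest of this diff: the `/keys/upload` request body moves from ad-hoc `JsonDict` inspection to strict pydantic models. A minimal sketch of the idea, using an illustrative model rather than Synapse's actual `KeyUploadRequestBody`:

```python
from typing import Dict, List

from pydantic import BaseModel, StrictStr, ValidationError


class DeviceKeysSketch(BaseModel):
    """Illustrative stand-in for the request model added in this changeset."""

    algorithms: List[StrictStr]
    device_id: StrictStr
    keys: Dict[StrictStr, StrictStr]


try:
    # Strict fields reject coerced values that raw JsonDict handling would let through.
    DeviceKeysSketch(
        algorithms=["m.olm.v1.curve25519-aes-sha2"], device_id=123, keys={}
    )
except ValidationError as e:
    print(e)  # device_id: input should be a valid string
```

Strict fields make malformed uploads fail at parse time instead of deep inside the handler; Synapse maps such failures to its own error responses.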
poetry.lock (generated, 73 lines changed)
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand.
 
 [[package]]
 name = "annotated-types"

@@ -39,7 +39,7 @@ description = "The ultimate Python library in building OAuth and OpenID Connect
 optional = true
 python-versions = ">=3.9"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\""
+markers = "extra == \"oidc\" or extra == \"jwt\" or extra == \"all\""
 files = [
     {file = "authlib-1.6.3-py2.py3-none-any.whl", hash = "sha256:7ea0f082edd95a03b7b72edac65ec7f8f68d703017d7e37573aee4fc603f2a48"},
     {file = "authlib-1.6.3.tar.gz", hash = "sha256:9f7a982cc395de719e4c2215c5707e7ea690ecf84f1ab126f28c053f4219e610"},

@@ -435,7 +435,7 @@ description = "XML bomb protection for Python stdlib modules"
 optional = true
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"saml2\""
+markers = "extra == \"saml2\" or extra == \"all\""
 files = [
     {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
     {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},

@@ -460,7 +460,7 @@ description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and l
 optional = true
 python-versions = ">=3.7"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"saml2\""
+markers = "extra == \"saml2\" or extra == \"all\""
 files = [
     {file = "elementpath-4.1.5-py3-none-any.whl", hash = "sha256:2ac1a2fb31eb22bbbf817f8cf6752f844513216263f0e3892c8e79782fe4bb55"},
     {file = "elementpath-4.1.5.tar.gz", hash = "sha256:c2d6dc524b29ef751ecfc416b0627668119d8812441c555d7471da41d4bacb8d"},

@@ -511,7 +511,7 @@ description = "Python wrapper for hiredis"
 optional = true
 python-versions = ">=3.8"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"redis\""
+markers = "extra == \"redis\" or extra == \"all\""
 files = [
     {file = "hiredis-3.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:add17efcbae46c5a6a13b244ff0b4a8fa079602ceb62290095c941b42e9d5dec"},
     {file = "hiredis-3.2.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:5fe955cc4f66c57df1ae8e5caf4de2925d43b5efab4e40859662311d1bcc5f54"},

@@ -848,7 +848,7 @@ description = "Jaeger Python OpenTracing Tracer implementation"
 optional = true
 python-versions = ">=3.7"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"opentracing\""
+markers = "extra == \"opentracing\" or extra == \"all\""
 files = [
     {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"},
 ]

@@ -986,7 +986,7 @@ description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library
 optional = true
 python-versions = "*"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
+markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
 files = [
     {file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"},
     {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"},

@@ -1002,7 +1002,7 @@ description = "Powerful and Pythonic XML processing library combining libxml2/li
 optional = true
 python-versions = ">=3.8"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"url-preview\""
+markers = "extra == \"url-preview\" or extra == \"all\""
 files = [
     {file = "lxml-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:35bc626eec405f745199200ccb5c6b36f202675d204aa29bb52e27ba2b71dea8"},
     {file = "lxml-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:246b40f8a4aec341cbbf52617cad8ab7c888d944bfe12a6abd2b1f6cfb6f6082"},

@@ -1038,12 +1038,10 @@ files = [
     {file = "lxml-6.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:219e0431ea8006e15005767f0351e3f7f9143e793e58519dc97fe9e07fae5563"},
     {file = "lxml-6.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bd5913b4972681ffc9718bc2d4c53cde39ef81415e1671ff93e9aa30b46595e7"},
     {file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:390240baeb9f415a82eefc2e13285016f9c8b5ad71ec80574ae8fa9605093cd7"},
     {file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d6e200909a119626744dd81bae409fc44134389e03fbf1d68ed2a55a2fb10991"},
     {file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca50bd612438258a91b5b3788c6621c1f05c8c478e7951899f492be42defc0da"},
     {file = "lxml-6.0.0-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:c24b8efd9c0f62bad0439283c2c795ef916c5a6b75f03c17799775c7ae3c0c9e"},
     {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:afd27d8629ae94c5d863e32ab0e1d5590371d296b87dae0a751fb22bf3685741"},
     {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:54c4855eabd9fc29707d30141be99e5cd1102e7d2258d2892314cf4c110726c3"},
     {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c907516d49f77f6cd8ead1322198bdfd902003c3c330c77a1c5f3cc32a0e4d16"},
     {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36531f81c8214e293097cd2b7873f178997dae33d3667caaae8bdfb9666b76c0"},
     {file = "lxml-6.0.0-cp312-cp312-win32.whl", hash = "sha256:690b20e3388a7ec98e899fd54c924e50ba6693874aa65ef9cb53de7f7de9d64a"},
     {file = "lxml-6.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:310b719b695b3dd442cdfbbe64936b2f2e231bb91d998e99e6f0daf991a3eba3"},

@@ -1054,12 +1052,10 @@ files = [
     {file = "lxml-6.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d18a25b19ca7307045581b18b3ec9ead2b1db5ccd8719c291f0cd0a5cec6cb81"},
     {file = "lxml-6.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d4f0c66df4386b75d2ab1e20a489f30dc7fd9a06a896d64980541506086be1f1"},
     {file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f4b481b6cc3a897adb4279216695150bbe7a44c03daba3c894f49d2037e0a24"},
     {file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a78d6c9168f5bcb20971bf3329c2b83078611fbe1f807baadc64afc70523b3a"},
     {file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae06fbab4f1bb7db4f7c8ca9897dc8db4447d1a2b9bee78474ad403437bcc29"},
     {file = "lxml-6.0.0-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:1fa377b827ca2023244a06554c6e7dc6828a10aaf74ca41965c5d8a4925aebb4"},
     {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1676b56d48048a62ef77a250428d1f31f610763636e0784ba67a9740823988ca"},
     {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:0e32698462aacc5c1cf6bdfebc9c781821b7e74c79f13e5ffc8bfe27c42b1abf"},
     {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4d6036c3a296707357efb375cfc24bb64cd955b9ec731abf11ebb1e40063949f"},
     {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7488a43033c958637b1a08cddc9188eb06d3ad36582cebc7d4815980b47e27ef"},
     {file = "lxml-6.0.0-cp313-cp313-win32.whl", hash = "sha256:5fcd7d3b1d8ecb91445bd71b9c88bdbeae528fefee4f379895becfc72298d181"},
     {file = "lxml-6.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:2f34687222b78fff795feeb799a7d44eca2477c3d9d3a46ce17d51a4f383e32e"},

@@ -1243,7 +1239,7 @@ description = "An LDAP3 auth provider for Synapse"
 optional = true
 python-versions = ">=3.7"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
+markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
 files = [
     {file = "matrix-synapse-ldap3-0.3.0.tar.gz", hash = "sha256:8bb6517173164d4b9cc44f49de411d8cebdb2e705d5dd1ea1f38733c4a009e1d"},
     {file = "matrix_synapse_ldap3-0.3.0-py3-none-any.whl", hash = "sha256:8b4d701f8702551e98cc1d8c20dbed532de5613584c08d0df22de376ba99159d"},

@@ -1482,7 +1478,7 @@ description = "OpenTracing API for Python. See documentation at http://opentraci
 optional = true
 python-versions = "*"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"opentracing\""
+markers = "extra == \"opentracing\" or extra == \"all\""
 files = [
     {file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"},
 ]

@@ -1551,8 +1547,6 @@ groups = ["main"]
 files = [
     {file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"},
     {file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"},
     {file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"},
     {file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"},
     {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"},
     {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"},
     {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"},

@@ -1562,8 +1556,6 @@ files = [
     {file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"},
     {file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"},
     {file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"},
     {file = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"},
     {file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"},
     {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"},
     {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"},
     {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"},

@@ -1573,8 +1565,6 @@ files = [
     {file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"},
     {file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"},
     {file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"},
     {file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"},
     {file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"},
     {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"},
     {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"},
     {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"},

@@ -1587,8 +1577,6 @@ files = [
     {file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"},
     {file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"},
     {file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"},
     {file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"},
     {file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"},
     {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"},
     {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"},
     {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"},

@@ -1598,8 +1586,6 @@ files = [
     {file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"},
     {file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"},
     {file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"},
     {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"},
     {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"},
     {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"},
     {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"},
     {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"},

@@ -1609,8 +1595,6 @@ files = [
     {file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"},
     {file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"},
     {file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"},
     {file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"},
     {file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"},
     {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"},
     {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"},
     {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"},

@@ -1620,8 +1604,6 @@ files = [
     {file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"},
     {file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"},
     {file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"},
     {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"},
     {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"},
     {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"},
     {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"},
     {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"},

@@ -1631,8 +1613,6 @@ files = [
     {file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"},
     {file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"},
     {file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"},
     {file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"},
     {file = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"},
     {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"},
     {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"},
     {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"},

@@ -1642,15 +1622,11 @@ files = [
     {file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"},
     {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"},
     {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"},
     {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"},
     {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"},
     {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"},
     {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"},
     {file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"},
     {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"},
     {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"},
     {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"},
     {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"},
     {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"},
     {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"},
     {file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"},

@@ -1688,7 +1664,7 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter"
 optional = true
 python-versions = ">=3.8"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"postgres\""
+markers = "extra == \"postgres\" or extra == \"all\""
 files = [
     {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"},
     {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"},

@@ -1696,7 +1672,6 @@ files = [
     {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"},
     {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"},
     {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"},
     {file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"},
     {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"},
     {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"},
     {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"},

@@ -1709,7 +1684,7 @@ description = ".. image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=mas
 optional = true
 python-versions = "*"
 groups = ["main"]
-markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
+markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
 files = [
     {file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"},
 ]

@@ -1725,7 +1700,7 @@ description = "A Simple library to enable psycopg2 compatability"
 optional = true
 python-versions = "*"
 groups = ["main"]
-markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
+markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
 files = [
     {file = "psycopg2cffi-compat-1.1.tar.gz", hash = "sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05"},
 ]

@@ -1984,7 +1959,7 @@ description = "A development tool to measure, monitor and analyze the memory beh
 optional = true
 python-versions = ">=3.6"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"cache-memory\""
+markers = "extra == \"cache-memory\" or extra == \"all\""
 files = [
     {file = "Pympler-1.0.1-py3-none-any.whl", hash = "sha256:d260dda9ae781e1eab6ea15bacb84015849833ba5555f141d2d9b7b7473b307d"},
     {file = "Pympler-1.0.1.tar.gz", hash = "sha256:993f1a3599ca3f4fcd7160c7545ad06310c9e12f70174ae7ae8d4e25f6c5d3fa"},

@@ -2044,7 +2019,7 @@ description = "Python implementation of SAML Version 2 Standard"
 optional = true
 python-versions = ">=3.9,<4.0"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"saml2\""
+markers = "extra == \"saml2\" or extra == \"all\""
 files = [
     {file = "pysaml2-7.5.0-py3-none-any.whl", hash = "sha256:bc6627cc344476a83c757f440a73fda1369f13b6fda1b4e16bca63ffbabb5318"},
     {file = "pysaml2-7.5.0.tar.gz", hash = "sha256:f36871d4e5ee857c6b85532e942550d2cf90ea4ee943d75eb681044bbc4f54f7"},

@@ -2069,7 +2044,7 @@ description = "Extensions to the standard Python datetime module"
 optional = true
 python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"saml2\""
+markers = "extra == \"saml2\" or extra == \"all\""
 files = [
     {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
     {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},

@@ -2097,7 +2072,7 @@ description = "World timezone definitions, modern and historical"
 optional = true
 python-versions = "*"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"saml2\""
+markers = "extra == \"saml2\" or extra == \"all\""
 files = [
     {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
     {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"},

@@ -2463,7 +2438,7 @@ description = "Python client for Sentry (https://sentry.io)"
 optional = true
 python-versions = ">=3.6"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"sentry\""
+markers = "extra == \"sentry\" or extra == \"all\""
 files = [
     {file = "sentry_sdk-2.34.1-py2.py3-none-any.whl", hash = "sha256:b7a072e1cdc5abc48101d5146e1ae680fa81fe886d8d95aaa25a0b450c818d32"},
     {file = "sentry_sdk-2.34.1.tar.gz", hash = "sha256:69274eb8c5c38562a544c3e9f68b5be0a43be4b697f5fd385bf98e4fbe672687"},

@@ -2651,7 +2626,7 @@ description = "Tornado IOLoop Backed Concurrent Futures"
 optional = true
 python-versions = "*"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"opentracing\""
+markers = "extra == \"opentracing\" or extra == \"all\""
 files = [
     {file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"},
     {file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"},

@@ -2667,7 +2642,7 @@ description = "Python bindings for the Apache Thrift RPC system"
 optional = true
 python-versions = "*"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"opentracing\""
+markers = "extra == \"opentracing\" or extra == \"all\""
 files = [
     {file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"},
 ]

@@ -2729,7 +2704,7 @@ description = "Tornado is a Python web framework and asynchronous networking lib
 optional = true
 python-versions = ">=3.9"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"opentracing\""
+markers = "extra == \"opentracing\" or extra == \"all\""
 files = [
     {file = "tornado-6.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:f81067dad2e4443b015368b24e802d0083fecada4f0a4572fdb72fc06e54a9a6"},
     {file = "tornado-6.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9ac1cbe1db860b3cbb251e795c701c41d343f06a96049d6274e7c77559117e41"},

@@ -2866,7 +2841,7 @@ description = "non-blocking redis client for python"
 optional = true
 python-versions = "*"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"redis\""
+markers = "extra == \"redis\" or extra == \"all\""
 files = [
     {file = "txredisapi-1.4.11-py3-none-any.whl", hash = "sha256:ac64d7a9342b58edca13ef267d4fa7637c1aa63f8595e066801c1e8b56b22d0b"},
     {file = "txredisapi-1.4.11.tar.gz", hash = "sha256:3eb1af99aefdefb59eb877b1dd08861efad60915e30ad5bf3d5bf6c5cedcdbc6"},

@@ -3112,7 +3087,7 @@ description = "An XML Schema validator and decoder"
 optional = true
 python-versions = ">=3.7"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"saml2\""
+markers = "extra == \"saml2\" or extra == \"all\""
 files = [
     {file = "xmlschema-2.4.0-py3-none-any.whl", hash = "sha256:dc87be0caaa61f42649899189aab2fd8e0d567f2cf548433ba7b79278d231a4a"},
     {file = "xmlschema-2.4.0.tar.gz", hash = "sha256:d74cd0c10866ac609e1ef94a5a69b018ad16e39077bc6393408b40c6babee793"},

@@ -3256,4 +3231,4 @@ url-preview = ["lxml"]
 [metadata]
 lock-version = "2.1"
 python-versions = "^3.9.0"
-content-hash = "2e8ea085e1a0c6f0ac051d4bc457a96827d01f621b1827086de01a5ffa98cf79"
+content-hash = "8783bfa1c998c4cf854e173b3f6745b0e21e655e0c24a8f9cda4be5d7375dc19"
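Most of the lockfile churn above is Poetry 2.1.4 re-emitting extras markers in a different order; since `or` is commutative, the old and new marker strings select exactly the same environments. A quick check with the `packaging` library (illustrative, not part of this changeset):

```python
from packaging.markers import Marker

old = Marker('extra == "all" or extra == "saml2"')
new = Marker('extra == "saml2" or extra == "all"')

# Both forms evaluate identically for any value of `extra`.
for extra in ("all", "saml2", "redis"):
    env = {"extra": extra}
    assert old.evaluate(env) == new.evaluate(env)
```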
pyproject.toml

@@ -224,7 +224,7 @@ matrix-common = "^1.3.0"
 packaging = ">=20.0"
 # We support pydantic v1 and pydantic v2 via the pydantic.v1 compat module.
 # See https://github.com/matrix-org/synapse/issues/15858
-pydantic = ">=1.7.4, <3"
+pydantic = ">=2.0.0, <3"
 
 # This is for building the rust components during "poetry install", which
 # currently ignores the `build-system.requires` directive (c.f.
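The comment above survives the version bump because the `pydantic.v1` compat module keeps v1-style call sites working under pydantic v2. The pattern, sketched standalone for illustration (Synapse centralises it in `synapse/_pydantic_compat.py`):

```python
try:
    # pydantic v2 ships a vendored copy of the v1 API.
    from pydantic.v1 import BaseModel, StrictStr
except ImportError:
    # Running under pydantic v1 itself.
    from pydantic import BaseModel, StrictStr
```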
scripts-dev/check_pydantic_models.py (deleted file, 478 lines)

@@ -1,478 +0,0 @@
#! /usr/bin/env python
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2022 The Matrix.org Foundation C.I.C.
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#
"""
A script which enforces that Synapse always uses strict types when defining a Pydantic
model.

Pydantic does not yet offer a strict mode, but it is planned for pydantic v2. See

    https://github.com/pydantic/pydantic/issues/1098
    https://pydantic-docs.helpmanual.io/blog/pydantic-v2/#strict-mode

until then, this script is a best effort to stop us from introducing type coercion bugs
(like the infamous stringy power levels fixed in room version 10).
"""

import argparse
import contextlib
import functools
import importlib
import logging
import os
import pkgutil
import sys
import textwrap
import traceback
import unittest.mock
from contextlib import contextmanager
from typing import (
    Any,
    Callable,
    Dict,
    Generator,
    List,
    Set,
    Type,
    TypeVar,
)

from parameterized import parameterized
from typing_extensions import ParamSpec

from synapse._pydantic_compat import (
    BaseModel as PydanticBaseModel,
    conbytes,
    confloat,
    conint,
    constr,
    get_args,
)

logger = logging.getLogger(__name__)

CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: List[Callable] = [
    constr,
    conbytes,
    conint,
    confloat,
]

TYPES_THAT_PYDANTIC_WILL_COERCE_TO = [
    str,
    bytes,
    int,
    float,
    bool,
]


P = ParamSpec("P")
R = TypeVar("R")

class ModelCheckerException(Exception):
    """Dummy exception. Allows us to detect unwanted types during a module import."""


class MissingStrictInConstrainedTypeException(ModelCheckerException):
    factory_name: str

    def __init__(self, factory_name: str):
        self.factory_name = factory_name


class FieldHasUnwantedTypeException(ModelCheckerException):
    message: str

    def __init__(self, message: str):
        self.message = message


def make_wrapper(factory: Callable[P, R]) -> Callable[P, R]:
    """We patch `constr` and friends with wrappers that enforce strict=True."""

    @functools.wraps(factory)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        # Reject calls that omit strict=... entirely, or pass strict=False.
        if "strict" not in kwargs:
            raise MissingStrictInConstrainedTypeException(factory.__name__)
        if not kwargs["strict"]:
            raise MissingStrictInConstrainedTypeException(factory.__name__)
        return factory(*args, **kwargs)

    return wrapper

def field_type_unwanted(type_: Any) -> bool:
    """Very rough attempt to detect if a type is unwanted as a Pydantic annotation.

    At present, we exclude types which will coerce, or any generic type involving types
    which will coerce."""
    logger.debug("Is %s unwanted?", type_)
    if type_ in TYPES_THAT_PYDANTIC_WILL_COERCE_TO:
        logger.debug("yes")
        return True
    logger.debug("Maybe. Subargs are %s", get_args(type_))
    rv = any(field_type_unwanted(t) for t in get_args(type_))
    logger.debug("Conclusion: %s %s unwanted", type_, "is" if rv else "is not")
    return rv

class PatchedBaseModel(PydanticBaseModel):
    """A patched version of BaseModel that inspects fields after models are defined.

    We complain loudly if we see an unwanted type.

    Beware: ModelField.type_ is presumably private; this is likely to be very brittle.
    """

    @classmethod
    def __init_subclass__(cls: Type[PydanticBaseModel], **kwargs: object):
        for field in cls.__fields__.values():
            # Note that field.type_ and field.outer_type are computed based on the
            # annotation type, see pydantic.fields.ModelField._type_analysis
            if field_type_unwanted(field.outer_type_):
                # TODO: this only reports the first bad field. Can we find all bad ones
                # and report them all?
                raise FieldHasUnwantedTypeException(
                    f"{cls.__module__}.{cls.__qualname__} has field '{field.name}' "
                    f"with unwanted type `{field.outer_type_}`"
                )


@contextmanager
def monkeypatch_pydantic() -> Generator[None, None, None]:
    """Patch pydantic with our snooping versions of BaseModel and the con* functions.

    If the snooping functions see something they don't like, they'll raise a
    ModelCheckerException instance.
    """
    with contextlib.ExitStack() as patches:
        # Most Synapse code ought to import the patched objects directly from
        # `pydantic`. But we also patch their containing modules `pydantic.main` and
        # `pydantic.types` for completeness.
        patch_basemodel = unittest.mock.patch(
            "synapse._pydantic_compat.BaseModel", new=PatchedBaseModel
        )
        patches.enter_context(patch_basemodel)
        for factory in CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG:
            wrapper: Callable = make_wrapper(factory)
            patch = unittest.mock.patch(
                f"synapse._pydantic_compat.{factory.__name__}", new=wrapper
            )
            patches.enter_context(patch)
        yield

def format_model_checker_exception(e: ModelCheckerException) -> str:
    """Work out which line of code caused e. Format the line in a human-friendly way."""
    # TODO. FieldHasUnwantedTypeException gives better error messages. Can we ditch the
    # patches of constr() etc, and instead inspect fields to look for ConstrainedStr
    # with strict=False? There is some difficulty with the inheritance hierarchy
    # because StrictStr < ConstrainedStr < str.
    if isinstance(e, FieldHasUnwantedTypeException):
        return e.message
    elif isinstance(e, MissingStrictInConstrainedTypeException):
        frame_summary = traceback.extract_tb(e.__traceback__)[-2]
        return (
            f"Missing `strict=True` from {e.factory_name}() call \n"
            + traceback.format_list([frame_summary])[0].lstrip()
        )
    else:
        raise ValueError(f"Unknown exception {e}") from e


def lint() -> int:
    """Try to import all of Synapse and see if we spot any Pydantic type coercions.

    Print any problems, then return a status code suitable for sys.exit."""
    failures = do_lint()
    if failures:
        print(f"Found {len(failures)} problem(s)")
    for failure in sorted(failures):
        print(failure)
    return os.EX_DATAERR if failures else os.EX_OK


def do_lint() -> Set[str]:
    """Try to import all of Synapse and see if we spot any Pydantic type coercions."""
    failures = set()

    with monkeypatch_pydantic():
        logger.debug("Importing synapse")
        try:
            # TODO: make "synapse" an argument so we can target this script at
            # a subpackage
            module = importlib.import_module("synapse")
        except ModelCheckerException as e:
            logger.warning("Bad annotation found when importing synapse")
            failures.add(format_model_checker_exception(e))
            return failures

        try:
            logger.debug("Fetching subpackages")
            module_infos = list(
                pkgutil.walk_packages(module.__path__, f"{module.__name__}.")
            )
        except ModelCheckerException as e:
            logger.warning("Bad annotation found when looking for modules to import")
            failures.add(format_model_checker_exception(e))
            return failures

        for module_info in module_infos:
            logger.debug("Importing %s", module_info.name)
            try:
                importlib.import_module(module_info.name)
            except ModelCheckerException as e:
                logger.warning(
                    "Bad annotation found when importing %s", module_info.name
                )
                failures.add(format_model_checker_exception(e))

    return failures

def run_test_snippet(source: str) -> None:
    """Exec a snippet of source code in an isolated environment."""
    # To emulate `source` being called at the top level of the module,
    # the globals and locals we provide apparently have to be the same mapping.
    #
    # > Remember that at the module level, globals and locals are the same dictionary.
    # > If exec gets two separate objects as globals and locals, the code will be
    # > executed as if it were embedded in a class definition.
    globals_: Dict[str, object]
    locals_: Dict[str, object]
    globals_ = locals_ = {}
    exec(textwrap.dedent(source), globals_, locals_)

class TestConstrainedTypesPatch(unittest.TestCase):
    def test_expression_without_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr()
                """
            )

    def test_called_as_module_attribute_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                import pydantic
                pydantic.constr()
                """
            )

    def test_wildcard_import_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import *
                except ImportError:
                    from pydantic import *
                constr()
                """
            )

    def test_alternative_import_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1.types import constr
                except ImportError:
                    from pydantic.types import constr
                constr()
                """
            )

    def test_alternative_import_attribute_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import types as pydantic_types
                except ImportError:
                    from pydantic import types as pydantic_types
                pydantic_types.constr()
                """
            )

    def test_kwarg_but_no_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr(min_length=10)
                """
            )

    def test_kwarg_strict_False_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr(strict=False)
                """
            )

    def test_kwarg_strict_True_doesnt_raise(self) -> None:
        with monkeypatch_pydantic():
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr(strict=True)
                """
            )

    def test_annotation_without_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                x: constr()
                """
            )

    def test_field_annotation_without_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import BaseModel, conint
                except ImportError:
                    from pydantic import BaseModel, conint
                class C:
                    x: conint()
                """
            )

class TestFieldTypeInspection(unittest.TestCase):
    @parameterized.expand(
        [
            ("str",),
            ("bytes",),
            ("int",),
            ("float",),
            ("bool",),
            ("Optional[str]",),
            ("Union[None, str]",),
            ("List[str]",),
            ("List[List[str]]",),
            ("Dict[StrictStr, str]",),
            ("Dict[str, StrictStr]",),
            ("TypedDict('D', x=int)",),
        ]
    )
    def test_field_holding_unwanted_type_raises(self, annotation: str) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                f"""
                from typing import *
                try:
                    from pydantic.v1 import *
                except ImportError:
                    from pydantic import *
                class C(BaseModel):
                    f: {annotation}
                """
            )

    @parameterized.expand(
        [
            ("StrictStr",),
            ("StrictBytes",),
            ("StrictInt",),
            ("StrictFloat",),
            ("StrictBool",),
            ("constr(strict=True, min_length=10)",),
            ("Optional[StrictStr]",),
            ("Union[None, StrictStr]",),
            ("List[StrictStr]",),
            ("List[List[StrictStr]]",),
            ("Dict[StrictStr, StrictStr]",),
            ("TypedDict('D', x=StrictInt)",),
        ]
    )
    def test_field_holding_accepted_type_doesnt_raise(self, annotation: str) -> None:
        with monkeypatch_pydantic():
            run_test_snippet(
                f"""
                from typing import *
                try:
                    from pydantic.v1 import *
                except ImportError:
                    from pydantic import *
                class C(BaseModel):
                    f: {annotation}
                """
            )

    def test_field_holding_str_raises_with_alternative_import(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1.main import BaseModel
                except ImportError:
                    from pydantic.main import BaseModel
                class C(BaseModel):
                    f: str
                """
            )

parser = argparse.ArgumentParser()
parser.add_argument("mode", choices=["lint", "test"], default="lint", nargs="?")
parser.add_argument("-v", "--verbose", action="store_true")


if __name__ == "__main__":
    args = parser.parse_args(sys.argv[1:])
    logging.basicConfig(
        format="%(asctime)s %(name)s:%(lineno)d %(levelname)s %(message)s",
        level=logging.DEBUG if args.verbose else logging.INFO,
    )
    # suppress logs we don't care about
    logging.getLogger("xmlschema").setLevel(logging.WARNING)
    if args.mode == "lint":
        sys.exit(lint())
    elif args.mode == "test":
        unittest.main(argv=sys.argv[:1])
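For context on why this checker existed at all: without `strict=True`, pydantic happily coerces strings into numbers, which is exactly the class of bug (stringy power levels) the script's docstring mentions. A standalone demonstration, assuming only that pydantic is installed:

```python
from pydantic import BaseModel, StrictInt, ValidationError


class LaxModel(BaseModel):
    power_level: int  # non-strict: "100" is silently coerced to 100


class StrictModel(BaseModel):
    power_level: StrictInt  # strict: "100" is rejected


print(LaxModel(power_level="100").power_level)  # -> 100 (an int)

try:
    StrictModel(power_level="100")
except ValidationError as e:
    print(e)  # power_level must be a real integer, not a string
```

The script's removal lines up with the pyproject.toml bump to pydantic >=2.0.0 above, where strict types are a first-class feature of the models themselves rather than something enforced by import-time monkeypatching.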
scripts-dev/lint.sh

@@ -134,9 +134,6 @@ fi
 # Ensure the formatting of Rust code.
 cargo-fmt
 
-# Ensure all Pydantic models use strict types.
-./scripts-dev/check_pydantic_models.py lint
-
 # Ensure type hints are correct.
 mypy
 
synapse/handlers/e2e_keys.py

@@ -20,12 +20,22 @@
 #
 #
 import logging
-from typing import TYPE_CHECKING, Dict, Iterable, List, Mapping, Optional, Tuple
+from typing import (
+    TYPE_CHECKING,
+    Dict,
+    Iterable,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    Union,
+)
 
 import attr
 from canonicaljson import encode_canonical_json
 from signedjson.key import VerifyKey, decode_verify_key_bytes
 from signedjson.sign import SignatureVerifyException, verify_signed_json
+from typing_extensions import TypeAlias
 from unpaddedbase64 import decode_base64
 
 from twisted.internet import defer
@@ -57,10 +67,50 @@ if TYPE_CHECKING:
 
 logger = logging.getLogger(__name__)
 
 
 ONE_TIME_KEY_UPLOAD = "one_time_key_upload_lock"
 
 
+@attr.s(frozen=True, slots=True, auto_attribs=True)
+class DeviceKeys:
+    algorithms: List[str]
+    """The encryption algorithms supported by this device."""
+
+    device_id: str
+    """The ID of the device these keys belong to. Must match the device ID used when logging in."""
+
+    keys: Mapping[str, str]
+    """
+    Public identity keys. The names of the properties should be in the
+    format `<algorithm>:<device_id>`. The keys themselves should be encoded as
+    specified by the key algorithm.
+    """
+
+    signatures: Mapping[UserID, Mapping[str, str]]
+    """Signatures for the device key object. A map from user ID, to a map from "<algorithm>:<device_id>" to the signature."""
+
+    user_id: UserID
+    """The ID of the user the device belongs to. Must match the user ID used when logging in."""
+
+
+@attr.s(frozen=True, slots=True, auto_attribs=True)
+class KeyObject:
+    key: str
+    """The key, encoded using unpadded base64."""
+
+    signatures: Mapping[UserID, Mapping[str, str]]
+    """Signature for the device. Mapped from user ID to another map of key signing identifier to the signature itself.
+
+    See the following for more detail: https://spec.matrix.org/v1.16/appendices/#signing-details
+    """
+
+    fallback: bool = False
+    """Whether this is a fallback key."""
+
+
+FallbackKeys: TypeAlias = Mapping[str, Union[str, KeyObject]]
+OneTimeKeys: TypeAlias = Mapping[str, Union[str, KeyObject]]
+
+
 class E2eKeysHandler:
     def __init__(self, hs: "HomeServer"):
         self.config = hs.config
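A sketch of how the new frozen attrs containers above might be populated. The key material and IDs below are illustrative values in the shape the Matrix spec uses, not real keys:

```python
from synapse.types import UserID

alice = UserID.from_string("@alice:example.com")

device_keys = DeviceKeys(
    algorithms=["m.olm.v1.curve25519-aes-sha2", "m.megolm.v1.aes-sha2"],
    device_id="JLAFKJWSCS",
    keys={"curve25519:JLAFKJWSCS": "3C5BFWi2Y8MaVvjM8M22DBmh24PmgR0nPvJOIArzgyI"},
    signatures={alice: {"ed25519:JLAFKJWSCS": "dSO80A01XiigH3uBiDVx/EjzaoycHcjq9lfQX0uWsqxl2giMIiSPR8a4d291W1ihKJL/a+myXS367WT6NAIcBA"}},
    user_id=alice,
)

# One-time keys map "<algorithm>:<key_id>" to either a bare string or a KeyObject.
one_time_keys: OneTimeKeys = {
    "signed_curve25519:AAAAHg": KeyObject(
        key="zKbLg+NrIjpnagy+pIY6uPL4ZwEG2v+8F9lmgsnlZzs",
        signatures={alice: {"ed25519:JLAFKJWSCS": "FLWxXqGbwrb8SM3Y795eB6OA8bwBcoMZFXBqnTn58AYWZSqiD45tlBVcDa2L7RwdKXebW/VzDlnfVJ+9jok1Bw"}},
    ),
}
```

Freezing the containers (`frozen=True, slots=True`) means a validated upload cannot be mutated after it passes the checks in `upload_keys_for_user` below.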
@@ -834,7 +884,12 @@ class E2eKeysHandler:
 
     @tag_args
     async def upload_keys_for_user(
-        self, user_id: str, device_id: str, keys: JsonDict
+        self,
+        user_id: str,
+        device_id: str,
+        device_keys: Optional[DeviceKeys],
+        fallback_keys: Optional[FallbackKeys],
+        one_time_keys: Optional[OneTimeKeys],
     ) -> JsonDict:
         """
         Args:

@@ -848,16 +903,27 @@
         """
         time_now = self.clock.time_msec()
 
-        # TODO: Validate the JSON to make sure it has the right keys.
-        device_keys = keys.get("device_keys", None)
         if device_keys:
-            await self.upload_device_keys_for_user(
-                user_id=user_id,
-                device_id=device_id,
-                keys={"device_keys": device_keys},
-            )
+            # Validate that user_id and device_id match the requesting user
+            if (
+                device_keys.user_id.to_string() == user_id
+                and device_keys.device_id == device_id
+            ):
+                await self.upload_device_keys_for_user(
+                    user_id,
+                    device_id,
+                    device_keys,
+                )
+            else:
+                log_kv(
+                    {
+                        "message": "Not updating device_keys for user, user_id or device_id mismatch",
+                        "user_id": user_id,
+                    }
+                )
-        else:
-            log_kv({"message": "Did not update device_keys", "reason": "not a dict"})
 
-        one_time_keys = keys.get("one_time_keys", None)
         if one_time_keys:
             log_kv(
                 {

@@ -869,14 +935,14 @@
             await self._upload_one_time_keys_for_user(
                 user_id, device_id, time_now, one_time_keys
             )
-        elif one_time_keys:
-            log_kv({"message": "Did not update device_keys", "reason": "not a dict"})
         else:
             log_kv(
                 {"message": "Did not update one_time_keys", "reason": "no keys given"}
             )
-        fallback_keys = keys.get("fallback_keys") or keys.get(
-            "org.matrix.msc2732.fallback_keys"
-        )
-        if fallback_keys and isinstance(fallback_keys, dict):
+
+        if fallback_keys:
             log_kv(
                 {
                     "message": "Updating fallback_keys for device.",

@@ -885,8 +951,6 @@
                 }
             )
             await self.store.set_e2e_fallback_keys(user_id, device_id, fallback_keys)
-        elif fallback_keys:
-            log_kv({"message": "Did not update fallback_keys", "reason": "not a dict"})
         else:
             log_kv(
                 {"message": "Did not update fallback_keys", "reason": "no keys given"}

@@ -899,7 +963,10 @@
 
     @tag_args
     async def upload_device_keys_for_user(
-        self, user_id: str, device_id: str, keys: JsonDict
+        self,
+        user_id: str,
+        device_id: str,
+        device_keys: DeviceKeys,
     ) -> None:
         """
         Args:

@@ -910,7 +977,6 @@
         """
         time_now = self.clock.time_msec()
 
-        device_keys = keys["device_keys"]
         logger.info(
             "Updating device_keys for device %r for user %s at %d",
             device_id,

@@ -940,7 +1006,11 @@
         await self.device_handler.check_device_registered(user_id, device_id)
 
     async def _upload_one_time_keys_for_user(
-        self, user_id: str, device_id: str, time_now: int, one_time_keys: JsonDict
+        self,
+        user_id: str,
+        device_id: str,
+        time_now: int,
+        one_time_keys: OneTimeKeys,
     ) -> None:
         # We take out a lock so that we don't have to worry about a client
         # sending duplicate requests.
@@ -1727,20 +1797,20 @@ def _exception_to_failure(e: Exception) -> JsonDict:
     return {"status": 503, "message": str(e)}
 
 
-def _one_time_keys_match(old_key_json: str, new_key: JsonDict) -> bool:
+def _one_time_keys_match(old_key_json: str, new_key: Union[str, KeyObject]) -> bool:
     old_key = json_decoder.decode(old_key_json)
 
     # if either is a string rather than an object, they must match exactly
-    if not isinstance(old_key, dict) or not isinstance(new_key, dict):
+    if isinstance(old_key, str) or isinstance(new_key, str):
         return old_key == new_key
 
-    # otherwise, we strip off the 'signatures' if any, because it's legitimate
-    # for different upload attempts to have different signatures.
-    old_key.pop("signatures", None)
-    new_key_copy = dict(new_key)
-    new_key_copy.pop("signatures", None)
+    # new_key must be a `KeyObject`
 
-    return old_key == new_key_copy
+    # Otherwise, check whether the embedded keys match.
+    #
+    # We ignore signatures, because it's legitimate for different upload
+    # attempts to have different signatures.
+    return old_key["key"] == new_key.key
 
 
 @attr.s(slots=True, auto_attribs=True)
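The rewritten comparison above now has only two shapes to consider: a bare string (legacy unsigned key) or a `KeyObject`. A usage sketch with illustrative values:

```python
from synapse.types import UserID

alice = UserID.from_string("@alice:example.com")

stored_json = (
    '{"key": "zKbLg+NrIjpnagy+pIY6uPL4ZwEG2v+8F9lmgsnlZzs",'
    ' "signatures": {"@alice:example.com": {"ed25519:JLAFKJWSCS": "sig-from-first-upload"}}}'
)

# Re-uploading the same key with a fresh signature still matches...
assert _one_time_keys_match(
    stored_json,
    KeyObject(
        key="zKbLg+NrIjpnagy+pIY6uPL4ZwEG2v+8F9lmgsnlZzs",
        signatures={alice: {"ed25519:JLAFKJWSCS": "sig-from-second-upload"}},
    ),
)

# ...whereas a different embedded key does not.
assert not _one_time_keys_match(
    stored_json,
    KeyObject(key="a-different-key", signatures={}),
)
```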
synapse/replication/http/devices.py

@@ -185,46 +185,6 @@ class ReplicationMultiUserDevicesResyncRestServlet(ReplicationEndpoint):
         return 200, multi_user_devices
 
 
-# FIXME(2025-07-22): Remove this on the next release, this will only get used
-# during rollout to Synapse 1.135 and can be removed after that release.
-class ReplicationUploadKeysForUserRestServlet(ReplicationEndpoint):
-    """Unused endpoint, kept for backwards compatibility during rollout."""
-
-    NAME = "upload_keys_for_user"
-    PATH_ARGS = ()
-    CACHE = False
-
-    def __init__(self, hs: "HomeServer"):
-        super().__init__(hs)
-
-        self.e2e_keys_handler = hs.get_e2e_keys_handler()
-        self.store = hs.get_datastores().main
-        self.clock = hs.get_clock()
-
-    @staticmethod
-    async def _serialize_payload(  # type: ignore[override]
-        user_id: str, device_id: str, keys: JsonDict
-    ) -> JsonDict:
-        return {
-            "user_id": user_id,
-            "device_id": device_id,
-            "keys": keys,
-        }
-
-    async def _handle_request(  # type: ignore[override]
-        self, request: Request, content: JsonDict
-    ) -> Tuple[int, JsonDict]:
-        user_id = content["user_id"]
-        device_id = content["device_id"]
-        keys = content["keys"]
-
-        results = await self.e2e_keys_handler.upload_keys_for_user(
-            user_id, device_id, keys
-        )
-
-        return 200, results
-
-
 class ReplicationHandleNewDeviceUpdateRestServlet(ReplicationEndpoint):
     """Wake up a device writer to send local device list changes as federation outbound pokes.
 

@@ -291,5 +251,4 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
     ReplicationNotifyUserSignatureUpdateRestServlet(hs).register(http_server)
     ReplicationMultiUserDevicesResyncRestServlet(hs).register(http_server)
     ReplicationHandleNewDeviceUpdateRestServlet(hs).register(http_server)
-    ReplicationUploadKeysForUserRestServlet(hs).register(http_server)
     ReplicationDeviceHandleRoomUnPartialStated(hs).register(http_server)
@@ -23,7 +23,25 @@
 import logging
 import re
 from collections import Counter
-from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple
+from typing import (
+    TYPE_CHECKING,
+    Annotated,
+    Any,
+    Dict,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    Union,
+)
+
+from pydantic import (
+    AfterValidator,
+    AliasChoices,
+    Field,
+    StrictBool,
+    StrictStr,
+)
 
 from synapse.api.auth.mas import MasDelegatedAuth
 from synapse.api.errors import (
@@ -31,9 +49,16 @@ from synapse.api.errors import (
     InvalidAPICallError,
     SynapseError,
 )
+from synapse.handlers.e2e_keys import (
+    DeviceKeys,
+    FallbackKeys,
+    KeyObject,
+    OneTimeKeys,
+)
 from synapse.http.server import HttpServer
 from synapse.http.servlet import (
     RestServlet,
+    parse_and_validate_json_object_from_request,
     parse_integer,
     parse_json_object_from_request,
     parse_string,
@@ -41,7 +66,8 @@ from synapse.http.servlet import (
 from synapse.http.site import SynapseRequest
 from synapse.logging.opentracing import log_kv, set_tag
 from synapse.rest.client._base import client_patterns, interactive_auth_handler
-from synapse.types import JsonDict, StreamToken
+from synapse.types import JsonDict, StreamToken, UserIDType
+from synapse.types.rest import RequestBodyModel
 from synapse.util.cancellation import cancellable
 
 if TYPE_CHECKING:
@@ -111,12 +137,94 @@ class KeyUploadServlet(RestServlet):
         self._clock = hs.get_clock()
         self._store = hs.get_datastores().main
 
+    class KeyUploadRequestBody(RequestBodyModel):
+        """
+        The body of a `POST /_matrix/client/v3/keys/upload` request.
+
+        Based on https://spec.matrix.org/v1.16/client-server-api/#post_matrixclientv3keysupload.
+        """
+
+        class DeviceKeys(RequestBodyModel):
+            algorithms: List[StrictStr]
+            """The encryption algorithms supported by this device."""
+
+            device_id: StrictStr
+            """The ID of the device these keys belong to. Must match the device ID used when logging in."""
+
+            keys: Mapping[StrictStr, StrictStr]
+            """
+            Public identity keys. The names of the properties should be in the
+            format `<algorithm>:<device_id>`. The keys themselves should be encoded as
+            specified by the key algorithm.
+            """
+
+            signatures: Mapping[UserIDType, Mapping[StrictStr, StrictStr]]
+            """Signatures for the device key object. A map from user ID, to a map from "<algorithm>:<device_id>" to the signature."""
+
+            user_id: UserIDType
+            """The ID of the user the device belongs to. Must match the user ID used when logging in."""
+
+        class KeyObject(RequestBodyModel):
+            key: StrictStr
+            """The key, encoded using unpadded base64."""
+
+            # TODO: Is this only allowed on fallback keys?
+            fallback: StrictBool = False
+            """Whether this is a fallback key."""
+
+            signatures: Mapping[UserIDType, Mapping[StrictStr, StrictStr]]
+            """Signature for the device. Mapped from user ID to another map of key signing identifier to the signature itself.
+
+            See the following for more detail: https://spec.matrix.org/v1.16/appendices/#signing-details
+            """
+
+        device_keys: Optional[DeviceKeys] = None
+        """Identity keys for the device. May be absent if no new identity keys are required."""
+
+        fallback_keys: Optional[Mapping[StrictStr, Union[StrictStr, KeyObject]]] = (
+            Field(
+                default_factory=lambda: None,
+                validation_alias=AliasChoices(
+                    "fallback_keys",
+                    # Accept this field alias, which is the unstable equivalent to
+                    # the `fallback_keys` field from MSC2732.
+                    "org.matrix.msc2732.fallback_keys",
+                ),
+                serialization_alias="fallback_keys",
+            )
+        )
+        """
+        The public key which should be used if the device's one-time keys are
+        exhausted. The fallback key is not deleted once used, but should be
+        replaced when additional one-time keys are being uploaded. The server
+        will notify the client of the fallback key being used through `/sync`.
+
+        There can only be at most one key per algorithm uploaded, and the server
+        will only persist one key per algorithm.
+
+        When uploading a signed key, an additional `fallback: true` key should be
+        included to denote that the key is a fallback key.
+
+        May be absent if a new fallback key is not required.
+        """
+
+        one_time_keys: Optional[Mapping[StrictStr, Union[StrictStr, KeyObject]]] = None
+        """
+        One-time public keys for “pre-key” messages. The names of the properties
+        should be in the format `<algorithm>:<key_id>`.
+
+        The format of the key is determined by the key algorithm, see:
+        https://spec.matrix.org/v1.16/client-server-api/#key-algorithms.
+        """
+
     async def on_POST(
         self, request: SynapseRequest, device_id: Optional[str]
     ) -> Tuple[int, JsonDict]:
         requester = await self.auth.get_user_by_req(request, allow_guest=True)
         user_id = requester.user.to_string()
-        body = parse_json_object_from_request(request)
+        body = parse_and_validate_json_object_from_request(
+            request, self.KeyUploadRequestBody
+        )
 
         if device_id is not None:
             # Providing the device_id should only be done for setting keys
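Note: a trimmed, self-contained sketch of the alias handling used by `fallback_keys` above (pydantic v2; the model and field shapes here are illustrative, not Synapse's). The unstable MSC2732 name is accepted on input, while serialisation always uses the stable name.

```python
from typing import Mapping, Optional

from pydantic import AliasChoices, BaseModel, ConfigDict, Field, StrictStr


class UploadBodySketch(BaseModel):
    model_config = ConfigDict(strict=True, frozen=True)

    fallback_keys: Optional[Mapping[StrictStr, StrictStr]] = Field(
        default=None,
        validation_alias=AliasChoices(
            "fallback_keys",
            "org.matrix.msc2732.fallback_keys",  # unstable MSC2732 alias
        ),
        serialization_alias="fallback_keys",
    )


stable = UploadBodySketch.model_validate(
    {"fallback_keys": {"signed_curve25519:AAAA": "k1"}}
)
unstable = UploadBodySketch.model_validate(
    {"org.matrix.msc2732.fallback_keys": {"signed_curve25519:AAAA": "k1"}}
)
assert stable == unstable  # both spellings accepted on input
assert "fallback_keys" in stable.model_dump(by_alias=True)  # stable name on output
```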
@@ -149,12 +257,75 @@ class KeyUploadServlet(RestServlet):
                 400, "To upload keys, you must pass device_id when authenticating"
             )
 
+        # Map the pydantic model to domain objects.
+        device_keys, fallback_keys, one_time_keys = (
+            self._map_pydantic_model_to_domain_objects(body)
+        )
+
         result = await self.e2e_keys_handler.upload_keys_for_user(
-            user_id=user_id, device_id=device_id, keys=body
+            user_id,
+            device_id,
+            device_keys,
+            fallback_keys,
+            one_time_keys,
         )
 
         return 200, result
 
+    def _map_pydantic_model_to_domain_objects(
+        self, body: KeyUploadRequestBody
+    ) -> Tuple[
+        Optional[DeviceKeys],
+        Optional[FallbackKeys],
+        Optional[OneTimeKeys],
+    ]:
+        """Map a validated pydantic model to internal data classes."""
+        device_keys: Optional[DeviceKeys] = None
+        if body.device_keys is not None:
+            device_keys = DeviceKeys(
+                algorithms=body.device_keys.algorithms,
+                device_id=body.device_keys.device_id,
+                keys=body.device_keys.keys,
+                signatures=body.device_keys.signatures,
+                user_id=body.device_keys.user_id,
+            )
+
+        fallback_keys: Optional[FallbackKeys] = None
+        if body.fallback_keys is not None:
+            fallback_keys = {}
+            for (
+                algorithm_and_key_id,
+                public_key_or_object,
+            ) in body.fallback_keys.items():
+                if isinstance(public_key_or_object, str):
+                    fallback_keys[algorithm_and_key_id] = public_key_or_object
+                else:
+                    fallback_key_object: KeyUploadServlet.KeyUploadRequestBody.KeyObject = public_key_or_object
+                    fallback_keys[algorithm_and_key_id] = KeyObject(
+                        key=fallback_key_object.key,
+                        signatures=fallback_key_object.signatures,
+                        fallback=fallback_key_object.fallback,
+                    )
+
+        one_time_keys: Optional[OneTimeKeys] = None
+        if body.one_time_keys is not None:
+            one_time_keys = {}
+            for (
+                algorithm_and_key_id,
+                public_key_or_object,
+            ) in body.one_time_keys.items():
+                if isinstance(public_key_or_object, str):
+                    one_time_keys[algorithm_and_key_id] = public_key_or_object
+                else:
+                    one_time_key_object: KeyUploadServlet.KeyUploadRequestBody.KeyObject = public_key_or_object
+                    one_time_keys[algorithm_and_key_id] = KeyObject(
+                        key=one_time_key_object.key,
+                        signatures=one_time_key_object.signatures,
+                        fallback=one_time_key_object.fallback,
+                    )
+
+        return device_keys, fallback_keys, one_time_keys
+
 
 class KeyQueryServlet(RestServlet):
     """
@@ -381,6 +552,105 @@ class SigningKeyUploadServlet(RestServlet):
         self.e2e_keys_handler = hs.get_e2e_keys_handler()
         self.auth_handler = hs.get_auth_handler()
 
+    class SigningKeyUploadRequestBody(RequestBodyModel):
+        """
+        The body of a `POST /_matrix/client/v3/keys/device_signing/upload` request.
+
+        Based on https://spec.matrix.org/v1.16/client-server-api/#post_matrixclientv3keysdevice_signingupload.
+        """
+
+        class AuthenticationData(RequestBodyModel):
+            session: StrictStr
+            """The value of the session key given by the homeserver."""
+
+            type: StrictStr
+            """
+            The authentication type that the client is attempting to
+            complete.
+
+            May be omitted if session is given, and the client is reissuing a
+            request which it believes has been completed out-of-band (for
+            example, via the fallback mechanism; see
+            https://spec.matrix.org/v1.16/client-server-api/#fallback).
+            """
+
+            # TODO: Other types...
+
+        # TODO: Make this a before type so that we can transform it into a single PublicKey?
+        @staticmethod
+        def validate_public_key(
+            public_key_object: Mapping[str, str],
+        ) -> Mapping[str, str]:
+            """Validates that the given mapping contains:
+
+            * Exactly one property.
+            * The name is in the form "x:y" and the value is in the form "y".
+            """
+            if len(public_key_object) != 1:
+                raise ValueError("Exactly one public key may be provided")
+
+            algorithm_and_ub64_pk, unpadded_base64_public_key = next(
+                iter(public_key_object.items())
+            )
+            if (
+                ":" not in algorithm_and_ub64_pk
+                or len(algorithm_and_ub64_pk.split(":")) != 2
+            ):
+                raise ValueError(
+                    "Property of public key is not in the form `<algorithm>:<unpadded_base64_public_key>`"
+                )
+            _algorithm, ub64_pk = algorithm_and_ub64_pk.split(":")
+
+            if ub64_pk != unpadded_base64_public_key:
+                raise ValueError(
+                    "Unpadded base64 public key in property and value portions of public key object do not match"
+                )
+
+            return public_key_object
+
+        PublicKey = Annotated[
+            Mapping[StrictStr, StrictStr], AfterValidator(validate_public_key)
+        ]
+        """A public key.
+
+        The object must have exactly one property, whose name is in the form
+        `<algorithm>:<unpadded_base64_public_key>`, and whose value is the
+        unpadded base64 public key.
+        """
+
+        class CrossSigningKey(RequestBodyModel):
+            keys: Mapping[StrictStr, StrictStr]
+            """The public key."""
+
+            # TODO: Optional for the master key, required for other keys. Subclass for master crosssigningkey?
+            signatures: Optional[Mapping[UserIDType, Mapping[StrictStr, StrictStr]]]
+            """Signatures of the key. Optional for the master key. Other keys must be signed by the user's master key."""
+
+            usage: List[StrictStr]
+            """What the key is used for."""
+
+            user_id: UserIDType
+            """The ID of the user the key belongs to."""
+
+        auth: AuthenticationData
+        """Additional authentication information for the user-interactive authentication API."""
+
+        master_key: Optional[CrossSigningKey]
+        """The user's master key."""
+
+        self_signing_key: Optional[CrossSigningKey]
+        """
+        The user's self-signing key. Must be signed by the accompanying
+        master key, or by the user's most recently uploaded master key if no
+        master key is included in the request.
+        """
+
+        user_signing_key: Optional[CrossSigningKey]
+        """
+        The user's user-signing key. Must be signed by the accompanying master
+        key, or by the user's most recently uploaded master key if no master key
+        is included in the request.
+        """
+
     @interactive_auth_handler
     async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         requester = await self.auth.get_user_by_req(request)
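Note: a minimal, runnable sketch of the `PublicKey` validation above (the `Holder` model and simplified checks are illustrative). The mapping must contain exactly one entry, and the property name must embed the same unpadded-base64 key as its value.

```python
from typing import Annotated, Mapping

from pydantic import AfterValidator, BaseModel, StrictStr, ValidationError


def validate_public_key(obj: Mapping[str, str]) -> Mapping[str, str]:
    if len(obj) != 1:
        raise ValueError("Exactly one public key may be provided")
    name, value = next(iter(obj.items()))
    parts = name.split(":")
    if len(parts) != 2:
        raise ValueError("Expected `<algorithm>:<unpadded_base64_public_key>`")
    if parts[1] != value:
        raise ValueError("Key in property name and value do not match")
    return obj


PublicKey = Annotated[
    Mapping[StrictStr, StrictStr], AfterValidator(validate_public_key)
]


class Holder(BaseModel):
    public_key: PublicKey


Holder(public_key={"ed25519:base64+bytes": "base64+bytes"})  # accepted
try:
    Holder(public_key={"ed25519:abc": "xyz"})  # mismatched value: rejected
except ValidationError:
    pass
```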
@@ -67,7 +67,7 @@ from synapse.util.cancellation import cancellable
 from synapse.util.iterutils import batch_iter
 
 if TYPE_CHECKING:
-    from synapse.handlers.e2e_keys import SignatureListItem
+    from synapse.handlers.e2e_keys import DeviceKeys, FallbackKeys, SignatureListItem
     from synapse.server import HomeServer
 
 
@@ -802,7 +802,10 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
     )
 
     async def set_e2e_fallback_keys(
-        self, user_id: str, device_id: str, fallback_keys: JsonDict
+        self,
+        user_id: str,
+        device_id: str,
+        fallback_keys: "FallbackKeys",
     ) -> None:
         """Set the user's e2e fallback keys.
 
@@ -829,7 +832,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
         txn: LoggingTransaction,
         user_id: str,
         device_id: str,
-        fallback_keys: JsonDict,
+        fallback_keys: "FallbackKeys",
     ) -> None:
         """Set the user's e2e fallback keys.
 
@@ -1650,16 +1653,20 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
     )
 
     async def set_e2e_device_keys(
-        self, user_id: str, device_id: str, time_now: int, device_keys: JsonDict
+        self,
+        user_id: str,
+        device_id: str,
+        time_now: int,
+        device_keys: "DeviceKeys",
     ) -> bool:
         """Stores device keys for a device. Returns whether there was a change
         or the keys were already in the database.
 
         Args:
             user_id: user_id of the user to store keys for
             device_id: device_id of the device to store keys for
             time_now: time at the request to store the keys
            device_keys: the keys to store
         """
 
         return await self.db_pool.runInteraction(
@@ -1677,7 +1684,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
         user_id: str,
         device_id: str,
         time_now: int,
-        device_keys: JsonDict,
+        device_keys: "DeviceKeys",
     ) -> bool:
         """Stores device keys for a device. Returns whether there was a change
         or the keys were already in the database.
@@ -27,6 +27,7 @@ from enum import Enum
 from typing import (
     TYPE_CHECKING,
     AbstractSet,
+    Annotated,
     Any,
     ClassVar,
     Dict,
@@ -48,6 +49,7 @@ from typing import (
 
 import attr
 from immutabledict import immutabledict
+from pydantic import BeforeValidator, PlainSerializer, WithJsonSchema
 from signedjson.key import decode_verify_key_bytes
 from signedjson.types import VerifyKey
 from typing_extensions import Self
@@ -361,6 +363,37 @@ class RoomIdWithDomain(DomainSpecificString):
     SIGIL = "!"
 
 
+def _parse_user_id(user_id_str: Any) -> Any:
+    if isinstance(user_id_str, str):
+        try:
+            return UserID.from_string(user_id_str)
+        except Exception as e:
+            raise ValueError(
+                f"Unable to parse string '{user_id_str}' as valid Matrix User ID"
+            ) from e
+    raise ValueError(f"Expected a string, found {type(user_id_str)}")
+
+
+UserIDType = Annotated[
+    UserID,
+    BeforeValidator(_parse_user_id),
+    PlainSerializer(lambda uid: uid.to_string(), return_type=str),
+    WithJsonSchema(
+        {
+            "type": "string",
+            "description": "Matrix User ID",
+            "pattern": r"^@[^:]+:[^:]+$",
+            "examples": ["@alice:example.org"],
+        }
+    ),
+]
+"""
+A User ID type that can be used in Pydantic models.
+
+Validates that the input value is a `str` and can be parsed as a Matrix User ID.
+"""
+
+
 # the set of urlsafe base64 characters, no padding.
 ROOM_ID_PATTERN_DOMAINLESS = re.compile(r"^[A-Za-z0-9\-_]{43}$")
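Note: a self-contained illustration of the `Annotated` pattern used for `UserIDType`, with a toy `UserID` class standing in for Synapse's. `BeforeValidator` parses the incoming string into a rich object; `PlainSerializer` turns it back into a string on dump.

```python
from typing import Annotated, Any

from pydantic import (
    BaseModel,
    BeforeValidator,
    ConfigDict,
    PlainSerializer,
    ValidationError,
)


class UserID:
    # Toy stand-in for synapse.types.UserID.
    def __init__(self, localpart: str, domain: str) -> None:
        self.localpart, self.domain = localpart, domain

    @classmethod
    def from_string(cls, s: str) -> "UserID":
        if not s.startswith("@") or ":" not in s:
            raise ValueError(f"Invalid user ID: {s}")
        localpart, domain = s[1:].split(":", 1)
        return cls(localpart, domain)

    def to_string(self) -> str:
        return f"@{self.localpart}:{self.domain}"


def _parse_user_id(value: Any) -> Any:
    if isinstance(value, str):
        return UserID.from_string(value)
    raise ValueError(f"Expected a string, found {type(value)}")


UserIDType = Annotated[
    UserID,
    BeforeValidator(_parse_user_id),
    PlainSerializer(lambda uid: uid.to_string(), return_type=str),
]


class Event(BaseModel):
    # The wrapped UserID is an arbitrary class, hence this config flag.
    model_config = ConfigDict(arbitrary_types_allowed=True)

    sender: UserIDType


event = Event.model_validate({"sender": "@alice:example.org"})
assert event.sender.domain == "example.org"
assert event.model_dump() == {"sender": "@alice:example.org"}
try:
    Event.model_validate({"sender": "not-a-user-id"})
except ValidationError:
    pass  # rejected by the BeforeValidator
```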
@@ -36,8 +36,8 @@ from typing import (
 )
 
 import attr
+from pydantic import ConfigDict
 
-from synapse._pydantic_compat import Extra
 from synapse.api.constants import EventTypes
 from synapse.events import EventBase
 from synapse.types import (
@@ -52,6 +52,7 @@ from synapse.types import (
     StreamToken,
     ThreadSubscriptionsToken,
     UserID,
+    UserIDType,
 )
 from synapse.types.rest.client import SlidingSyncBody
 
@@ -67,17 +68,12 @@ class SlidingSyncConfig(SlidingSyncBody):
     extra fields that we need in the handler
     """
 
-    user: UserID
+    user: UserIDType
     requester: Requester
 
-    # Pydantic config
-    class Config:
-        # By default, ignore fields that we don't recognise.
-        extra = Extra.ignore
-        # By default, don't allow fields to be reassigned after parsing.
-        allow_mutation = False
-        # Allow custom types like `UserID` to be used in the model
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True
+    )
 
 
 class OperationType(Enum):
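Note: the `class Config` to `model_config = ConfigDict(...)` change is the standard pydantic v1-to-v2 migration. A minimal sketch of the equivalent spellings (class name illustrative):

```python
from pydantic import BaseModel, ConfigDict, ValidationError


class V2Style(BaseModel):
    # v2 spellings of the removed v1 Config options:
    #   extra = Extra.ignore   ->  extra="ignore"
    #   allow_mutation = False ->  frozen=True
    model_config = ConfigDict(extra="ignore", frozen=True)

    name: str


m = V2Style.model_validate({"name": "a", "unknown": 1})  # extra key ignored
try:
    m.name = "b"  # frozen model: assignment rejected
except ValidationError:
    pass
```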
@@ -18,8 +18,24 @@
 # [This file includes modifications made by New Vector Limited]
 #
 #
+from pydantic import ConfigDict
+
 from synapse.util.pydantic_models import ParseModel
 
 
 class RequestBodyModel(ParseModel):
-    pass
+    model_config = ConfigDict(
+        # Allow custom types like `UserIDType` to be used in the model
+        arbitrary_types_allowed=True,
+        # By default, do not allow coercing field types.
+        #
+        # This saves subclassing models from needing to write e.g. "StrictStr"
+        # instead of "str" in their fields.
+        #
+        # To revert to "lax" mode for a given field, use:
+        #
+        # ```
+        # my_field: Annotated[str, Field(strict=False)]
+        # ```
+        strict=True,
+    )
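Note: a sketch of what `strict=True` buys (field names illustrative): values are no longer silently coerced, and an individual field can opt back into lax parsing with `Field(strict=False)`.

```python
from typing import Annotated

from pydantic import BaseModel, ConfigDict, Field, ValidationError


class StrictBody(BaseModel):
    model_config = ConfigDict(strict=True)

    device_id: str
    retry_count: Annotated[int, Field(strict=False)]  # lax: "3" -> 3


StrictBody.model_validate({"device_id": "DEV", "retry_count": "3"})  # ok
try:
    StrictBody.model_validate({"device_id": 123, "retry_count": 3})
except ValidationError:
    pass  # 123 is not coerced to "123" in strict mode
```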
@@ -20,6 +20,8 @@
 #
 from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union
 
+from pydantic import ConfigDict
+
 from synapse._pydantic_compat import (
     Extra,
     Field,
@@ -123,6 +125,10 @@ class SlidingSyncBody(RequestBodyModel):
         extensions: Extensions API. A map of extension key to extension config.
     """
 
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True
+    )
+
     class CommonRoomParameters(RequestBodyModel):
         """
         Common parameters shared between the sliding window and room subscription APIs.
@@ -40,7 +40,7 @@ class ParseModel(BaseModel):
         # By default, ignore fields that we don't recognise.
         extra = Extra.ignore
         # By default, don't allow fields to be reassigned after parsing.
-        allow_mutation = False
+        frozen = True
 
 
 class AnyEventId(StrictStr):
@@ -21,7 +21,9 @@
 
 from twisted.internet.testing import MemoryReactor
 
+from synapse.handlers.e2e_keys import DeviceKeys
 from synapse.server import HomeServer
+from synapse.types import UserID
 from synapse.util import Clock
 
 from tests.unittest import HomeserverTestCase
@@ -30,47 +32,55 @@ from tests.unittest import HomeserverTestCase
 class EndToEndKeyStoreTestCase(HomeserverTestCase):
     def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.store = hs.get_datastores().main
+        self.now_ms = 1470174257070
+        self.test_user_id = "@alice:test"
+        self.test_device_id = "TEST_DEVICE"
+        self.test_device_keys = self._create_test_device_keys(self.test_user_id, self.test_device_id)
+
+    def _create_test_device_keys(self, user_id: str, device_id: str, public_key: str = "test_public_key") -> DeviceKeys:
+        """Create and return a test `DeviceKeys` object."""
+        return DeviceKeys(
+            algorithms=["ed25519"],
+            device_id=device_id,
+            keys={
+                f"ed25519:{device_id}": public_key,
+            },
+            signatures={},
+            user_id=UserID.from_string(user_id),
+        )
 
     def test_key_without_device_name(self) -> None:
-        now = 1470174257070
-        json = {"key": "value"}
-
-        self.get_success(self.store.store_device("user", "device", None))
-
-        self.get_success(self.store.set_e2e_device_keys("user", "device", now, json))
+        self.get_success(self.store.store_device(self.test_user_id, self.test_device_id, None))
+
+        self.get_success(self.store.set_e2e_device_keys(self.test_user_id, self.test_device_id, self.now_ms, self.test_device_keys))
 
         res = self.get_success(
-            self.store.get_e2e_device_keys_for_cs_api((("user", "device"),))
+            self.store.get_e2e_device_keys_for_cs_api(((self.test_user_id, self.test_device_id),))
         )
-        self.assertIn("user", res)
-        self.assertIn("device", res["user"])
-        dev = res["user"]["device"]
-        self.assertLessEqual(json.items(), dev.items())
+        self.assertIn(self.test_user_id, res)
+        self.assertIn(self.test_device_id, res[self.test_user_id])
+        device_keys = res[self.test_user_id][self.test_device_id]
+
+        print(device_keys)
 
     def test_reupload_key(self) -> None:
-        now = 1470174257070
-        json = {"key": "value"}
-
         self.get_success(self.store.store_device("user", "device", None))
 
         changed = self.get_success(
-            self.store.set_e2e_device_keys("user", "device", now, json)
+            self.store.set_e2e_device_keys("user", "device", self.now_ms, self.test_device_keys)
         )
         self.assertTrue(changed)
 
         # If we try to upload the same key then we should be told nothing
         # changed
         changed = self.get_success(
-            self.store.set_e2e_device_keys("user", "device", now, json)
+            self.store.set_e2e_device_keys("user", "device", self.now_ms, self.test_device_keys)
        )
         self.assertFalse(changed)
 
     def test_get_key_with_device_name(self) -> None:
-        now = 1470174257070
-        json = {"key": "value"}
-
-        self.get_success(self.store.set_e2e_device_keys("user", "device", now, json))
-        self.get_success(self.store.store_device("user", "device", "display_name"))
+        self.get_success(self.store.set_e2e_device_keys(self.test_user_id, self.test_device_id, self.now_ms, self.test_device_keys))
+        self.get_success(self.store.store_device(self.test_user_id, self.test_device_id, "display_name"))
 
         res = self.get_success(
             self.store.get_e2e_device_keys_for_cs_api((("user", "device"),))
@@ -87,34 +97,37 @@ class EndToEndKeyStoreTestCase(HomeserverTestCase):
         )
 
     def test_multiple_devices(self) -> None:
-        now = 1470174257070
+        user_one = "@user1:test"
+        user_two = "@user2:test"
+        device_id_one = "DEVICE_ID_1"
+        device_id_two = "DEVICE_ID_2"
 
-        self.get_success(self.store.store_device("user1", "device1", None))
-        self.get_success(self.store.store_device("user1", "device2", None))
-        self.get_success(self.store.store_device("user2", "device1", None))
-        self.get_success(self.store.store_device("user2", "device2", None))
+        self.get_success(self.store.store_device(user_one, device_id_one, None))
+        self.get_success(self.store.store_device(user_one, device_id_two, None))
+        self.get_success(self.store.store_device(user_two, device_id_one, None))
+        self.get_success(self.store.store_device(user_two, device_id_two, None))
 
         self.get_success(
-            self.store.set_e2e_device_keys("user1", "device1", now, {"key": "json11"})
+            self.store.set_e2e_device_keys(user_one, device_id_one, self.now_ms, self._create_test_device_keys(user_one, device_id_one, "json11"))
         )
         self.get_success(
-            self.store.set_e2e_device_keys("user1", "device2", now, {"key": "json12"})
+            self.store.set_e2e_device_keys(user_one, device_id_two, self.now_ms, self._create_test_device_keys(user_one, device_id_two, "json12"))
         )
         self.get_success(
-            self.store.set_e2e_device_keys("user2", "device1", now, {"key": "json21"})
+            self.store.set_e2e_device_keys(user_two, device_id_one, self.now_ms, self._create_test_device_keys(user_two, device_id_one, "json21"))
         )
         self.get_success(
-            self.store.set_e2e_device_keys("user2", "device2", now, {"key": "json22"})
+            self.store.set_e2e_device_keys(user_two, device_id_two, self.now_ms, self._create_test_device_keys(user_two, device_id_two, "json22"))
         )
 
         res = self.get_success(
             self.store.get_e2e_device_keys_for_cs_api(
-                (("user1", "device1"), ("user2", "device2"))
+                ((user_one, device_id_one), (user_two, device_id_two))
             )
         )
-        self.assertIn("user1", res)
-        self.assertIn("device1", res["user1"])
-        self.assertNotIn("device2", res["user1"])
-        self.assertIn("user2", res)
-        self.assertNotIn("device1", res["user2"])
-        self.assertIn("device2", res["user2"])
+        self.assertIn(user_one, res)
+        self.assertIn(device_id_one, res[user_one])
+        self.assertNotIn(device_id_two, res[user_one])
+        self.assertIn(user_two, res)
+        self.assertNotIn(device_id_one, res[user_two])
+        self.assertIn(device_id_two, res[user_two])