From 1bc4feb6c9762216e930daf0ddbdb86c77bf7724 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 21 Mar 2023 14:19:54 -0400 Subject: [PATCH 001/106] Apply & bundle edits for non-message events. (#15295) --- changelog.d/15295.bugfix | 1 + synapse/storage/databases/main/relations.py | 11 ++++------- 2 files changed, 5 insertions(+), 7 deletions(-) create mode 100644 changelog.d/15295.bugfix diff --git a/changelog.d/15295.bugfix b/changelog.d/15295.bugfix new file mode 100644 index 000000000000..e437ef3a01a1 --- /dev/null +++ b/changelog.d/15295.bugfix @@ -0,0 +1 @@ +Fix a long-standing bug where edits of non-`m.room.message` events would not be correctly bundled or have their new content applied. diff --git a/synapse/storage/databases/main/relations.py b/synapse/storage/databases/main/relations.py index bc3a83919c34..3955a8a9a589 100644 --- a/synapse/storage/databases/main/relations.py +++ b/synapse/storage/databases/main/relations.py @@ -472,12 +472,11 @@ async def get_applicable_edits( the event will map to None. """ - # We only allow edits for `m.room.message` events that have the same sender - # and event type. We can't assert these things during regular event auth so - # we have to do the checks post hoc. + # We only allow edits for events that have the same sender and event type. + # We can't assert these things during regular event auth so we have to do + # the checks post hoc. - # Fetches latest edit that has the same type and sender as the - # original, and is an `m.room.message`. + # Fetches latest edit that has the same type and sender as the original. if isinstance(self.database_engine, PostgresEngine): # The `DISTINCT ON` clause will pick the *first* row it encounters, # so ordering by origin server ts + event ID desc will ensure we get @@ -493,7 +492,6 @@ async def get_applicable_edits( WHERE %s AND relation_type = ? - AND edit.type = 'm.room.message' ORDER by original.event_id DESC, edit.origin_server_ts DESC, edit.event_id DESC """ else: @@ -512,7 +510,6 @@ async def get_applicable_edits( WHERE %s AND relation_type = ? - AND edit.type = 'm.room.message' ORDER by edit.origin_server_ts, edit.event_id """ From 9b1f99ba6b5a3e508861fa02ad1101a38aa71e6c Mon Sep 17 00:00:00 2001 From: David Robertson Date: Tue, 21 Mar 2023 18:55:08 +0000 Subject: [PATCH 002/106] Remind maintainer to ask #synapse-dev for changelog (#15303) * Remind maintainer to ask #synapse-dev for changelog * Changelog --- changelog.d/15303.misc | 1 + scripts-dev/release.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15303.misc diff --git a/changelog.d/15303.misc b/changelog.d/15303.misc new file mode 100644 index 000000000000..977b9dcd02f8 --- /dev/null +++ b/changelog.d/15303.misc @@ -0,0 +1 @@ +Remind the releaser to ask for changelog feedback in [#synapse-dev](https://matrix.to/#/#synapse-dev:matrix.org). 
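An aside on the `DISTINCT ON` query in patch 001 above: Postgres keeps only the *first* row it encounters for each distinct value, so sorting the candidate edits newest-first leaves exactly the latest edit standing. A minimal, self-contained sketch of the same pattern follows; the `edits` table and its columns are invented for illustration, and it assumes a reachable Postgres database (Synapse's real query joins `events` against `event_relations` instead).

```python
# Toy demonstration of the DISTINCT ON ordering trick from patch 001.
import psycopg2  # assumption: a local Postgres instance with a toy `edits` table

PICK_LATEST_PER_ORIGINAL = """
    SELECT DISTINCT ON (original_id) original_id, edit_id
    FROM edits
    -- DISTINCT ON keeps the first row per original_id; ordering the
    -- candidate edits newest-first makes that surviving row the latest edit.
    ORDER BY original_id, ts DESC, edit_id DESC
"""

with psycopg2.connect("dbname=scratch") as conn, conn.cursor() as cur:
    cur.execute(PICK_LATEST_PER_ORIGINAL)
    for original_id, edit_id in cur.fetchall():
        print(f"{original_id} -> latest edit {edit_id}")
```

The SQLite branch of the same patch cannot use `DISTINCT ON`, which is presumably why it falls back to a plain ascending `ORDER BY` and leaves keeping the last row per original to the calling code.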
diff --git a/scripts-dev/release.py b/scripts-dev/release.py index 008a5bd965d4..ec92a59bb8be 100755 --- a/scripts-dev/release.py +++ b/scripts-dev/release.py @@ -280,7 +280,7 @@ def _prepare() -> None: ) print("Opening the changelog in your browser...") - print("Please ask others to give it a check.") + print("Please ask #synapse-dev to give it a check.") click.launch( f"https://github.com/matrix-org/synapse/blob/{synapse_repo.active_branch.name}/CHANGES.md" ) From 882911a863f2b239a91846a03bbf5674f2d31862 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Tue, 21 Mar 2023 18:58:38 +0000 Subject: [PATCH 003/106] Allow running twisted trunk against other branches (#15302) * Allow running twisted trunk against other branches I would like to do this so we can try Synapse's typechecking against a specific branch that the project solicited tests for, see https://mail.python.org/archives/list/twisted@python.org/message/GGO5JHA5S475AK6JZ3GCC3GIHGKQYM6Y/ * Changelog --- .github/workflows/twisted_trunk.yml | 9 ++++++++- changelog.d/15302.misc | 1 + 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15302.misc diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index db514571c4af..7999dd956dec 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -5,6 +5,13 @@ on: - cron: 0 8 * * * workflow_dispatch: + inputs: + twisted_ref: + description: Commit, branch or tag to checkout from upstream Twisted. + required: false + default: 'trunk' + type: string + concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -29,7 +36,7 @@ jobs: extras: "all" - run: | poetry remove twisted - poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk + poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref }} poetry install --no-interaction --extras "all test" - name: Remove warn_unused_ignores from mypy config run: sed '/warn_unused_ignores = True/d' -i mypy.ini diff --git a/changelog.d/15302.misc b/changelog.d/15302.misc new file mode 100644 index 000000000000..aadadf4b43ed --- /dev/null +++ b/changelog.d/15302.misc @@ -0,0 +1 @@ +Allow running the Twisted trunk job against other branches. From 8f2a3cbb70671c7074aee2482eab5a0b58aa0949 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 21 Mar 2023 17:05:47 -0400 Subject: [PATCH 004/106] Update 15295.bugfix --- changelog.d/15295.bugfix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog.d/15295.bugfix b/changelog.d/15295.bugfix index e437ef3a01a1..af43035383bf 100644 --- a/changelog.d/15295.bugfix +++ b/changelog.d/15295.bugfix @@ -1 +1 @@ -Fix a long-standing bug where edits of non-`m.room.message` events would not be correctly bundled or have their new content applied. +Fix a long-standing bug where edits of non-`m.room.message` events would not be correctly bundled. From b32014578a48dfa488a94036cabc7e3137c3b95a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Mar 2023 22:37:42 +0000 Subject: [PATCH 005/106] Bump sentry-sdk from 1.15.0 to 1.17.0 (#15285) * Bump sentry-sdk from 1.15.0 to 1.17.0 Bumps [sentry-sdk](https://github.com/getsentry/sentry-python) from 1.15.0 to 1.17.0. 
- [Release notes](https://github.com/getsentry/sentry-python/releases) - [Changelog](https://github.com/getsentry/sentry-python/blob/master/CHANGELOG.md) - [Commits](https://github.com/getsentry/sentry-python/compare/1.15.0...1.17.0) --- updated-dependencies: - dependency-name: sentry-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: Mathieu Velten --- changelog.d/15285.misc | 1 + poetry.lock | 7 ++++--- 2 files changed, 5 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15285.misc diff --git a/changelog.d/15285.misc b/changelog.d/15285.misc new file mode 100644 index 000000000000..ad635ef3f29d --- /dev/null +++ b/changelog.d/15285.misc @@ -0,0 +1 @@ +Bump sentry-sdk from 1.15.0 to 1.17.0. diff --git a/poetry.lock b/poetry.lock index dc44e45a2cd2..ff8b43bac766 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2054,14 +2054,14 @@ doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "sentry-sdk" -version = "1.15.0" +version = "1.17.0" description = "Python client for Sentry (https://sentry.io)" category = "main" optional = true python-versions = "*" files = [ - {file = "sentry-sdk-1.15.0.tar.gz", hash = "sha256:69ecbb2e1ff4db02a06c4f20f6f69cb5dfe3ebfbc06d023e40d77cf78e9c37e7"}, - {file = "sentry_sdk-1.15.0-py2.py3-none-any.whl", hash = "sha256:7ad4d37dd093f4a7cb5ad804c6efe9e8fab8873f7ffc06042dc3f3fd700a93ec"}, + {file = "sentry-sdk-1.17.0.tar.gz", hash = "sha256:ad40860325c94d1a656da70fba5a7c4dbb2f6809d3cc2d00f74ca0b608330f14"}, + {file = "sentry_sdk-1.17.0-py2.py3-none-any.whl", hash = "sha256:3c4e898f7a3edf5a2042cd0dcab6ee124e2112189228c272c08ad15d3850c201"}, ] [package.dependencies] @@ -2070,6 +2070,7 @@ urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} [package.extras] aiohttp = ["aiohttp (>=3.5)"] +arq = ["arq (>=0.23)"] beam = ["apache-beam (>=2.12)"] bottle = ["bottle (>=0.12.13)"] celery = ["celery (>=3)"] From 72f3f23c4db3f549a1aa060a7e98ba07812e570b Mon Sep 17 00:00:00 2001 From: Shay Date: Tue, 21 Mar 2023 17:59:55 -0700 Subject: [PATCH 006/106] Change the parameter `immediate` of `send_device_messages` to default to `True` (#15297) --- changelog.d/15297.bugfix | 1 + synapse/federation/send_queue.py | 2 +- synapse/federation/sender/__init__.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/15297.bugfix diff --git a/changelog.d/15297.bugfix b/changelog.d/15297.bugfix new file mode 100644 index 000000000000..b5735fe4da85 --- /dev/null +++ b/changelog.d/15297.bugfix @@ -0,0 +1 @@ +Fix a bug introduced in Synapse v1.55.0 which could delay remote homeservers being able to decrypt encrypted messages sent by local users. diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index 3063df799099..0b7c81677ebd 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -244,7 +244,7 @@ def send_presence_to_destinations( self.notifier.on_new_replication_data() - def send_device_messages(self, destination: str, immediate: bool = False) -> None: + def send_device_messages(self, destination: str, immediate: bool = True) -> None: """As per FederationSender""" # We don't need to replicate this as it gets sent down a different # stream. 
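An aside on patch 006, whose second hunk follows below: the behavioural difference is easiest to see in a toy model. Everything here is invented for illustration (this is not Synapse's real `FederationSender`); only the `immediate` flag mirrors the patch. With `immediate=False`, queued device messages wait for unrelated traffic to wake the destination, which is the decryption delay the changelog describes.

```python
from typing import List


class ToyFederationSender:
    """Illustrative stand-in for a per-destination federation queue."""

    def __init__(self) -> None:
        self.woken: List[str] = []

    def send_device_messages(self, destination: str, immediate: bool = True) -> None:
        if immediate:
            # Kick off a transaction straight away, as the new default does.
            self._wake_destination(destination)
        # With immediate=False the messages merely sit in the queue until a
        # later event or EDU for this destination happens to wake it.

    def _wake_destination(self, destination: str) -> None:
        self.woken.append(destination)


sender = ToyFederationSender()
sender.send_device_messages("fast.example.org")                    # sent promptly
sender.send_device_messages("quiet.example.org", immediate=False)  # may stall
print(sender.woken)  # ['fast.example.org']
```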
diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py index 43421a9c727b..106daa91844f 100644 --- a/synapse/federation/sender/__init__.py +++ b/synapse/federation/sender/__init__.py @@ -783,7 +783,7 @@ def send_edu(self, edu: Edu, key: Optional[Hashable]) -> None: else: queue.send_edu(edu) - def send_device_messages(self, destination: str, immediate: bool = False) -> None: + def send_device_messages(self, destination: str, immediate: bool = True) -> None: if destination == self.server_name: logger.warning("Not sending device update to ourselves") return From 1bc9985eb7feca2a8eb4a0125cd03dfa3ac631fe Mon Sep 17 00:00:00 2001 From: David Robertson Date: Wed, 22 Mar 2023 12:53:55 +0000 Subject: [PATCH 007/106] Have replication clients remove _INT_STREAM_POS (#15309) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Have replication clients remove _INT_STREAM_POS Suppose worker A makes an internal http request to worker B. B may make changes that A later learns about over replication. We want A's request to block until it has seen those changes—mainly to ensure A's caches are invalidated promptly. This helps provide read-after-write consistency, eliminating entire categories of races and test flakes. To implement this, B includes a top-level field `_INT_STREAM_POS` in its response JSON. Roughly speaking, the field's value tells A what to wait for. But we weren't removing that internal field before A's request completed! Introduced in https://github.com/matrix-org/synapse/pull/14820. Fixes #15308. * Changelog --- changelog.d/15309.bugfix | 1 + synapse/replication/http/_base.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15309.bugfix diff --git a/changelog.d/15309.bugfix b/changelog.d/15309.bugfix new file mode 100644 index 000000000000..4d3fe4e4b10b --- /dev/null +++ b/changelog.d/15309.bugfix @@ -0,0 +1 @@ +Fix a bug introduced in Synapse 1.76.0 where responses from worker deployments could include an internal `_INT_STREAM_POS` key. diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index c20d9c7e9da7..8c2c54c07a49 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -345,7 +345,7 @@ async def send_request(*, instance_name: str = "master", **kwargs: Any) -> Any: _outgoing_request_counter.labels(cls.NAME, 200).inc() # Wait on any streams that the remote may have written to. - for stream_name, position in result.get( + for stream_name, position in result.pop( _STREAM_POSITION_KEY, {} ).items(): await replication.wait_for_stream_position( From 4b8c9c340c5aa46f063bf81159f0c3a7fbb0a47a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 Mar 2023 15:27:36 +0000 Subject: [PATCH 008/106] Bump dtolnay/rust-toolchain from e12eda571dc9a5ee5d58eecf4738ec291c66f295 to fc3253060d0c959bea12a59f10f8391454a0b02d (#15304) * Bump dtolnay/rust-toolchain Bumps [dtolnay/rust-toolchain](https://github.com/dtolnay/rust-toolchain) from e12eda571dc9a5ee5d58eecf4738ec291c66f295 to fc3253060d0c959bea12a59f10f8391454a0b02d. - [Release notes](https://github.com/dtolnay/rust-toolchain/releases) - [Commits](https://github.com/dtolnay/rust-toolchain/compare/e12eda571dc9a5ee5d58eecf4738ec291c66f295...fc3253060d0c959bea12a59f10f8391454a0b02d) --- updated-dependencies: - dependency-name: dtolnay/rust-toolchain dependency-type: direct:production ...
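Returning to patch 007 for a moment before the sign-offs below: the entire fix is `get` becoming `pop`. A small sketch of why consuming the key matters; the response dict is fabricated, and only the `_INT_STREAM_POS` key comes from the patch.

```python
# Fabricated worker response; only the _INT_STREAM_POS key mirrors the patch.
result = {"event_id": "$abc123", "_INT_STREAM_POS": {"events": 42}}

# dict.pop reads the internal bookkeeping *and* strips it in one step, so the
# stream positions are still waited on but the key never reaches the caller.
for stream_name, position in result.pop("_INT_STREAM_POS", {}).items():
    print(f"waiting for stream {stream_name!r} to reach {position}")

print(result)  # {'event_id': '$abc123'} - no internal key leaks out
```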
Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- .github/workflows/latest_deps.yml | 6 +++--- .github/workflows/tests.yml | 18 +++++++++--------- .github/workflows/twisted_trunk.yml | 6 +++--- changelog.d/15304.misc | 1 + 4 files changed, 16 insertions(+), 15 deletions(-) create mode 100644 changelog.d/15304.misc diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index 6da7c22e4c33..d5a68ffa1f12 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -27,7 +27,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295 + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d with: toolchain: stable - uses: Swatinem/rust-cache@v2 @@ -61,7 +61,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295 + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d with: toolchain: stable - uses: Swatinem/rust-cache@v2 @@ -134,7 +134,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295 + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d with: toolchain: stable - uses: Swatinem/rust-cache@v2 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index d2b1d75536bf..7f57f046fc87 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -113,7 +113,7 @@ jobs: # There don't seem to be versioned releases of this action per se: for each rust # version there is a branch which gets constantly rebased on top of master. # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295 + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d with: toolchain: 1.58.1 components: clippy @@ -135,7 +135,7 @@ jobs: # There don't seem to be versioned releases of this action per se: for each rust # version there is a branch which gets constantly rebased on top of master. # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295 + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d with: toolchain: nightly-2022-12-01 components: clippy @@ -155,7 +155,7 @@ jobs: # There don't seem to be versioned releases of this action per se: for each rust # version there is a branch which gets constantly rebased on top of master. # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295 + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d with: # We use nightly so that it correctly groups together imports toolchain: nightly-2022-12-01 @@ -223,7 +223,7 @@ jobs: # There don't seem to be versioned releases of this action per se: for each rust # version there is a branch which gets constantly rebased on top of master. # We pin to a specific commit for paranoia's sake. 
- uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295 + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d with: toolchain: 1.58.1 - uses: Swatinem/rust-cache@v2 @@ -268,7 +268,7 @@ jobs: # There don't seem to be versioned releases of this action per se: for each rust # version there is a branch which gets constantly rebased on top of master. # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295 + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d with: toolchain: 1.58.1 - uses: Swatinem/rust-cache@v2 @@ -389,7 +389,7 @@ jobs: # There don't seem to be versioned releases of this action per se: for each rust # version there is a branch which gets constantly rebased on top of master. # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295 + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d with: toolchain: 1.58.1 - uses: Swatinem/rust-cache@v2 @@ -534,7 +534,7 @@ jobs: # There don't seem to be versioned releases of this action per se: for each rust # version there is a branch which gets constantly rebased on top of master. # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295 + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d with: toolchain: 1.58.1 - uses: Swatinem/rust-cache@v2 @@ -565,7 +565,7 @@ jobs: # There don't seem to be versioned releases of this action per se: for each rust # version there is a branch which gets constantly rebased on top of master. # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295 + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d with: toolchain: 1.58.1 - uses: Swatinem/rust-cache@v2 @@ -588,7 +588,7 @@ jobs: # There don't seem to be versioned releases of this action per se: for each rust # version there is a branch which gets constantly rebased on top of master. # We pin to a specific commit for paranoia's sake. 
- uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295 + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d with: toolchain: nightly-2022-12-01 - uses: Swatinem/rust-cache@v2 diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index 7999dd956dec..461c85067ca4 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -25,7 +25,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295 + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d with: toolchain: stable - uses: Swatinem/rust-cache@v2 @@ -50,7 +50,7 @@ jobs: - run: sudo apt-get -qq install xmlsec1 - name: Install Rust - uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295 + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d with: toolchain: stable - uses: Swatinem/rust-cache@v2 @@ -89,7 +89,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295 + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d with: toolchain: stable - uses: Swatinem/rust-cache@v2 diff --git a/changelog.d/15304.misc b/changelog.d/15304.misc new file mode 100644 index 000000000000..38abb87a88bf --- /dev/null +++ b/changelog.d/15304.misc @@ -0,0 +1 @@ +Bump dtolnay/rust-toolchain from e12eda571dc9a5ee5d58eecf4738ec291c66f295 to fc3253060d0c959bea12a59f10f8391454a0b02d. From 7f02fafa28b112fe9d136dc3fcb8799f5958fb95 Mon Sep 17 00:00:00 2001 From: Shay Date: Wed, 22 Mar 2023 08:36:42 -0700 Subject: [PATCH 009/106] Add a check to SQLite port DB script to ensure that the sqlite database passed to the script exists before trying to port from it (#15306) --- changelog.d/15306.bugfix | 2 ++ synapse/_scripts/synapse_port_db.py | 8 ++++++++ 2 files changed, 10 insertions(+) create mode 100644 changelog.d/15306.bugfix diff --git a/changelog.d/15306.bugfix b/changelog.d/15306.bugfix new file mode 100644 index 000000000000..f5eb716f12c5 --- /dev/null +++ b/changelog.d/15306.bugfix @@ -0,0 +1,2 @@ +Add a check to [SQLite port_db script](https://matrix-org.github.io/synapse/latest/postgres.html#porting-from-sqlite) +to ensure that the sqlite database passed to the script exists before trying to port from it. diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index 78d76d38ad90..94b86c1d6ff5 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -18,6 +18,7 @@ import argparse import curses import logging +import os import sys import time import traceback @@ -1326,6 +1327,13 @@ def main() -> None: filename="port-synapse.log" if args.curses else None, ) + if not os.path.isfile(args.sqlite_database): + sys.stderr.write( + "The sqlite database you specified does not exist, please check that you have the " "correct path."
+ ) + sys.exit(1) + sqlite_config = { "name": "sqlite3", "args": { From 3b0083c92adf76daf4161908565de9e5efc08074 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Wed, 22 Mar 2023 17:15:34 +0000 Subject: [PATCH 010/106] Use immutabledict instead of frozendict (#15113) Additionally: * Consistently use `freeze()` in test --------- Co-authored-by: Patrick Cloke Co-authored-by: 6543 <6543@obermui.de> --- changelog.d/15113.misc | 1 + poetry.lock | 125 +++-------------------- pyproject.toml | 12 +-- stubs/frozendict.pyi | 39 ------- synapse/__init__.py | 19 +++- synapse/crypto/event_signing.py | 2 +- synapse/events/snapshot.py | 4 +- synapse/events/utils.py | 2 +- synapse/events/validator.py | 2 +- synapse/state/__init__.py | 10 +- synapse/storage/databases/main/stream.py | 4 +- synapse/types/__init__.py | 12 +-- synapse/types/state.py | 26 ++--- synapse/util/__init__.py | 20 ++-- synapse/util/frozenutils.py | 6 +- tests/api/test_filtering.py | 6 +- tests/config/test_workers.py | 6 +- tests/push/test_push_rule_evaluator.py | 18 ++-- tests/storage/test_state.py | 40 ++++---- tests/types/test_state.py | 14 +-- 20 files changed, 124 insertions(+), 244 deletions(-) create mode 100644 changelog.d/15113.misc delete mode 100644 stubs/frozendict.pyi diff --git a/changelog.d/15113.misc b/changelog.d/15113.misc new file mode 100644 index 000000000000..6917dd56528b --- /dev/null +++ b/changelog.d/15113.misc @@ -0,0 +1 @@ +Use `immutabledict` instead of `frozendict`. diff --git a/poetry.lock b/poetry.lock index ff8b43bac766..76fbfafcf9eb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -160,23 +160,16 @@ css = ["tinycss2 (>=1.1.0,<1.2)"] [[package]] name = "canonicaljson" -version = "1.6.5" +version = "2.0.0" description = "Canonical JSON" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "canonicaljson-1.6.5-py3-none-any.whl", hash = "sha256:806ea6f2cbb7405d20259e1c36dd1214ba5c242fa9165f5bd0bf2081f82c23fb"}, - {file = "canonicaljson-1.6.5.tar.gz", hash = "sha256:68dfc157b011e07d94bf74b5d4ccc01958584ed942d9dfd5fdd706609e81cd4b"}, + {file = "canonicaljson-2.0.0-py3-none-any.whl", hash = "sha256:c38a315de3b5a0532f1ec1f9153cd3d716abfc565a558d00a4835428a34fca5b"}, + {file = "canonicaljson-2.0.0.tar.gz", hash = "sha256:e2fdaef1d7fadc5d9cb59bd3d0d41b064ddda697809ac4325dced721d12f113f"}, ] -[package.dependencies] -simplejson = ">=3.14.0" -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.8\""} - -[package.extras] -frozendict = ["frozendict (>=1.0)"] - [[package]] name = "certifi" version = "2022.12.7" @@ -453,33 +446,6 @@ files = [ [package.extras] dev = ["Sphinx", "coverage", "flake8", "lxml", "memory-profiler", "mypy (==0.910)", "tox", "xmlschema (>=1.8.0)"] -[[package]] -name = "frozendict" -version = "2.3.4" -description = "A simple immutable dictionary" -category = "main" -optional = false -python-versions = ">=3.6" -files = [ - {file = "frozendict-2.3.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a3b32d47282ae0098b9239a6d53ec539da720258bd762d62191b46f2f87c5fc"}, - {file = "frozendict-2.3.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84c9887179a245a66a50f52afa08d4d92ae0f269839fab82285c70a0fa0dd782"}, - {file = "frozendict-2.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:b98a0d65a59af6da03f794f90b0c3085a7ee14e7bf8f0ef36b079ee8aa992439"}, - {file = "frozendict-2.3.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3d8042b7dab5e992e30889c9b71b781d5feef19b372d47d735e4d7d45846fd4a"}, - {file = 
"frozendict-2.3.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25a6d2e8b7cf6b6e5677a1a4b53b4073e5d9ec640d1db30dc679627668d25e90"}, - {file = "frozendict-2.3.4-cp36-cp36m-win_amd64.whl", hash = "sha256:dbbe1339ac2646523e0bb00d1896085d1f70de23780e4927ca82b36ab8a044d3"}, - {file = "frozendict-2.3.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95bac22f7f09d81f378f2b3f672b7a50a974ca180feae1507f5e21bc147e8bc8"}, - {file = "frozendict-2.3.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae686722c144b333c4dbdc16323a5de11406d26b76d2be1cc175f90afacb5ba"}, - {file = "frozendict-2.3.4-cp37-cp37m-win_amd64.whl", hash = "sha256:389f395a74eb16992217ac1521e689c1dea2d70113bcb18714669ace1ed623b9"}, - {file = "frozendict-2.3.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ccb6450a416c9cc9acef7683e637e28356e3ceeabf83521f74cc2718883076b7"}, - {file = "frozendict-2.3.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aca59108b77cadc13ba7dfea7e8f50811208c7652a13dc6c7f92d7782a24d299"}, - {file = "frozendict-2.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:3ec86ebf143dd685184215c27ec416c36e0ba1b80d81b1b9482f7d380c049b4e"}, - {file = "frozendict-2.3.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5809e6ff6b7257043a486f7a3b73a7da71cf69a38980b4171e4741291d0d9eb3"}, - {file = "frozendict-2.3.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c550ed7fdf1962984bec21630c584d722b3ee5d5f57a0ae2527a0121dc0414a"}, - {file = "frozendict-2.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:3e93aebc6e69a8ef329bbe9afb8342bd33c7b5c7a0c480cb9f7e60b0cbe48072"}, - {file = "frozendict-2.3.4-py3-none-any.whl", hash = "sha256:d722f3d89db6ae35ef35ecc243c40c800eb344848c83dba4798353312cd37b15"}, - {file = "frozendict-2.3.4.tar.gz", hash = "sha256:15b4b18346259392b0d27598f240e9390fafbff882137a9c48a1e0104fb17f78"}, -] - [[package]] name = "gitdb" version = "4.0.9" @@ -725,6 +691,18 @@ files = [ {file = "ijson-3.2.0.post0.tar.gz", hash = "sha256:80a5bd7e9923cab200701f67ad2372104328b99ddf249dbbe8834102c852d316"}, ] +[[package]] +name = "immutabledict" +version = "2.2.3" +description = "Immutable wrapper around dictionaries (a fork of frozendict)" +category = "main" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "immutabledict-2.2.3-py3-none-any.whl", hash = "sha256:a7b078ebcc4a58ddc73b55f808b26e7c8c2d5183fad325615112689e1a63e714"}, + {file = "immutabledict-2.2.3.tar.gz", hash = "sha256:0e1e8a3f2b3ff062daa19795f947e9ec7a58add269d44e34d3ab4319e1343853"}, +] + [[package]] name = "importlib-metadata" version = "6.0.0" @@ -2174,77 +2152,6 @@ unpaddedbase64 = ">=1.0.1" [package.extras] dev = ["typing-extensions (>=3.5)"] -[[package]] -name = "simplejson" -version = "3.17.6" -description = "Simple, fast, extensible JSON encoder/decoder for Python" -category = "main" -optional = false -python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "simplejson-3.17.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:0de783e9c2b87bdd75b57efa2b6260c24b94605b5c9843517577d40ee0c3cc8a"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_i686.whl", hash = 
"sha256:d24a9e61df7a7787b338a58abfba975414937b609eb6b18973e25f573bc0eeeb"}, - {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:e8603e691580487f11306ecb066c76f1f4a8b54fb3bdb23fa40643a059509366"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:9b01e7b00654115965a206e3015f0166674ec1e575198a62a977355597c0bef5"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:37bc0cf0e5599f36072077e56e248f3336917ded1d33d2688624d8ed3cefd7d2"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cf6e7d5fe2aeb54898df18db1baf479863eae581cce05410f61f6b4188c8ada1"}, - {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:bdfc54b4468ed4cd7415928cbe782f4d782722a81aeb0f81e2ddca9932632211"}, - {file = "simplejson-3.17.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd16302d39c4d6f4afde80edd0c97d4db643327d355a312762ccd9bd2ca515ed"}, - {file = "simplejson-3.17.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:deac4bdafa19bbb89edfb73b19f7f69a52d0b5bd3bb0c4ad404c1bbfd7b4b7fd"}, - {file = "simplejson-3.17.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8bbdb166e2fb816e43ab034c865147edafe28e1b19c72433147789ac83e2dda"}, - {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7854326920d41c3b5d468154318fe6ba4390cb2410480976787c640707e0180"}, - {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:04e31fa6ac8e326480703fb6ded1488bfa6f1d3f760d32e29dbf66d0838982ce"}, - {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f63600ec06982cdf480899026f4fda622776f5fabed9a869fdb32d72bc17e99a"}, - {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e03c3b8cc7883a54c3f34a6a135c4a17bc9088a33f36796acdb47162791b02f6"}, - {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a2d30d6c1652140181dc6861f564449ad71a45e4f165a6868c27d36745b65d40"}, - {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1aa6e4cae8e3b8d5321be4f51c5ce77188faf7baa9fe1e78611f93a8eed2882"}, - {file = "simplejson-3.17.6-cp310-cp310-win32.whl", hash = "sha256:97202f939c3ff341fc3fa84d15db86156b1edc669424ba20b0a1fcd4a796a045"}, - {file = "simplejson-3.17.6-cp310-cp310-win_amd64.whl", hash = "sha256:80d3bc9944be1d73e5b1726c3bbfd2628d3d7fe2880711b1eb90b617b9b8ac70"}, - {file = "simplejson-3.17.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9fa621b3c0c05d965882c920347b6593751b7ab20d8fa81e426f1735ca1a9fc7"}, - {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd2fb11922f58df8528adfca123f6a84748ad17d066007e7ac977720063556bd"}, - {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:724c1fe135aa437d5126138d977004d165a3b5e2ee98fc4eb3e7c0ef645e7e27"}, - {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4ff4ac6ff3aa8f814ac0f50bf218a2e1a434a17aafad4f0400a57a8cc62ef17f"}, - {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:67093a526e42981fdd954868062e56c9b67fdd7e712616cc3265ad0c210ecb51"}, - {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = 
"sha256:5d6b4af7ad7e4ac515bc6e602e7b79e2204e25dbd10ab3aa2beef3c5a9cad2c7"}, - {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1c9b1ed7ed282b36571638297525f8ef80f34b3e2d600a56f962c6044f24200d"}, - {file = "simplejson-3.17.6-cp36-cp36m-win32.whl", hash = "sha256:632ecbbd2228575e6860c9e49ea3cc5423764d5aa70b92acc4e74096fb434044"}, - {file = "simplejson-3.17.6-cp36-cp36m-win_amd64.whl", hash = "sha256:4c09868ddb86bf79b1feb4e3e7e4a35cd6e61ddb3452b54e20cf296313622566"}, - {file = "simplejson-3.17.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b6bd8144f15a491c662f06814bd8eaa54b17f26095bb775411f39bacaf66837"}, - {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5decdc78849617917c206b01e9fc1d694fd58caa961be816cb37d3150d613d9a"}, - {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:521877c7bd060470806eb6335926e27453d740ac1958eaf0d8c00911bc5e1802"}, - {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:65b998193bd7b0c7ecdfffbc825d808eac66279313cb67d8892bb259c9d91494"}, - {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac786f6cb7aa10d44e9641c7a7d16d7f6e095b138795cd43503769d4154e0dc2"}, - {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3ff5b3464e1ce86a8de8c88e61d4836927d5595c2162cab22e96ff551b916e81"}, - {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:69bd56b1d257a91e763256d63606937ae4eb890b18a789b66951c00062afec33"}, - {file = "simplejson-3.17.6-cp37-cp37m-win32.whl", hash = "sha256:b81076552d34c27e5149a40187a8f7e2abb2d3185576a317aaf14aeeedad862a"}, - {file = "simplejson-3.17.6-cp37-cp37m-win_amd64.whl", hash = "sha256:07ecaafc1b1501f275bf5acdee34a4ad33c7c24ede287183ea77a02dc071e0c0"}, - {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:068670af975247acbb9fc3d5393293368cda17026db467bf7a51548ee8f17ee1"}, - {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4d1c135af0c72cb28dd259cf7ba218338f4dc027061262e46fe058b4e6a4c6a3"}, - {file = "simplejson-3.17.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23fe704da910ff45e72543cbba152821685a889cf00fc58d5c8ee96a9bad5f94"}, - {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f444762fed1bc1fd75187ef14a20ed900c1fbb245d45be9e834b822a0223bc81"}, - {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:681eb4d37c9a9a6eb9b3245a5e89d7f7b2b9895590bb08a20aa598c1eb0a1d9d"}, - {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8e8607d8f6b4f9d46fee11447e334d6ab50e993dd4dbfb22f674616ce20907ab"}, - {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b10556817f09d46d420edd982dd0653940b90151d0576f09143a8e773459f6fe"}, - {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e1ec8a9ee0987d4524ffd6299e778c16cc35fef6d1a2764e609f90962f0b293a"}, - {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b4126cac7d69ac06ff22efd3e0b3328a4a70624fcd6bca4fc1b4e6d9e2e12bf"}, - {file = "simplejson-3.17.6-cp38-cp38-win32.whl", hash = 
"sha256:35a49ebef25f1ebdef54262e54ae80904d8692367a9f208cdfbc38dbf649e00a"}, - {file = "simplejson-3.17.6-cp38-cp38-win_amd64.whl", hash = "sha256:743cd768affaa508a21499f4858c5b824ffa2e1394ed94eb85caf47ac0732198"}, - {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb62d517a516128bacf08cb6a86ecd39fb06d08e7c4980251f5d5601d29989ba"}, - {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:12133863178a8080a3dccbf5cb2edfab0001bc41e5d6d2446af2a1131105adfe"}, - {file = "simplejson-3.17.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5540fba2d437edaf4aa4fbb80f43f42a8334206ad1ad3b27aef577fd989f20d9"}, - {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d74ee72b5071818a1a5dab47338e87f08a738cb938a3b0653b9e4d959ddd1fd9"}, - {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:28221620f4dcabdeac310846629b976e599a13f59abb21616356a85231ebd6ad"}, - {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b09bc62e5193e31d7f9876220fb429ec13a6a181a24d897b9edfbbdbcd678851"}, - {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7255a37ff50593c9b2f1afa8fafd6ef5763213c1ed5a9e2c6f5b9cc925ab979f"}, - {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:401d40969cee3df7bda211e57b903a534561b77a7ade0dd622a8d1a31eaa8ba7"}, - {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a649d0f66029c7eb67042b15374bd93a26aae202591d9afd71e111dd0006b198"}, - {file = "simplejson-3.17.6-cp39-cp39-win32.whl", hash = "sha256:522fad7be85de57430d6d287c4b635813932946ebf41b913fe7e880d154ade2e"}, - {file = "simplejson-3.17.6-cp39-cp39-win_amd64.whl", hash = "sha256:3fe87570168b2ae018391e2b43fbf66e8593a86feccb4b0500d134c998983ccc"}, - {file = "simplejson-3.17.6.tar.gz", hash = "sha256:cf98038d2abf63a1ada5730e91e84c642ba6c225b0198c3684151b1f80c5f8a6"}, -] - [[package]] name = "six" version = "1.16.0" @@ -3013,4 +2920,4 @@ user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.7.1" -content-hash = "de2c4c8de336593478ce02581a5336afe2544db93ea82f3955b34c3653c29a26" +content-hash = "0ca92e52a1952f9485172efe25a039351280c28f0a158869557dc2f8855786fe" diff --git a/pyproject.toml b/pyproject.toml index 19dc7c1536cf..c0111dd79696 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -153,15 +153,13 @@ python = "^3.7.1" # ---------------------- # we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0 jsonschema = ">=3.0.0" -# frozendict 2.1.2 is broken on Debian 10: https://github.com/Marco-Sulla/python-frozendict/issues/41 -# We cannot test our wheels against the 2.3.5 release in CI. Putting in an upper bound for this -# because frozendict has been more trouble than it's worth; we would like to move to immutabledict. -frozendict = ">=1,!=2.1.2,<2.3.5" +# We choose 2.0 as a lower bound: the most recent backwards incompatible release. +# It seems generally available, judging by https://pkgs.org/search/?q=immutabledict +immutabledict = ">=2.0" # We require 2.1.0 or higher for type hints. Previous guard was >= 1.1.0 unpaddedbase64 = ">=2.1.0" -# We require 1.5.0 to work around an issue when running against the C implementation of -# frozendict: https://github.com/matrix-org/python-canonicaljson/issues/36 -canonicaljson = "^1.5.0" +# We require 2.0.0 for immutabledict support. 
+canonicaljson = "^2.0.0" # we use the type definitions added in signedjson 1.1. signedjson = "^1.1.0" # validating SSL certs for IP addresses requires service_identity 18.1. diff --git a/stubs/frozendict.pyi b/stubs/frozendict.pyi deleted file mode 100644 index 196dee4461ea..000000000000 --- a/stubs/frozendict.pyi +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2020 The Matrix.org Foundation C.I.C. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Stub for frozendict. - -from __future__ import annotations - -from typing import Any, Hashable, Iterable, Iterator, Mapping, Tuple, TypeVar, overload - -_KT = TypeVar("_KT", bound=Hashable) # Key type. -_VT = TypeVar("_VT") # Value type. - -class frozendict(Mapping[_KT, _VT]): - @overload - def __init__(self, **kwargs: _VT) -> None: ... - @overload - def __init__(self, __map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... - @overload - def __init__( - self, __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT - ) -> None: ... - def __getitem__(self, key: _KT) -> _VT: ... - def __contains__(self, key: Any) -> bool: ... - def copy(self, **add_or_replace: Any) -> frozendict: ... - def __iter__(self) -> Iterator[_KT]: ... - def __len__(self) -> int: ... - def __repr__(self) -> str: ... - def __hash__(self) -> int: ... diff --git a/synapse/__init__.py b/synapse/__init__.py index a203ed533ae5..b97ee59f15b6 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,9 +17,9 @@ """ This is an implementation of a Matrix homeserver. """ -import json import os import sys +from typing import Any, Dict from synapse.util.rust import check_rust_lib_up_to_date from synapse.util.stringutils import strtobool @@ -61,11 +61,20 @@ except ImportError: pass -# Use the standard library json implementation instead of simplejson. +# Teach canonicaljson how to serialise immutabledicts. try: - from canonicaljson import set_json_library - - set_json_library(json) + from canonicaljson import register_preserialisation_callback + from immutabledict import immutabledict + + def _immutabledict_cb(d: immutabledict) -> Dict[str, Any]: + try: + return d._dict + except Exception: + # Paranoia: fall back to a `dict()` call, in case a future version of + # immutabledict removes `_dict` from the implementation. + return dict(d) + + register_preserialisation_callback(immutabledict, _immutabledict_cb) except ImportError: pass diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index 23b799ac3250..1a293f1df008 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -51,7 +51,7 @@ def check_event_content_hash( # some malformed events lack a 'hashes'. Protect against it being missing # or a weird type by basically treating it the same as an unhashed event. 
hashes = event.get("hashes") - # nb it might be a frozendict or a dict + # nb it might be an immutabledict or a dict if not isinstance(hashes, collections.abc.Mapping): raise SynapseError( 400, "Malformed 'hashes': %s" % (type(hashes),), Codes.UNAUTHORIZED diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py index c04ad08cbb61..9b4d692cf478 100644 --- a/synapse/events/snapshot.py +++ b/synapse/events/snapshot.py @@ -15,7 +15,7 @@ from typing import TYPE_CHECKING, List, Optional, Tuple import attr -from frozendict import frozendict +from immutabledict import immutabledict from synapse.appservice import ApplicationService from synapse.events import EventBase @@ -489,4 +489,4 @@ def _decode_state_dict( if input is None: return None - return frozendict({(etype, state_key): v for etype, state_key, v in input}) + return immutabledict({(etype, state_key): v for etype, state_key, v in input}) diff --git a/synapse/events/utils.py b/synapse/events/utils.py index b9c15ffcdb36..e41c7a4b83e6 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -567,7 +567,7 @@ def serialize_events( def copy_and_fixup_power_levels_contents( old_power_levels: PowerLevelsContent, ) -> Dict[str, Union[int, Dict[str, int]]]: - """Copy the content of a power_levels event, unfreezing frozendicts along the way. + """Copy the content of a power_levels event, unfreezing immutabledicts along the way. We accept as input power level values which are strings, provided they represent an integer, e.g. `"100"` instead of 100. Such strings are converted to integers diff --git a/synapse/events/validator.py b/synapse/events/validator.py index fb1737b910e9..6f0e4386d3f1 100644 --- a/synapse/events/validator.py +++ b/synapse/events/validator.py @@ -258,7 +258,7 @@ def _ensure_state_event(self, event: Union[EventBase, EventBuilder]) -> None: def _create_power_level_validator() -> Type[jsonschema.Draft7Validator]: validator = jsonschema.validators.validator_for(POWER_LEVELS_SCHEMA) - # by default jsonschema does not consider a frozendict to be an object so + # by default jsonschema does not consider an immutabledict to be an object so # we need to use a custom type checker # https://python-jsonschema.readthedocs.io/en/stable/validate/?highlight=object#validating-with-additional-types type_checker = validator.TYPE_CHECKER.redefine( diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index 4dc25df67e17..603109524932 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -33,7 +33,7 @@ ) import attr -from frozendict import frozendict +from immutabledict import immutabledict from prometheus_client import Counter, Histogram from synapse.api.constants import EventTypes @@ -105,14 +105,18 @@ def __init__( # # This can be None if we have a `state_group` (as then we can fetch the # state from the DB.) - self._state = frozendict(state) if state is not None else None + self._state: Optional[StateMap[str]] = ( immutabledict(state) if state is not None else None ) # the ID of a state group if one and only one is involved. # otherwise, None otherwise?
self.state_group = state_group self.prev_group = prev_group - self.delta_ids = frozendict(delta_ids) if delta_ids is not None else None + self.delta_ids: Optional[StateMap[str]] = ( + immutabledict(delta_ids) if delta_ids is not None else None + ) async def get_state( self, diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py index ac5fbf6b8621..2b8779bbb8a1 100644 --- a/synapse/storage/databases/main/stream.py +++ b/synapse/storage/databases/main/stream.py @@ -50,7 +50,7 @@ ) import attr -from frozendict import frozendict +from immutabledict import immutabledict from typing_extensions import Literal from twisted.internet import defer @@ -557,7 +557,7 @@ def get_room_max_token(self) -> RoomStreamToken: if p > min_pos } - return RoomStreamToken(None, min_pos, frozendict(positions)) + return RoomStreamToken(None, min_pos, immutabledict(positions)) async def get_room_events_stream_for_rooms( self, diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py index 33363867c456..c09b9cf87ddc 100644 --- a/synapse/types/__init__.py +++ b/synapse/types/__init__.py @@ -35,7 +35,7 @@ ) import attr -from frozendict import frozendict +from immutabledict import immutabledict from signedjson.key import decode_verify_key_bytes from signedjson.types import VerifyKey from typing_extensions import Final, TypedDict @@ -490,12 +490,12 @@ class RoomStreamToken: ) stream: int = attr.ib(validator=attr.validators.instance_of(int)) - instance_map: "frozendict[str, int]" = attr.ib( - factory=frozendict, + instance_map: "immutabledict[str, int]" = attr.ib( + factory=immutabledict, validator=attr.validators.deep_mapping( key_validator=attr.validators.instance_of(str), value_validator=attr.validators.instance_of(int), - mapping_validator=attr.validators.instance_of(frozendict), + mapping_validator=attr.validators.instance_of(immutabledict), ), ) @@ -531,7 +531,7 @@ async def parse(cls, store: "PurgeEventsStore", string: str) -> "RoomStreamToken return cls( topological=None, stream=stream, - instance_map=frozendict(instance_map), + instance_map=immutabledict(instance_map), ) except CancelledError: raise @@ -566,7 +566,7 @@ def copy_and_advance(self, other: "RoomStreamToken") -> "RoomStreamToken": for instance in set(self.instance_map).union(other.instance_map) } - return RoomStreamToken(None, max_stream, frozendict(instance_map)) + return RoomStreamToken(None, max_stream, immutabledict(instance_map)) def as_historical_tuple(self) -> Tuple[int, int]: """Returns a tuple of `(topological, stream)` for historical tokens. diff --git a/synapse/types/state.py b/synapse/types/state.py index 4b3071acce32..1e78a740475b 100644 --- a/synapse/types/state.py +++ b/synapse/types/state.py @@ -28,7 +28,7 @@ ) import attr -from frozendict import frozendict +from immutabledict import immutabledict from synapse.api.constants import EventTypes from synapse.types import MutableStateMap, StateKey, StateMap @@ -56,7 +56,7 @@ class StateFilter: appear in `types`. 
""" - types: "frozendict[str, Optional[FrozenSet[str]]]" + types: "immutabledict[str, Optional[FrozenSet[str]]]" include_others: bool = False def __attrs_post_init__(self) -> None: @@ -67,7 +67,7 @@ def __attrs_post_init__(self) -> None: object.__setattr__( self, "types", - frozendict({k: v for k, v in self.types.items() if v is not None}), + immutabledict({k: v for k, v in self.types.items() if v is not None}), ) @staticmethod @@ -112,7 +112,7 @@ def from_types(types: Iterable[Tuple[str, Optional[str]]]) -> "StateFilter": type_dict.setdefault(typ, set()).add(s) # type: ignore return StateFilter( - types=frozendict( + types=immutabledict( (k, frozenset(v) if v is not None else None) for k, v in type_dict.items() ) @@ -139,7 +139,7 @@ def from_lazy_load_member_list(members: Iterable[str]) -> "StateFilter": The new state filter """ return StateFilter( - types=frozendict({EventTypes.Member: frozenset(members)}), + types=immutabledict({EventTypes.Member: frozenset(members)}), include_others=True, ) @@ -159,7 +159,7 @@ def freeze( types_with_frozen_values[state_types] = None return StateFilter( - frozendict(types_with_frozen_values), include_others=include_others + immutabledict(types_with_frozen_values), include_others=include_others ) def return_expanded(self) -> "StateFilter": @@ -217,7 +217,7 @@ def return_expanded(self) -> "StateFilter": # We want to return all non-members, but only particular # memberships return StateFilter( - types=frozendict({EventTypes.Member: self.types[EventTypes.Member]}), + types=immutabledict({EventTypes.Member: self.types[EventTypes.Member]}), include_others=True, ) else: @@ -381,14 +381,16 @@ def get_member_split(self) -> Tuple["StateFilter", "StateFilter"]: if state_keys is None: member_filter = StateFilter.all() else: - member_filter = StateFilter(frozendict({EventTypes.Member: state_keys})) + member_filter = StateFilter( + immutabledict({EventTypes.Member: state_keys}) + ) elif self.include_others: member_filter = StateFilter.all() else: member_filter = StateFilter.none() non_member_filter = StateFilter( - types=frozendict( + types=immutabledict( {k: v for k, v in self.types.items() if k != EventTypes.Member} ), include_others=self.include_others, @@ -578,8 +580,8 @@ def must_await_full_state(self, is_mine_id: Callable[[str], bool]) -> bool: return False -_ALL_STATE_FILTER = StateFilter(types=frozendict(), include_others=True) +_ALL_STATE_FILTER = StateFilter(types=immutabledict(), include_others=True) _ALL_NON_MEMBER_STATE_FILTER = StateFilter( - types=frozendict({EventTypes.Member: frozenset()}), include_others=True + types=immutabledict({EventTypes.Member: frozenset()}), include_others=True ) -_NONE_STATE_FILTER = StateFilter(types=frozendict(), include_others=False) +_NONE_STATE_FILTER = StateFilter(types=immutabledict(), include_others=False) diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index 7be9d5f11335..9ddd26ccaa2d 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -18,7 +18,7 @@ from typing import Any, Callable, Dict, Generator, Optional, Sequence import attr -from frozendict import frozendict +from immutabledict import immutabledict from matrix_common.versionstring import get_distribution_version_string from typing_extensions import ParamSpec @@ -41,22 +41,18 @@ def _reject_invalid_json(val: Any) -> None: raise ValueError("Invalid JSON value: '%s'" % val) -def _handle_frozendict(obj: Any) -> Dict[Any, Any]: - """Helper for json_encoder. 
Makes frozendicts serializable by returning +def _handle_immutabledict(obj: Any) -> Dict[Any, Any]: + """Helper for json_encoder. Makes immutabledicts serializable by returning the underlying dict """ - if type(obj) is frozendict: + if type(obj) is immutabledict: # fishing the protected dict out of the object is a bit nasty, # but we don't really want the overhead of copying the dict. try: # Safety: we catch the AttributeError immediately below. - # See https://github.com/matrix-org/python-canonicaljson/issues/36#issuecomment-927816293 - # for discussion on how frozendict's internals have changed over time. - return obj._dict # type: ignore[attr-defined] + return obj._dict except AttributeError: - # When the C implementation of frozendict is used, - # there isn't a `_dict` attribute with a dict - # so we resort to making a copy of the frozendict + # If all else fails, resort to making a copy of the immutabledict return dict(obj) raise TypeError( "Object of type %s is not JSON serializable" % obj.__class__.__name__ @@ -64,11 +60,11 @@ def _handle_frozendict(obj: Any) -> Dict[Any, Any]: # A custom JSON encoder which: -# * handles frozendicts +# * handles immutabledicts # * produces valid JSON (no NaNs etc) # * reduces redundant whitespace json_encoder = json.JSONEncoder( - allow_nan=False, separators=(",", ":"), default=_handle_frozendict + allow_nan=False, separators=(",", ":"), default=_handle_immutabledict ) # Create a custom decoder to reject Python extensions to JSON. diff --git a/synapse/util/frozenutils.py b/synapse/util/frozenutils.py index 7223af1a36e7..889caa2601e3 100644 --- a/synapse/util/frozenutils.py +++ b/synapse/util/frozenutils.py @@ -14,14 +14,14 @@ import collections.abc from typing import Any -from frozendict import frozendict +from immutabledict import immutabledict def freeze(o: Any) -> Any: if isinstance(o, dict): - return frozendict({k: freeze(v) for k, v in o.items()}) + return immutabledict({k: freeze(v) for k, v in o.items()}) - if isinstance(o, frozendict): + if isinstance(o, immutabledict): return o if isinstance(o, (bytes, str)): diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py index 0f45615160ed..6c6a9ab4b4a9 100644 --- a/tests/api/test_filtering.py +++ b/tests/api/test_filtering.py @@ -18,7 +18,6 @@ from unittest.mock import patch import jsonschema -from frozendict import frozendict from twisted.test.proto_helpers import MemoryReactor @@ -29,6 +28,7 @@ from synapse.server import HomeServer from synapse.types import JsonDict from synapse.util import Clock +from synapse.util.frozenutils import freeze from tests import unittest from tests.events.test_utils import MockEvent @@ -343,12 +343,12 @@ def test_filter_labels(self) -> None: self.assertFalse(Filter(self.hs, definition)._check(event)) - # check it works with frozendicts too + # check it works with frozen dictionaries too event = MockEvent( sender="@foo:bar", type="m.room.message", room_id="!secretbase:unknown", - content=frozendict({EventContentFields.LABELS: ["#fun"]}), + content=freeze({EventContentFields.LABELS: ["#fun"]}), ) self.assertTrue(Filter(self.hs, definition)._check(event)) diff --git a/tests/config/test_workers.py b/tests/config/test_workers.py index ef6294ecb2ce..49a6bdf40837 100644 --- a/tests/config/test_workers.py +++ b/tests/config/test_workers.py @@ -14,14 +14,14 @@ from typing import Any, Mapping, Optional from unittest.mock import Mock -from frozendict import frozendict +from immutabledict import immutabledict from synapse.config import ConfigError from 
synapse.config.workers import WorkerConfig from tests.unittest import TestCase -_EMPTY_FROZENDICT: Mapping[str, Any] = frozendict() +_EMPTY_IMMUTABLEDICT: Mapping[str, Any] = immutabledict() class WorkerDutyConfigTestCase(TestCase): @@ -29,7 +29,7 @@ def _make_worker_config( self, worker_app: str, worker_name: Optional[str], - extras: Mapping[str, Any] = _EMPTY_FROZENDICT, + extras: Mapping[str, Any] = _EMPTY_IMMUTABLEDICT, ) -> WorkerConfig: root_config = Mock() root_config.worker_app = worker_app diff --git a/tests/push/test_push_rule_evaluator.py b/tests/push/test_push_rule_evaluator.py index 52c4aafea652..b2536562e05a 100644 --- a/tests/push/test_push_rule_evaluator.py +++ b/tests/push/test_push_rule_evaluator.py @@ -14,8 +14,6 @@ from typing import Any, Dict, List, Optional, Union, cast -import frozendict - from twisted.test.proto_helpers import MemoryReactor import synapse.rest.admin @@ -318,11 +316,11 @@ def test_event_match_non_body(self) -> None: "pattern should only match at the start/end of the value", ) - # it should work on frozendicts too + # it should work on frozen dictionaries too self._assert_matches( condition, - frozendict.frozendict({"value": "FoobaZ"}), - "patterns should match on frozendicts", + freeze({"value": "FoobaZ"}), + "patterns should match on frozen dictionaries", ) # wildcards should match @@ -425,11 +423,11 @@ def test_exact_event_match_string(self) -> None: "incorrect types should not match", ) - # it should work on frozendicts too + # it should work on frozen dictionaries too self._assert_matches( condition, - frozendict.frozendict({"value": "foobaz"}), - "values should match on frozendicts", + freeze({"value": "foobaz"}), + "values should match on frozen dictionaries", ) def test_exact_event_match_boolean(self) -> None: @@ -546,11 +544,11 @@ def test_exact_event_property_contains(self) -> None: "does not search in a string", ) - # it should work on frozendicts too + # it should work on frozen dictionaries too self._assert_matches( condition, freeze({"value": ["foobaz"]}), - "values should match on frozendicts", + "values should match on frozen dictionaries", ) def test_no_body(self) -> None: diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py index 62aed6af0a73..0b9446c36c05 100644 --- a/tests/storage/test_state.py +++ b/tests/storage/test_state.py @@ -14,7 +14,7 @@ import logging -from frozendict import frozendict +from immutabledict import immutabledict from twisted.test.proto_helpers import MemoryReactor @@ -198,7 +198,7 @@ def test_get_state_for_event(self) -> None: self.storage.state.get_state_for_event( e5.event_id, state_filter=StateFilter( - types=frozendict( + types=immutabledict( {EventTypes.Member: frozenset({self.u_alice.to_string()})} ), include_others=True, @@ -220,7 +220,7 @@ def test_get_state_for_event(self) -> None: self.storage.state.get_state_for_event( e5.event_id, state_filter=StateFilter( - types=frozendict({EventTypes.Member: frozenset()}), + types=immutabledict({EventTypes.Member: frozenset()}), include_others=True, ), ) @@ -246,7 +246,8 @@ def test_get_state_for_event(self) -> None: self.state_datastore._state_group_cache, group, state_filter=StateFilter( - types=frozendict({EventTypes.Member: frozenset()}), include_others=True + types=immutabledict({EventTypes.Member: frozenset()}), + include_others=True, ), ) @@ -263,7 +264,8 @@ def test_get_state_for_event(self) -> None: self.state_datastore._state_group_members_cache, group, state_filter=StateFilter( - types=frozendict({EventTypes.Member: frozenset()}), 
include_others=True + types=immutabledict({EventTypes.Member: frozenset()}), + include_others=True, ), ) @@ -276,7 +278,7 @@ def test_get_state_for_event(self) -> None: self.state_datastore._state_group_cache, group, state_filter=StateFilter( - types=frozendict({EventTypes.Member: None}), include_others=True + types=immutabledict({EventTypes.Member: None}), include_others=True ), ) @@ -293,7 +295,7 @@ def test_get_state_for_event(self) -> None: self.state_datastore._state_group_members_cache, group, state_filter=StateFilter( - types=frozendict({EventTypes.Member: None}), include_others=True + types=immutabledict({EventTypes.Member: None}), include_others=True ), ) @@ -313,7 +315,7 @@ def test_get_state_for_event(self) -> None: self.state_datastore._state_group_cache, group, state_filter=StateFilter( - types=frozendict({EventTypes.Member: frozenset({e5.state_key})}), + types=immutabledict({EventTypes.Member: frozenset({e5.state_key})}), include_others=True, ), ) @@ -331,7 +333,7 @@ def test_get_state_for_event(self) -> None: self.state_datastore._state_group_members_cache, group, state_filter=StateFilter( - types=frozendict({EventTypes.Member: frozenset({e5.state_key})}), + types=immutabledict({EventTypes.Member: frozenset({e5.state_key})}), include_others=True, ), ) @@ -345,7 +347,7 @@ def test_get_state_for_event(self) -> None: self.state_datastore._state_group_members_cache, group, state_filter=StateFilter( - types=frozendict({EventTypes.Member: frozenset({e5.state_key})}), + types=immutabledict({EventTypes.Member: frozenset({e5.state_key})}), include_others=False, ), ) @@ -396,7 +398,8 @@ def test_get_state_for_event(self) -> None: self.state_datastore._state_group_cache, group, state_filter=StateFilter( - types=frozendict({EventTypes.Member: frozenset()}), include_others=True + types=immutabledict({EventTypes.Member: frozenset()}), + include_others=True, ), ) @@ -408,7 +411,8 @@ def test_get_state_for_event(self) -> None: self.state_datastore._state_group_members_cache, group, state_filter=StateFilter( - types=frozendict({EventTypes.Member: frozenset()}), include_others=True + types=immutabledict({EventTypes.Member: frozenset()}), + include_others=True, ), ) @@ -421,7 +425,7 @@ def test_get_state_for_event(self) -> None: self.state_datastore._state_group_cache, group, state_filter=StateFilter( - types=frozendict({EventTypes.Member: None}), include_others=True + types=immutabledict({EventTypes.Member: None}), include_others=True ), ) @@ -432,7 +436,7 @@ def test_get_state_for_event(self) -> None: self.state_datastore._state_group_members_cache, group, state_filter=StateFilter( - types=frozendict({EventTypes.Member: None}), include_others=True + types=immutabledict({EventTypes.Member: None}), include_others=True ), ) @@ -451,7 +455,7 @@ def test_get_state_for_event(self) -> None: self.state_datastore._state_group_cache, group, state_filter=StateFilter( - types=frozendict({EventTypes.Member: frozenset({e5.state_key})}), + types=immutabledict({EventTypes.Member: frozenset({e5.state_key})}), include_others=True, ), ) @@ -463,7 +467,7 @@ def test_get_state_for_event(self) -> None: self.state_datastore._state_group_members_cache, group, state_filter=StateFilter( - types=frozendict({EventTypes.Member: frozenset({e5.state_key})}), + types=immutabledict({EventTypes.Member: frozenset({e5.state_key})}), include_others=True, ), ) @@ -477,7 +481,7 @@ def test_get_state_for_event(self) -> None: self.state_datastore._state_group_cache, group, state_filter=StateFilter( - 
types=frozendict({EventTypes.Member: frozenset({e5.state_key})}), + types=immutabledict({EventTypes.Member: frozenset({e5.state_key})}), include_others=False, ), ) @@ -489,7 +493,7 @@ def test_get_state_for_event(self) -> None: self.state_datastore._state_group_members_cache, group, state_filter=StateFilter( - types=frozendict({EventTypes.Member: frozenset({e5.state_key})}), + types=immutabledict({EventTypes.Member: frozenset({e5.state_key})}), include_others=False, ), ) diff --git a/tests/types/test_state.py b/tests/types/test_state.py index eb809f9fb739..1d89582c448d 100644 --- a/tests/types/test_state.py +++ b/tests/types/test_state.py @@ -1,4 +1,4 @@ -from frozendict import frozendict +from immutabledict import immutabledict from synapse.api.constants import EventTypes from synapse.types.state import StateFilter @@ -172,7 +172,7 @@ def test_state_filter_difference_include_other_minus_no_include_other(self) -> N }, include_others=False, ), - StateFilter(types=frozendict(), include_others=True), + StateFilter(types=immutabledict(), include_others=True), ) # (wildcard on state keys) - (no state keys) @@ -188,7 +188,7 @@ def test_state_filter_difference_include_other_minus_no_include_other(self) -> N include_others=False, ), StateFilter( - types=frozendict(), + types=immutabledict(), include_others=True, ), ) @@ -279,7 +279,7 @@ def test_state_filter_difference_include_other_minus_include_other(self) -> None {EventTypes.Member: None, EventTypes.CanonicalAlias: None}, include_others=True, ), - StateFilter(types=frozendict(), include_others=False), + StateFilter(types=immutabledict(), include_others=False), ) # (wildcard on state keys) - (specific state keys) @@ -332,7 +332,7 @@ def test_state_filter_difference_include_other_minus_include_other(self) -> None include_others=True, ), StateFilter( - types=frozendict(), + types=immutabledict(), include_others=False, ), ) @@ -403,7 +403,7 @@ def test_state_filter_difference_no_include_other_minus_include_other(self) -> N {EventTypes.Member: None, EventTypes.CanonicalAlias: None}, include_others=True, ), - StateFilter(types=frozendict(), include_others=False), + StateFilter(types=immutabledict(), include_others=False), ) # (wildcard on state keys) - (specific state keys) @@ -450,7 +450,7 @@ def test_state_filter_difference_no_include_other_minus_include_other(self) -> N include_others=True, ), StateFilter( - types=frozendict(), + types=immutabledict(), include_others=False, ), ) From 98fd558382226b347d78e5f6e6782c6e74c25e69 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Thu, 23 Mar 2023 12:11:14 +0000 Subject: [PATCH 011/106] Add a primitive helper script for listing worker endpoints. 
(#15243) Co-authored-by: Patrick Cloke --- changelog.d/15243.feature | 1 + synapse/_scripts/generate_workers_map.py | 302 ++++++++++++++++++ .../federation/transport/server/__init__.py | 2 + .../federation/transport/server/federation.py | 27 ++ synapse/rest/client/_base.py | 23 +- synapse/rest/client/account.py | 4 + synapse/rest/client/account_data.py | 2 + synapse/rest/client/devices.py | 2 + synapse/rest/client/events.py | 2 + synapse/rest/client/filter.py | 2 + synapse/rest/client/initial_sync.py | 1 + synapse/rest/client/keys.py | 4 + synapse/rest/client/knock.py | 1 + synapse/rest/client/login.py | 4 + synapse/rest/client/presence.py | 1 + synapse/rest/client/profile.py | 3 + synapse/rest/client/push_rule.py | 3 + synapse/rest/client/read_marker.py | 1 + synapse/rest/client/receipts.py | 1 + synapse/rest/client/register.py | 2 + synapse/rest/client/relations.py | 2 + synapse/rest/client/room.py | 33 +- synapse/rest/client/room_batch.py | 1 + synapse/rest/client/room_keys.py | 3 + synapse/rest/client/sendtodevice.py | 1 + synapse/rest/client/sync.py | 1 + synapse/rest/client/tags.py | 2 + synapse/rest/client/user_directory.py | 1 + synapse/rest/client/versions.py | 1 + synapse/rest/client/voip.py | 1 + synapse/rest/key/v2/remote_key_resource.py | 2 + 31 files changed, 424 insertions(+), 12 deletions(-) create mode 100644 changelog.d/15243.feature create mode 100755 synapse/_scripts/generate_workers_map.py diff --git a/changelog.d/15243.feature b/changelog.d/15243.feature new file mode 100644 index 000000000000..c45e974c4c7b --- /dev/null +++ b/changelog.d/15243.feature @@ -0,0 +1 @@ +Add a primitive helper script for listing worker endpoints. \ No newline at end of file diff --git a/synapse/_scripts/generate_workers_map.py b/synapse/_scripts/generate_workers_map.py new file mode 100755 index 000000000000..6c0887852368 --- /dev/null +++ b/synapse/_scripts/generate_workers_map.py @@ -0,0 +1,302 @@ +#!/usr/bin/env python +# Copyright 2022-2023 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import argparse +import logging +import re +from collections import defaultdict +from dataclasses import dataclass +from typing import Dict, Iterable, Optional, Pattern, Set, Tuple + +import yaml + +from synapse.config.homeserver import HomeServerConfig +from synapse.federation.transport.server import ( + TransportLayerServer, + register_servlets as register_federation_servlets, +) +from synapse.http.server import HttpServer, ServletCallback +from synapse.rest import ClientRestResource +from synapse.rest.key.v2 import RemoteKey +from synapse.server import HomeServer +from synapse.storage import DataStore + +logger = logging.getLogger("generate_workers_map") + + +class MockHomeserver(HomeServer): + DATASTORE_CLASS = DataStore # type: ignore + + def __init__(self, config: HomeServerConfig, worker_app: Optional[str]) -> None: + super().__init__(config.server.server_name, config=config) + self.config.worker.worker_app = worker_app + + +GROUP_PATTERN = re.compile(r"\(\?P<[^>]+?>(.+?)\)") + + +@dataclass +class EndpointDescription: + """ + Describes an endpoint and how it should be routed. + """ + + # The servlet class that handles this endpoint + servlet_class: object + + # The category of this endpoint. Is read from the `CATEGORY` constant in the servlet + # class. + category: Optional[str] + + # TODO: + # - does it need to be routed based on a stream writer config? + # - does it benefit from any optimised, but optional, routing? + # - what 'opinionated synapse worker class' (event_creator, synchrotron, etc) does + # it go in? + + +class EnumerationResource(HttpServer): + """ + Accepts servlet registrations for the purposes of building up a description of + all endpoints. + """ + + def __init__(self, is_worker: bool) -> None: + self.registrations: Dict[Tuple[str, str], EndpointDescription] = {} + self._is_worker = is_worker + + def register_paths( + self, + method: str, + path_patterns: Iterable[Pattern], + callback: ServletCallback, + servlet_classname: str, + ) -> None: + # federation servlet callbacks are wrapped, so unwrap them. + callback = getattr(callback, "__wrapped__", callback) + + # fish out the servlet class + servlet_class = callback.__self__.__class__ # type: ignore + + if self._is_worker and method in getattr( + servlet_class, "WORKERS_DENIED_METHODS", () + ): + # This endpoint would cause an error if called on a worker, so pretend it + # was never registered! + return + + sd = EndpointDescription( + servlet_class=servlet_class, + category=getattr(servlet_class, "CATEGORY", None), + ) + + for pat in path_patterns: + self.registrations[(method, pat.pattern)] = sd + + +def get_registered_paths_for_hs( + hs: HomeServer, +) -> Dict[Tuple[str, str], EndpointDescription]: + """ + Given a homeserver, get all registered endpoints and their descriptions. 
+ """ + + enumerator = EnumerationResource(is_worker=hs.config.worker.worker_app is not None) + ClientRestResource.register_servlets(enumerator, hs) + federation_server = TransportLayerServer(hs) + + # we can't use `federation_server.register_servlets` but this line does the + # same thing, only it uses this enumerator + register_federation_servlets( + federation_server.hs, + resource=enumerator, + ratelimiter=federation_server.ratelimiter, + authenticator=federation_server.authenticator, + servlet_groups=federation_server.servlet_groups, + ) + + # the key server endpoints are separate again + RemoteKey(hs).register(enumerator) + + return enumerator.registrations + + +def get_registered_paths_for_default( + worker_app: Optional[str], base_config: HomeServerConfig +) -> Dict[Tuple[str, str], EndpointDescription]: + """ + Given the name of a worker application and a base homeserver configuration, + returns: + + Dict from (method, path) to EndpointDescription + + TODO Don't require passing in a config + """ + + hs = MockHomeserver(base_config, worker_app) + # TODO We only do this to avoid an error, but don't need the database etc + hs.setup() + return get_registered_paths_for_hs(hs) + + +def elide_http_methods_if_unconflicting( + registrations: Dict[Tuple[str, str], EndpointDescription], + all_possible_registrations: Dict[Tuple[str, str], EndpointDescription], +) -> Dict[Tuple[str, str], EndpointDescription]: + """ + Elides HTTP methods (by replacing them with `*`) if all possible registered methods + can be handled by the worker whose registration map is `registrations`. + + i.e. the only endpoints left with methods (other than `*`) should be the ones where + the worker can't handle all possible methods for that path. + """ + + def paths_to_methods_dict( + methods_and_paths: Iterable[Tuple[str, str]] + ) -> Dict[str, Set[str]]: + """ + Given (method, path) pairs, produces a dict from path to set of methods + available at that path. + """ + result: Dict[str, Set[str]] = {} + for method, path in methods_and_paths: + result.setdefault(path, set()).add(method) + return result + + all_possible_reg_methods = paths_to_methods_dict(all_possible_registrations) + reg_methods = paths_to_methods_dict(registrations) + + output = {} + + for path, handleable_methods in reg_methods.items(): + if handleable_methods == all_possible_reg_methods[path]: + any_method = next(iter(handleable_methods)) + # TODO This assumes that all methods have the same servlet. + # I suppose that's possibly dubious? + output[("*", path)] = registrations[(any_method, path)] + else: + for method in handleable_methods: + output[(method, path)] = registrations[(method, path)] + + return output + + +def simplify_path_regexes( + registrations: Dict[Tuple[str, str], EndpointDescription] +) -> Dict[Tuple[str, str], EndpointDescription]: + """ + Simplify all the path regexes for the dict of endpoint descriptions, + so that we don't use the Python-specific regex extensions + (and also to remove needlessly specific detail). + """ + + def simplify_path_regex(path: str) -> str: + """ + Given a regex pattern, replaces all named capturing groups (e.g. `(?Pxyz)`) + with a simpler version available in more common regex dialects (e.g. `.*`). 
+ """ + + # TODO it's hard to choose between these two; + # `.*` is a vague simplification + # return GROUP_PATTERN.sub(r"\1", path) + return GROUP_PATTERN.sub(r".*", path) + + return {(m, simplify_path_regex(p)): v for (m, p), v in registrations.items()} + + +def main() -> None: + parser = argparse.ArgumentParser( + description=( + "Updates a synapse database to the latest schema and optionally runs background updates" + " on it." + ) + ) + parser.add_argument("-v", action="store_true") + parser.add_argument( + "--config-path", + type=argparse.FileType("r"), + required=True, + help="Synapse configuration file", + ) + + args = parser.parse_args() + + # TODO + # logging.basicConfig(**logging_config) + + # Load, process and sanity-check the config. + hs_config = yaml.safe_load(args.config_path) + + config = HomeServerConfig() + config.parse_config_dict(hs_config, "", "") + + master_paths = get_registered_paths_for_default(None, config) + worker_paths = get_registered_paths_for_default( + "synapse.app.generic_worker", config + ) + + all_paths = {**master_paths, **worker_paths} + + elided_worker_paths = elide_http_methods_if_unconflicting(worker_paths, all_paths) + elide_http_methods_if_unconflicting(master_paths, all_paths) + + # TODO SSO endpoints (pick_idp etc) NOT REGISTERED BY THIS SCRIPT + + categories_to_methods_and_paths: Dict[ + Optional[str], Dict[Tuple[str, str], EndpointDescription] + ] = defaultdict(dict) + + for (method, path), desc in elided_worker_paths.items(): + categories_to_methods_and_paths[desc.category][method, path] = desc + + for category, contents in categories_to_methods_and_paths.items(): + print_category(category, contents) + + +def print_category( + category_name: Optional[str], + elided_worker_paths: Dict[Tuple[str, str], EndpointDescription], +) -> None: + """ + Prints out a category, in documentation page style. + + Example: + ``` + # Category name + /path/xyz + + GET /path/abc + ``` + """ + + if category_name: + print(f"# {category_name}") + else: + print("# (Uncategorised requests)") + + for ln in sorted( + p for m, p in simplify_path_regexes(elided_worker_paths) if m == "*" + ): + print(ln) + print() + for ln in sorted( + f"{m:6} {p}" for m, p in simplify_path_regexes(elided_worker_paths) if m != "*" + ): + print(ln) + print() + + +if __name__ == "__main__": + main() diff --git a/synapse/federation/transport/server/__init__.py b/synapse/federation/transport/server/__init__.py index 2725f53cf6d9..753372fc5476 100644 --- a/synapse/federation/transport/server/__init__.py +++ b/synapse/federation/transport/server/__init__.py @@ -108,6 +108,7 @@ class PublicRoomList(BaseFederationServlet): """ PATH = "/publicRooms" + CATEGORY = "Federation requests" def __init__( self, @@ -212,6 +213,7 @@ class OpenIdUserInfo(BaseFederationServlet): """ PATH = "/openid/userinfo" + CATEGORY = "Federation requests" REQUIRE_AUTH = False diff --git a/synapse/federation/transport/server/federation.py b/synapse/federation/transport/server/federation.py index f7ca87adc491..ec5b5eeafa29 100644 --- a/synapse/federation/transport/server/federation.py +++ b/synapse/federation/transport/server/federation.py @@ -70,6 +70,7 @@ def __init__( class FederationSendServlet(BaseFederationServerServlet): PATH = "/send/(?P[^/]*)/?" + CATEGORY = "Inbound federation transaction request" # We ratelimit manually in the handler as we queue up the requests and we # don't want to fill up the ratelimiter with blocked requests. 
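To make the method-elision step in `generate_workers_map.py` above concrete, here is a minimal standalone sketch of the same logic, with hand-written registration maps and plain strings standing in for `EndpointDescription` (illustrative data only, not the script's real output):

```python
from typing import Dict, Iterable, Set, Tuple

def paths_to_methods_dict(
    methods_and_paths: Iterable[Tuple[str, str]]
) -> Dict[str, Set[str]]:
    result: Dict[str, Set[str]] = {}
    for method, path in methods_and_paths:
        result.setdefault(path, set()).add(method)
    return result

# Every (method, path) registered anywhere, and the subset a worker serves.
everything = {
    ("GET", "/sync"): "sync",
    ("GET", "/rooms"): "rooms",
    ("POST", "/rooms"): "rooms",
}
worker = {("GET", "/sync"): "sync", ("GET", "/rooms"): "rooms"}

all_methods = paths_to_methods_dict(everything)
elided: Dict[Tuple[str, str], str] = {}
for path, methods in paths_to_methods_dict(worker).items():
    if methods == all_methods[path]:
        # The worker handles every possible method for this path: collapse to "*".
        elided[("*", path)] = worker[(next(iter(methods)), path)]
    else:
        for method in methods:
            elided[(method, path)] = worker[(method, path)]

print(elided)  # {('*', '/sync'): 'sync', ('GET', '/rooms'): 'rooms'}
```

The script itself is driven by the `main()` shown further up: point it at a homeserver config with `--config-path` and it prints each category's worker-safe endpoints in documentation-page style.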
@@ -138,6 +139,7 @@ async def on_PUT( class FederationEventServlet(BaseFederationServerServlet): PATH = "/event/(?P[^/]*)/?" + CATEGORY = "Federation requests" # This is when someone asks for a data item for a given server data_id pair. async def on_GET( @@ -152,6 +154,7 @@ async def on_GET( class FederationStateV1Servlet(BaseFederationServerServlet): PATH = "/state/(?P[^/]*)/?" + CATEGORY = "Federation requests" # This is when someone asks for all data for a given room. async def on_GET( @@ -170,6 +173,7 @@ async def on_GET( class FederationStateIdsServlet(BaseFederationServerServlet): PATH = "/state_ids/(?P[^/]*)/?" + CATEGORY = "Federation requests" async def on_GET( self, @@ -187,6 +191,7 @@ async def on_GET( class FederationBackfillServlet(BaseFederationServerServlet): PATH = "/backfill/(?P[^/]*)/?" + CATEGORY = "Federation requests" async def on_GET( self, @@ -225,6 +230,7 @@ class FederationTimestampLookupServlet(BaseFederationServerServlet): """ PATH = "/timestamp_to_event/(?P[^/]*)/?" + CATEGORY = "Federation requests" async def on_GET( self, @@ -246,6 +252,7 @@ async def on_GET( class FederationQueryServlet(BaseFederationServerServlet): PATH = "/query/(?P[^/]*)" + CATEGORY = "Federation requests" # This is when we receive a server-server Query async def on_GET( @@ -262,6 +269,7 @@ async def on_GET( class FederationMakeJoinServlet(BaseFederationServerServlet): PATH = "/make_join/(?P[^/]*)/(?P[^/]*)" + CATEGORY = "Federation requests" async def on_GET( self, @@ -297,6 +305,7 @@ async def on_GET( class FederationMakeLeaveServlet(BaseFederationServerServlet): PATH = "/make_leave/(?P[^/]*)/(?P[^/]*)" + CATEGORY = "Federation requests" async def on_GET( self, @@ -312,6 +321,7 @@ async def on_GET( class FederationV1SendLeaveServlet(BaseFederationServerServlet): PATH = "/send_leave/(?P[^/]*)/(?P[^/]*)" + CATEGORY = "Federation requests" async def on_PUT( self, @@ -327,6 +337,7 @@ async def on_PUT( class FederationV2SendLeaveServlet(BaseFederationServerServlet): PATH = "/send_leave/(?P[^/]*)/(?P[^/]*)" + CATEGORY = "Federation requests" PREFIX = FEDERATION_V2_PREFIX @@ -344,6 +355,7 @@ async def on_PUT( class FederationMakeKnockServlet(BaseFederationServerServlet): PATH = "/make_knock/(?P[^/]*)/(?P[^/]*)" + CATEGORY = "Federation requests" async def on_GET( self, @@ -366,6 +378,7 @@ async def on_GET( class FederationV1SendKnockServlet(BaseFederationServerServlet): PATH = "/send_knock/(?P[^/]*)/(?P[^/]*)" + CATEGORY = "Federation requests" async def on_PUT( self, @@ -381,6 +394,7 @@ async def on_PUT( class FederationEventAuthServlet(BaseFederationServerServlet): PATH = "/event_auth/(?P[^/]*)/(?P[^/]*)" + CATEGORY = "Federation requests" async def on_GET( self, @@ -395,6 +409,7 @@ async def on_GET( class FederationV1SendJoinServlet(BaseFederationServerServlet): PATH = "/send_join/(?P[^/]*)/(?P[^/]*)" + CATEGORY = "Federation requests" async def on_PUT( self, @@ -412,6 +427,7 @@ async def on_PUT( class FederationV2SendJoinServlet(BaseFederationServerServlet): PATH = "/send_join/(?P[^/]*)/(?P[^/]*)" + CATEGORY = "Federation requests" PREFIX = FEDERATION_V2_PREFIX @@ -455,6 +471,7 @@ async def on_PUT( class FederationV1InviteServlet(BaseFederationServerServlet): PATH = "/invite/(?P[^/]*)/(?P[^/]*)" + CATEGORY = "Federation requests" async def on_PUT( self, @@ -479,6 +496,7 @@ async def on_PUT( class FederationV2InviteServlet(BaseFederationServerServlet): PATH = "/invite/(?P[^/]*)/(?P[^/]*)" + CATEGORY = "Federation requests" PREFIX = FEDERATION_V2_PREFIX @@ -515,6 +533,7 @@ async def on_PUT( 
class FederationThirdPartyInviteExchangeServlet(BaseFederationServerServlet): PATH = "/exchange_third_party_invite/(?P[^/]*)" + CATEGORY = "Federation requests" async def on_PUT( self, @@ -529,6 +548,7 @@ async def on_PUT( class FederationClientKeysQueryServlet(BaseFederationServerServlet): PATH = "/user/keys/query" + CATEGORY = "Federation requests" async def on_POST( self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]] @@ -538,6 +558,7 @@ async def on_POST( class FederationUserDevicesQueryServlet(BaseFederationServerServlet): PATH = "/user/devices/(?P[^/]*)" + CATEGORY = "Federation requests" async def on_GET( self, @@ -551,6 +572,7 @@ async def on_GET( class FederationClientKeysClaimServlet(BaseFederationServerServlet): PATH = "/user/keys/claim" + CATEGORY = "Federation requests" async def on_POST( self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]] @@ -561,6 +583,7 @@ async def on_POST( class FederationGetMissingEventsServlet(BaseFederationServerServlet): PATH = "/get_missing_events/(?P[^/]*)" + CATEGORY = "Federation requests" async def on_POST( self, @@ -586,6 +609,7 @@ async def on_POST( class On3pidBindServlet(BaseFederationServerServlet): PATH = "/3pid/onbind" + CATEGORY = "Federation requests" REQUIRE_AUTH = False @@ -618,6 +642,7 @@ async def on_POST( class FederationVersionServlet(BaseFederationServlet): PATH = "/version" + CATEGORY = "Federation requests" REQUIRE_AUTH = False @@ -640,6 +665,7 @@ async def on_GET( class FederationRoomHierarchyServlet(BaseFederationServlet): PATH = "/hierarchy/(?P[^/]*)" + CATEGORY = "Federation requests" def __init__( self, @@ -672,6 +698,7 @@ class RoomComplexityServlet(BaseFederationServlet): PATH = "/rooms/(?P[^/]*)/complexity" PREFIX = FEDERATION_UNSTABLE_PREFIX + CATEGORY = "Federation requests (unstable)" def __init__( self, diff --git a/synapse/rest/client/_base.py b/synapse/rest/client/_base.py index b4cb90cb7667..5c1c19e1f362 100644 --- a/synapse/rest/client/_base.py +++ b/synapse/rest/client/_base.py @@ -43,19 +43,22 @@ def client_patterns( Returns: An iterable of patterns. """ - patterns = [] + versions = [] - if unstable: - unstable_prefix = CLIENT_API_PREFIX + "/unstable" - patterns.append(re.compile("^" + unstable_prefix + path_regex)) if v1: - v1_prefix = CLIENT_API_PREFIX + "/api/v1" - patterns.append(re.compile("^" + v1_prefix + path_regex)) - for release in releases: - new_prefix = CLIENT_API_PREFIX + f"/{release}" - patterns.append(re.compile("^" + new_prefix + path_regex)) + versions.append("api/v1") + versions.extend(releases) + if unstable: + versions.append("unstable") + + if len(versions) == 1: + versions_str = versions[0] + elif len(versions) > 1: + versions_str = "(" + "|".join(versions) + ")" + else: + raise RuntimeError("Must have at least one version for a URL") - return patterns + return [re.compile("^" + CLIENT_API_PREFIX + "/" + versions_str + path_regex)] def set_timeline_upper_limit(filter_json: JsonDict, filter_timeline_limit: int) -> None: diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py index 484d7440a442..3d0c55daa05c 100644 --- a/synapse/rest/client/account.py +++ b/synapse/rest/client/account.py @@ -576,6 +576,9 @@ async def on_POST(self, request: Request) -> Tuple[int, JsonDict]: class ThreepidRestServlet(RestServlet): PATTERNS = client_patterns("/account/3pid$") + # This is used as a proxy for all the 3pid endpoints. 
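For a sense of what the reworked `client_patterns` in `_base.py` above now produces: assuming `CLIENT_API_PREFIX = "/_matrix/client"` and a default `releases` tuple of `("r0", "v3")` (neither default is visible in this hunk, so treat both as assumptions), a servlet registered with `v1=True` gets a single alternation pattern instead of one compiled pattern per version:

```python
import re

# Hypothetical result of client_patterns("/login$", v1=True) under the
# assumptions above: one pattern covering every version prefix.
pattern = re.compile("^/_matrix/client/(api/v1|r0|v3|unstable)/login$")

for prefix in ("api/v1", "r0", "v3", "unstable"):
    assert pattern.match(f"/_matrix/client/{prefix}/login")
```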
+ + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -834,6 +837,7 @@ def assert_valid_next_link(hs: "HomeServer", next_link: str) -> None: class WhoamiRestServlet(RestServlet): PATTERNS = client_patterns("/account/whoami$") + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/account_data.py b/synapse/rest/client/account_data.py index e805196fec06..43193ad08644 100644 --- a/synapse/rest/client/account_data.py +++ b/synapse/rest/client/account_data.py @@ -38,6 +38,7 @@ class AccountDataServlet(RestServlet): PATTERNS = client_patterns( "/user/(?P[^/]*)/account_data/(?P[^/]*)" ) + CATEGORY = "Account data requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -136,6 +137,7 @@ class RoomAccountDataServlet(RestServlet): "/rooms/(?P[^/]*)" "/account_data/(?P[^/]*)" ) + CATEGORY = "Account data requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/devices.py b/synapse/rest/client/devices.py index dab4a77f7e7c..e97d0bf475ba 100644 --- a/synapse/rest/client/devices.py +++ b/synapse/rest/client/devices.py @@ -40,6 +40,7 @@ class DevicesRestServlet(RestServlet): PATTERNS = client_patterns("/devices$") + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -123,6 +124,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: class DeviceRestServlet(RestServlet): PATTERNS = client_patterns("/devices/(?P[^/]*)$") + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/events.py b/synapse/rest/client/events.py index 694d77d28730..3eca4fe21ff1 100644 --- a/synapse/rest/client/events.py +++ b/synapse/rest/client/events.py @@ -33,6 +33,7 @@ class EventStreamRestServlet(RestServlet): PATTERNS = client_patterns("/events$", v1=True) + CATEGORY = "Sync requests" DEFAULT_LONGPOLL_TIME_MS = 30000 @@ -76,6 +77,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: class EventRestServlet(RestServlet): PATTERNS = client_patterns("/events/(?P[^/]*)$", v1=True) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/filter.py b/synapse/rest/client/filter.py index 236199897ce0..ab7d8c94191b 100644 --- a/synapse/rest/client/filter.py +++ b/synapse/rest/client/filter.py @@ -31,6 +31,7 @@ class GetFilterRestServlet(RestServlet): PATTERNS = client_patterns("/user/(?P[^/]*)/filter/(?P[^/]*)") + CATEGORY = "Encryption requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -69,6 +70,7 @@ async def on_GET( class CreateFilterRestServlet(RestServlet): PATTERNS = client_patterns("/user/(?P[^/]*)/filter") + CATEGORY = "Encryption requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/initial_sync.py b/synapse/rest/client/initial_sync.py index 9b1bb8b52124..046a4364f2fd 100644 --- a/synapse/rest/client/initial_sync.py +++ b/synapse/rest/client/initial_sync.py @@ -28,6 +28,7 @@ # TODO: Needs unit testing class InitialSyncRestServlet(RestServlet): PATTERNS = client_patterns("/initialSync$", v1=True) + CATEGORY = "Sync requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py index 32bb8b9a91a6..6209b79b019e 100644 --- a/synapse/rest/client/keys.py +++ b/synapse/rest/client/keys.py @@ -89,6 +89,7 @@ class 
KeyUploadServlet(RestServlet): """ PATTERNS = client_patterns("/keys/upload(/(?P[^/]+))?$") + CATEGORY = "Encryption requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -182,6 +183,7 @@ class KeyQueryServlet(RestServlet): """ PATTERNS = client_patterns("/keys/query$") + CATEGORY = "Encryption requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -225,6 +227,7 @@ class KeyChangesServlet(RestServlet): """ PATTERNS = client_patterns("/keys/changes$") + CATEGORY = "Encryption requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -274,6 +277,7 @@ class OneTimeKeyServlet(RestServlet): """ PATTERNS = client_patterns("/keys/claim$") + CATEGORY = "Encryption requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/knock.py b/synapse/rest/client/knock.py index 4fa66904baa1..0dc7960872df 100644 --- a/synapse/rest/client/knock.py +++ b/synapse/rest/client/knock.py @@ -40,6 +40,7 @@ class KnockRoomAliasServlet(RestServlet): """ PATTERNS = client_patterns("/knock/(?P[^/]*)") + CATEGORY = "Event sending requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index 8adced41e5e9..b7e9c8f6b572 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -72,6 +72,8 @@ class LoginResponse(TypedDict, total=False): class LoginRestServlet(RestServlet): PATTERNS = client_patterns("/login$", v1=True) + CATEGORY = "Registration/login requests" + CAS_TYPE = "m.login.cas" SSO_TYPE = "m.login.sso" TOKEN_TYPE = "m.login.token" @@ -537,6 +539,7 @@ def _get_auth_flow_dict_for_idp(idp: SsoIdentityProvider) -> JsonDict: class RefreshTokenServlet(RestServlet): PATTERNS = client_patterns("/refresh$") + CATEGORY = "Registration/login requests" def __init__(self, hs: "HomeServer"): self._auth_handler = hs.get_auth_handler() @@ -590,6 +593,7 @@ class SsoRedirectServlet(RestServlet): + "/(r0|v3)/login/sso/redirect/(?P[A-Za-z0-9_.~-]+)$" ) ] + CATEGORY = "SSO requests needed for all SSO providers" def __init__(self, hs: "HomeServer"): # make sure that the relevant handlers are instantiated, so that they diff --git a/synapse/rest/client/presence.py b/synapse/rest/client/presence.py index 94dd4fe2f45c..8e193330f8bc 100644 --- a/synapse/rest/client/presence.py +++ b/synapse/rest/client/presence.py @@ -33,6 +33,7 @@ class PresenceStatusRestServlet(RestServlet): PATTERNS = client_patterns("/presence/(?P[^/]*)/status", v1=True) + CATEGORY = "Presence requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/profile.py b/synapse/rest/client/profile.py index e69fa0829dbd..493e1acea06e 100644 --- a/synapse/rest/client/profile.py +++ b/synapse/rest/client/profile.py @@ -29,6 +29,7 @@ class ProfileDisplaynameRestServlet(RestServlet): PATTERNS = client_patterns("/profile/(?P[^/]*)/displayname", v1=True) + CATEGORY = "Event sending requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -86,6 +87,7 @@ async def on_PUT( class ProfileAvatarURLRestServlet(RestServlet): PATTERNS = client_patterns("/profile/(?P[^/]*)/avatar_url", v1=True) + CATEGORY = "Event sending requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -142,6 +144,7 @@ async def on_PUT( class ProfileRestServlet(RestServlet): PATTERNS = client_patterns("/profile/(?P[^/]*)", v1=True) + CATEGORY = "Event sending requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git 
a/synapse/rest/client/push_rule.py b/synapse/rest/client/push_rule.py index ad5c10c99dd8..1147b6f8ecca 100644 --- a/synapse/rest/client/push_rule.py +++ b/synapse/rest/client/push_rule.py @@ -44,6 +44,9 @@ class PushRuleRestServlet(RestServlet): "Unrecognised request: You probably wanted a trailing slash" ) + WORKERS_DENIED_METHODS = ["PUT", "DELETE"] + CATEGORY = "Push rule requests" + def __init__(self, hs: "HomeServer"): super().__init__() self.auth = hs.get_auth() diff --git a/synapse/rest/client/read_marker.py b/synapse/rest/client/read_marker.py index 852838515cce..4f96e51eeb93 100644 --- a/synapse/rest/client/read_marker.py +++ b/synapse/rest/client/read_marker.py @@ -31,6 +31,7 @@ class ReadMarkerRestServlet(RestServlet): PATTERNS = client_patterns("/rooms/(?P[^/]*)/read_markers$") + CATEGORY = "Receipts requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/receipts.py b/synapse/rest/client/receipts.py index 28b7d30ea846..316e7b99821e 100644 --- a/synapse/rest/client/receipts.py +++ b/synapse/rest/client/receipts.py @@ -36,6 +36,7 @@ class ReceiptRestServlet(RestServlet): "/receipt/(?P[^/]*)" "/(?P[^/]*)$" ) + CATEGORY = "Receipts requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py index 4adb5271d22f..7f84a17e2964 100644 --- a/synapse/rest/client/register.py +++ b/synapse/rest/client/register.py @@ -367,6 +367,7 @@ class RegistrationTokenValidityRestServlet(RestServlet): f"/register/{LoginType.REGISTRATION_TOKEN}/validity", releases=("v1",), ) + CATEGORY = "Registration/login requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -395,6 +396,7 @@ async def on_GET(self, request: Request) -> Tuple[int, JsonDict]: class RegisterRestServlet(RestServlet): PATTERNS = client_patterns("/register$") + CATEGORY = "Registration/login requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/relations.py b/synapse/rest/client/relations.py index 7456d6f50724..b8b296bc0cb8 100644 --- a/synapse/rest/client/relations.py +++ b/synapse/rest/client/relations.py @@ -42,6 +42,7 @@ class RelationPaginationServlet(RestServlet): "(/(?P[^/]*)(/(?P[^/]*))?)?$", releases=("v1",), ) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -84,6 +85,7 @@ async def on_GET( class ThreadsServlet(RestServlet): PATTERNS = (re.compile("^/_matrix/client/v1/rooms/(?P[^/]*)/threads"),) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py index 129b6fe6b07d..c0705d42914e 100644 --- a/synapse/rest/client/room.py +++ b/synapse/rest/client/room.py @@ -140,7 +140,7 @@ def __init__(self, hs: "HomeServer"): class RoomCreateRestServlet(TransactionRestServlet): - # No PATTERN; we have custom dispatch rules here + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__(hs) @@ -180,6 +180,8 @@ def get_room_config(self, request: Request) -> JsonDict: # TODO: Needs unit testing for generic events class RoomStateEventRestServlet(RestServlet): + CATEGORY = "Event sending requests" + def __init__(self, hs: "HomeServer"): super().__init__() self.event_creation_handler = hs.get_event_creation_handler() @@ -323,6 +325,8 @@ async def on_PUT( # TODO: Needs unit testing for generic events + feedback class RoomSendEventRestServlet(TransactionRestServlet): + CATEGORY = 
"Event sending requests" + def __init__(self, hs: "HomeServer"): super().__init__(hs) self.event_creation_handler = hs.get_event_creation_handler() @@ -398,6 +402,8 @@ async def on_PUT( # TODO: Needs unit testing for room ID + alias joins class JoinRoomAliasServlet(ResolveRoomIdMixin, TransactionRestServlet): + CATEGORY = "Event sending requests" + def __init__(self, hs: "HomeServer"): super().__init__(hs) super(ResolveRoomIdMixin, self).__init__(hs) # ensure the Mixin is set up @@ -460,6 +466,7 @@ async def on_PUT( # TODO: Needs unit testing class PublicRoomListRestServlet(RestServlet): PATTERNS = client_patterns("/publicRooms$", v1=True) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -578,6 +585,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: # TODO: Needs unit testing class RoomMemberListRestServlet(RestServlet): PATTERNS = client_patterns("/rooms/(?P[^/]*)/members$", v1=True) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -633,6 +641,7 @@ async def on_GET( # except it does custom AS logic and has a simpler return format class JoinedRoomMemberListRestServlet(RestServlet): PATTERNS = client_patterns("/rooms/(?P[^/]*)/joined_members$", v1=True) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -654,6 +663,10 @@ async def on_GET( # TODO: Needs better unit testing class RoomMessageListRestServlet(RestServlet): PATTERNS = client_patterns("/rooms/(?P[^/]*)/messages$", v1=True) + # TODO The routing information should be exposed programatically. + # I want to do this but for now I felt bad about leaving this without + # at least a visible warning on it. + CATEGORY = "Client API requests (ALL FOR SAME ROOM MUST GO TO SAME WORKER)" def __init__(self, hs: "HomeServer"): super().__init__() @@ -720,6 +733,7 @@ async def on_GET( # TODO: Needs unit testing class RoomStateRestServlet(RestServlet): PATTERNS = client_patterns("/rooms/(?P[^/]*)/state$", v1=True) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -742,6 +756,7 @@ async def on_GET( # TODO: Needs unit testing class RoomInitialSyncRestServlet(RestServlet): PATTERNS = client_patterns("/rooms/(?P[^/]*)/initialSync$", v1=True) + CATEGORY = "Sync requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -766,6 +781,7 @@ class RoomEventServlet(RestServlet): PATTERNS = client_patterns( "/rooms/(?P[^/]*)/event/(?P[^/]*)$", v1=True ) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -858,6 +874,7 @@ class RoomEventContextServlet(RestServlet): PATTERNS = client_patterns( "/rooms/(?P[^/]*)/context/(?P[^/]*)$", v1=True ) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -958,6 +975,8 @@ async def on_PUT( # TODO: Needs unit testing class RoomMembershipRestServlet(TransactionRestServlet): + CATEGORY = "Event sending requests" + def __init__(self, hs: "HomeServer"): super().__init__(hs) self.room_member_handler = hs.get_room_member_handler() @@ -1071,6 +1090,8 @@ async def on_PUT( class RoomRedactEventRestServlet(TransactionRestServlet): + CATEGORY = "Event sending requests" + def __init__(self, hs: "HomeServer"): super().__init__(hs) self.event_creation_handler = hs.get_event_creation_handler() @@ -1164,6 +1185,7 @@ class RoomTypingRestServlet(RestServlet): PATTERNS = client_patterns( "/rooms/(?P[^/]*)/typing/(?P[^/]*)$", v1=True ) + CATEGORY 
= "The typing stream" def __init__(self, hs: "HomeServer"): super().__init__() @@ -1195,7 +1217,7 @@ async def on_PUT( # Limit timeout to stop people from setting silly typing timeouts. timeout = min(content.get("timeout", 30000), 120000) - # Defer getting the typing handler since it will raise on workers. + # Defer getting the typing handler since it will raise on WORKER_PATTERNS. typing_handler = self.hs.get_typing_writer_handler() try: @@ -1224,6 +1246,7 @@ class RoomAliasListServlet(RestServlet): r"/rooms/(?P[^/]*)/aliases" ), ] + list(client_patterns("/rooms/(?P[^/]*)/aliases$", unstable=False)) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -1244,6 +1267,7 @@ async def on_GET( class SearchRestServlet(RestServlet): PATTERNS = client_patterns("/search$", v1=True) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -1263,6 +1287,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: class JoinedRoomsRestServlet(RestServlet): PATTERNS = client_patterns("/joined_rooms$", v1=True) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -1334,6 +1359,7 @@ class TimestampLookupRestServlet(RestServlet): PATTERNS = ( re.compile("^/_matrix/client/v1/rooms/(?P[^/]*)/timestamp_to_event$"), ) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -1365,6 +1391,8 @@ async def on_GET( class RoomHierarchyRestServlet(RestServlet): PATTERNS = (re.compile("^/_matrix/client/v1/rooms/(?P[^/]*)/hierarchy$"),) + WORKERS = PATTERNS + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -1405,6 +1433,7 @@ class RoomSummaryRestServlet(ResolveRoomIdMixin, RestServlet): "/rooms/(?P[^/]*)/summary$" ), ) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__(hs) diff --git a/synapse/rest/client/room_batch.py b/synapse/rest/client/room_batch.py index ef284ecc1147..69f85112d8fe 100644 --- a/synapse/rest/client/room_batch.py +++ b/synapse/rest/client/room_batch.py @@ -69,6 +69,7 @@ class RoomBatchSendEventRestServlet(RestServlet): "/rooms/(?P[^/]*)/batch_send$" ), ) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/room_keys.py b/synapse/rest/client/room_keys.py index 4e7ffdb5551e..aad54f8c5497 100644 --- a/synapse/rest/client/room_keys.py +++ b/synapse/rest/client/room_keys.py @@ -37,6 +37,7 @@ class RoomKeysServlet(RestServlet): PATTERNS = client_patterns( "/room_keys/keys(/(?P[^/]+))?(/(?P[^/]+))?$" ) + CATEGORY = "Encryption requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -253,6 +254,7 @@ async def on_DELETE( class RoomKeysNewVersionServlet(RestServlet): PATTERNS = client_patterns("/room_keys/version$") + CATEGORY = "Encryption requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -328,6 +330,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: class RoomKeysVersionServlet(RestServlet): PATTERNS = client_patterns("/room_keys/version/(?P[^/]+)$") + CATEGORY = "Encryption requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/sendtodevice.py b/synapse/rest/client/sendtodevice.py index 110af6df473f..7dfa3a25969c 100644 --- a/synapse/rest/client/sendtodevice.py +++ b/synapse/rest/client/sendtodevice.py @@ -35,6 +35,7 @@ class SendToDeviceRestServlet(servlet.RestServlet): 
PATTERNS = client_patterns( "/sendToDevice/(?P[^/]*)/(?P[^/]*)$" ) + CATEGORY = "The to_device stream" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index e578b26fa3d9..03b05789456b 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -87,6 +87,7 @@ class SyncRestServlet(RestServlet): PATTERNS = client_patterns("/sync$") ALLOWED_PRESENCE = {"online", "offline", "unavailable"} + CATEGORY = "Sync requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/tags.py b/synapse/rest/client/tags.py index dde08417a407..94bd51fe8fa3 100644 --- a/synapse/rest/client/tags.py +++ b/synapse/rest/client/tags.py @@ -37,6 +37,7 @@ class TagListServlet(RestServlet): PATTERNS = client_patterns( "/user/(?P[^/]*)/rooms/(?P[^/]*)/tags$" ) + CATEGORY = "Account data requests" def __init__(self, hs: "HomeServer"): super().__init__() @@ -64,6 +65,7 @@ class TagServlet(RestServlet): PATTERNS = client_patterns( "/user/(?P[^/]*)/rooms/(?P[^/]*)/tags/(?P[^/]*)" ) + CATEGORY = "Account data requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/user_directory.py b/synapse/rest/client/user_directory.py index 4670fad60868..5136497c777d 100644 --- a/synapse/rest/client/user_directory.py +++ b/synapse/rest/client/user_directory.py @@ -31,6 +31,7 @@ class UserDirectorySearchRestServlet(RestServlet): PATTERNS = client_patterns("/user_directory/search$") + CATEGORY = "User directory search requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index ec171582b3d6..59aed66464e5 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -34,6 +34,7 @@ class VersionsRestServlet(RestServlet): PATTERNS = [re.compile("^/_matrix/client/versions$")] + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/client/voip.py b/synapse/rest/client/voip.py index ea7e025156da..133790c97c4d 100644 --- a/synapse/rest/client/voip.py +++ b/synapse/rest/client/voip.py @@ -29,6 +29,7 @@ class VoipRestServlet(RestServlet): PATTERNS = client_patterns("/voip/turnServer$", v1=True) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py index 19820886f536..3bdb6ec9098d 100644 --- a/synapse/rest/key/v2/remote_key_resource.py +++ b/synapse/rest/key/v2/remote_key_resource.py @@ -93,6 +93,8 @@ class RemoteKey(RestServlet): } """ + CATEGORY = "Federation requests" + def __init__(self, hs: "HomeServer"): self.fetcher = ServerKeyFetcher(hs) self.store = hs.get_datastores().main From e6af49fbea939d9e69ed05e0a0ced5948c722ea4 Mon Sep 17 00:00:00 2001 From: Nick Mills-Barrett Date: Fri, 24 Mar 2023 11:44:01 +0000 Subject: [PATCH 012/106] Reintroduce membership tables event stream ordering (#15128) * Add `event_stream_ordering` column to membership state tables Specifically this adds the column to `current_state_events`, `local_current_membership` and `room_memberships`. Each of these tables is regularly joined with the `events` table to get the stream ordering and denormalising this into each table will yield significant query performance improvements once used. 
* Make denormalised `event_stream_ordering` columns foreign keys * Add comment in schema file explaining new denormalised columns * Add triggers to enforce consistency of `event_stream_ordering` columns * Re-order purge room tables to account for foreign keys * Bump schema version to 75 Co-authored-by: David Robertson Co-authored-by: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> --- changelog.d/15128.misc | 1 + synapse/storage/databases/main/events.py | 23 ++++-- .../storage/databases/main/purge_events.py | 6 +- synapse/storage/schema/__init__.py | 14 +++- ...embership_tables_event_stream_ordering.sql | 20 +++++ ...p_tables_event_stream_ordering_triggers.py | 79 +++++++++++++++++++ 6 files changed, 131 insertions(+), 12 deletions(-) create mode 100644 changelog.d/15128.misc create mode 100644 synapse/storage/schema/main/delta/74/01membership_tables_event_stream_ordering.sql create mode 100644 synapse/storage/schema/main/delta/74/02membership_tables_event_stream_ordering_triggers.py diff --git a/changelog.d/15128.misc b/changelog.d/15128.misc new file mode 100644 index 000000000000..c09911e48d2f --- /dev/null +++ b/changelog.d/15128.misc @@ -0,0 +1 @@ +Add denormalised event stream ordering column to membership state tables for future use. Contributed by Nick @ Beeper (@fizzadar). diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index a8a4ed4436f6..193959b25022 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -1126,11 +1126,15 @@ def _update_current_state_txn( # been inserted into room_memberships. txn.execute_batch( """INSERT INTO current_state_events - (room_id, type, state_key, event_id, membership) - VALUES (?, ?, ?, ?, (SELECT membership FROM room_memberships WHERE event_id = ?)) + (room_id, type, state_key, event_id, membership, event_stream_ordering) + VALUES ( + ?, ?, ?, ?, + (SELECT membership FROM room_memberships WHERE event_id = ?), + (SELECT stream_ordering FROM events WHERE event_id = ?) + ) """, [ - (room_id, key[0], key[1], ev_id, ev_id) + (room_id, key[0], key[1], ev_id, ev_id, ev_id) for key, ev_id in to_insert.items() ], ) @@ -1157,11 +1161,15 @@ def _update_current_state_txn( if to_insert: txn.execute_batch( """INSERT INTO local_current_membership - (room_id, user_id, event_id, membership) - VALUES (?, ?, ?, (SELECT membership FROM room_memberships WHERE event_id = ?)) + (room_id, user_id, event_id, membership, event_stream_ordering) + VALUES ( + ?, ?, ?, + (SELECT membership FROM room_memberships WHERE event_id = ?), + (SELECT stream_ordering FROM events WHERE event_id = ?) 
+ ) """, [ - (room_id, key[1], ev_id, ev_id) + (room_id, key[1], ev_id, ev_id, ev_id) for key, ev_id in to_insert.items() if key[0] == EventTypes.Member and self.is_mine_id(key[1]) ], @@ -1769,6 +1777,7 @@ def _store_room_members_txn( table="room_memberships", keys=( "event_id", + "event_stream_ordering", "user_id", "sender", "room_id", @@ -1779,6 +1788,7 @@ def _store_room_members_txn( values=[ ( event.event_id, + event.internal_metadata.stream_ordering, event.state_key, event.user_id, event.room_id, @@ -1811,6 +1821,7 @@ def _store_room_members_txn( keyvalues={"room_id": event.room_id, "user_id": event.state_key}, values={ "event_id": event.event_id, + "event_stream_ordering": event.internal_metadata.stream_ordering, "membership": event.membership, }, ) diff --git a/synapse/storage/databases/main/purge_events.py b/synapse/storage/databases/main/purge_events.py index 7a7c0d9c753d..efbd3e75d99e 100644 --- a/synapse/storage/databases/main/purge_events.py +++ b/synapse/storage/databases/main/purge_events.py @@ -428,14 +428,16 @@ def _purge_room_txn(self, txn: LoggingTransaction, room_id: str) -> List[int]: "partial_state_events", "partial_state_rooms_servers", "partial_state_rooms", + # Note: the _membership(s) tables have foreign keys to the `events` table + # so must be deleted first. + "local_current_membership", + "room_memberships", "events", "federation_inbound_events_staging", - "local_current_membership", "receipts_graph", "receipts_linearized", "room_aliases", "room_depth", - "room_memberships", "room_stats_state", "room_stats_current", "room_stats_earliest_token", diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index d3103a6c7a05..a28f2b997cef 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -SCHEMA_VERSION = 74 # remember to update the list below when updating +SCHEMA_VERSION = 75 # remember to update the list below when updating """Represents the expectations made by the codebase about the database schema This should be incremented whenever the codebase changes its requirements on the @@ -91,13 +91,19 @@ - A query on `event_stream_ordering` column has now been disambiguated (i.e. the codebase can handle the `current_state_events`, `local_current_memberships` and `room_memberships` tables having an `event_stream_ordering` column). + +Changes in SCHEMA_VERSION = 75: + - The `event_stream_ordering` column in membership tables (`current_state_events`, + `local_current_membership` & `room_memberships`) is now being populated for new + rows. When the background job to populate historical rows lands this will + become the compat schema version. """ SCHEMA_COMPAT_VERSION = ( - # The threads_id column must exist for event_push_actions, event_push_summary, - # receipts_linearized, and receipts_graph. - 73 + # Queries against `event_stream_ordering` columns in membership tables must + # be disambiguated. 
+    74
 )
 """Limit on how far the synapse codebase can be rolled back without breaking db compat

diff --git a/synapse/storage/schema/main/delta/74/01membership_tables_event_stream_ordering.sql b/synapse/storage/schema/main/delta/74/01membership_tables_event_stream_ordering.sql
new file mode 100644
index 000000000000..e2608f3a2e83
--- /dev/null
+++ b/synapse/storage/schema/main/delta/74/01membership_tables_event_stream_ordering.sql
@@ -0,0 +1,20 @@
+/* Copyright 2022 Beeper
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- Each of these is a denormalised copy of `stream_ordering` from the corresponding row in `events`, which
+-- we use to improve database performance by reducing JOINs.
+ALTER TABLE current_state_events ADD COLUMN event_stream_ordering BIGINT REFERENCES events(stream_ordering);
+ALTER TABLE local_current_membership ADD COLUMN event_stream_ordering BIGINT REFERENCES events(stream_ordering);
+ALTER TABLE room_memberships ADD COLUMN event_stream_ordering BIGINT REFERENCES events(stream_ordering);
diff --git a/synapse/storage/schema/main/delta/74/02membership_tables_event_stream_ordering_triggers.py b/synapse/storage/schema/main/delta/74/02membership_tables_event_stream_ordering_triggers.py
new file mode 100644
index 000000000000..e32e9083b359
--- /dev/null
+++ b/synapse/storage/schema/main/delta/74/02membership_tables_event_stream_ordering_triggers.py
@@ -0,0 +1,79 @@
+# Copyright 2022 Beeper
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""
+This migration adds triggers to the room membership tables to enforce consistency.
+Triggers cannot be expressed in .sql files, so we have to use a separate file.
+"""
+from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine
+from synapse.storage.types import Cursor
+
+
+def run_create(cur: Cursor, database_engine: BaseDatabaseEngine, *args, **kwargs):
+    # Complain if the `event_stream_ordering` in membership tables doesn't match
+    # the `stream_ordering` of the row with the same `event_id` in `events`.
+    if isinstance(database_engine, Sqlite3Engine):
+        for table in (
+            "current_state_events",
+            "local_current_membership",
+            "room_memberships",
+        ):
+            cur.execute(
+                f"""
+                CREATE TRIGGER IF NOT EXISTS {table}_bad_event_stream_ordering
+                BEFORE INSERT ON {table}
+                FOR EACH ROW
+                BEGIN
+                    SELECT RAISE(ABORT, 'Incorrect event_stream_ordering in {table}')
+                    WHERE EXISTS (
+                        SELECT 1 FROM events
+                        WHERE events.event_id = NEW.event_id
+                        AND events.stream_ordering != NEW.event_stream_ordering
+                    );
+                END;
+                """
+            )
+    elif isinstance(database_engine, PostgresEngine):
+        cur.execute(
+            """
+            CREATE OR REPLACE FUNCTION check_event_stream_ordering() RETURNS trigger AS $BODY$
+            BEGIN
+                IF EXISTS (
+                    SELECT 1 FROM events
+                    WHERE events.event_id = NEW.event_id
+                    AND events.stream_ordering != NEW.event_stream_ordering
+                ) THEN
+                    RAISE EXCEPTION 'Incorrect event_stream_ordering';
+                END IF;
+                RETURN NEW;
+            END;
+            $BODY$ LANGUAGE plpgsql;
+            """
+        )
+
+        for table in (
+            "current_state_events",
+            "local_current_membership",
+            "room_memberships",
+        ):
+            cur.execute(
+                f"""
+                CREATE TRIGGER check_event_stream_ordering BEFORE INSERT OR UPDATE ON {table}
+                FOR EACH ROW
+                EXECUTE PROCEDURE check_event_stream_ordering()
+                """
+            )
+    else:
+        raise NotImplementedError("Unknown database engine")

From 68a671731207645f693e4e48676781b9a1acb838 Mon Sep 17 00:00:00 2001
From: Patrick Cloke
Date: Fri, 24 Mar 2023 08:31:14 -0400
Subject: [PATCH 013/106] Reject mentions on the C-S API which are invalid.
 (#15311)

Invalid mentions data received over the Client-Server API should be
rejected with a 400 error. This will hopefully stop clients from sending
invalid data, although it does not help with data received over federation.
---
 changelog.d/15311.misc                      |  1 +
 synapse/events/validator.py                 | 42 ++++++---
 synapse/http/servlet.py                     | 22 +++--
 tests/push/test_bulk_push_rule_evaluator.py | 94 ++++++++++++---------
 4 files changed, 105 insertions(+), 54 deletions(-)
 create mode 100644 changelog.d/15311.misc

diff --git a/changelog.d/15311.misc b/changelog.d/15311.misc
new file mode 100644
index 000000000000..ce03cb95238f
--- /dev/null
+++ b/changelog.d/15311.misc
@@ -0,0 +1 @@
+Reject events with an invalid "mentions" property per [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952).
diff --git a/synapse/events/validator.py b/synapse/events/validator.py
index 6f0e4386d3f1..47203209db2f 100644
--- a/synapse/events/validator.py
+++ b/synapse/events/validator.py
@@ -12,11 +12,17 @@
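Before the validator changes that follow, the trigger pattern installed by the migration above is easy to exercise in isolation. A toy reproduction of the SQLite branch, using throwaway two-column tables rather than Synapse's real schema:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript(
    """
    CREATE TABLE events (event_id TEXT PRIMARY KEY, stream_ordering BIGINT);
    CREATE TABLE room_memberships (event_id TEXT, event_stream_ordering BIGINT);

    -- Same shape as the trigger created by the migration above.
    CREATE TRIGGER room_memberships_bad_event_stream_ordering
    BEFORE INSERT ON room_memberships
    FOR EACH ROW
    BEGIN
        SELECT RAISE(ABORT, 'Incorrect event_stream_ordering in room_memberships')
        WHERE EXISTS (
            SELECT 1 FROM events
            WHERE events.event_id = NEW.event_id
            AND events.stream_ordering != NEW.event_stream_ordering
        );
    END;

    INSERT INTO events VALUES ('$ev1', 7);
    INSERT INTO room_memberships VALUES ('$ev1', 7);  -- consistent: accepted
    """
)

try:
    conn.execute("INSERT INTO room_memberships VALUES ('$ev1', 8)")  # mismatch
except sqlite3.IntegrityError as e:
    print(e)  # Incorrect event_stream_ordering in room_memberships
```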
import collections.abc -from typing import Iterable, Type, Union, cast +from typing import Iterable, List, Type, Union, cast import jsonschema +from pydantic import Field, StrictBool, StrictStr -from synapse.api.constants import MAX_ALIAS_LENGTH, EventTypes, Membership +from synapse.api.constants import ( + MAX_ALIAS_LENGTH, + EventContentFields, + EventTypes, + Membership, +) from synapse.api.errors import Codes, SynapseError from synapse.api.room_versions import EventFormatVersions from synapse.config.homeserver import HomeServerConfig @@ -28,6 +34,8 @@ validate_canonicaljson, ) from synapse.federation.federation_server import server_matches_acl_event +from synapse.http.servlet import validate_json_object +from synapse.rest.models import RequestBodyModel from synapse.types import EventID, JsonDict, RoomID, UserID @@ -88,27 +96,27 @@ def validate_new(self, event: EventBase, config: HomeServerConfig) -> None: Codes.INVALID_PARAM, ) - if event.type == EventTypes.Retention: + elif event.type == EventTypes.Retention: self._validate_retention(event) - if event.type == EventTypes.ServerACL: + elif event.type == EventTypes.ServerACL: if not server_matches_acl_event(config.server.server_name, event): raise SynapseError( 400, "Can't create an ACL event that denies the local server" ) - if event.type == EventTypes.PowerLevels: + elif event.type == EventTypes.PowerLevels: try: jsonschema.validate( instance=event.content, schema=POWER_LEVELS_SCHEMA, - cls=plValidator, + cls=POWER_LEVELS_VALIDATOR, ) except jsonschema.ValidationError as e: if e.path: # example: "users_default": '0' is not of type 'integer' # cast safety: path entries can be integers, if we fail to validate - # items in an array. However the POWER_LEVELS_SCHEMA doesn't expect + # items in an array. However, the POWER_LEVELS_SCHEMA doesn't expect # to see any arrays. message = ( '"' + cast(str, e.path[-1]) + '": ' + e.message # noqa: B306 @@ -125,6 +133,15 @@ def validate_new(self, event: EventBase, config: HomeServerConfig) -> None: errcode=Codes.BAD_JSON, ) + # If the event contains a mentions key, validate it. + if ( + EventContentFields.MSC3952_MENTIONS in event.content + and config.experimental.msc3952_intentional_mentions + ): + validate_json_object( + event.content[EventContentFields.MSC3952_MENTIONS], Mentions + ) + def _validate_retention(self, event: EventBase) -> None: """Checks that an event that defines the retention policy for a room respects the format enforced by the spec. @@ -253,10 +270,15 @@ def _ensure_state_event(self, event: Union[EventBase, EventBuilder]) -> None: } +class Mentions(RequestBodyModel): + user_ids: List[StrictStr] = Field(default_factory=list) + room: StrictBool = False + + # This could return something newer than Draft 7, but that's the current "latest" # validator. 
-def _create_power_level_validator() -> Type[jsonschema.Draft7Validator]: - validator = jsonschema.validators.validator_for(POWER_LEVELS_SCHEMA) +def _create_validator(schema: JsonDict) -> Type[jsonschema.Draft7Validator]: + validator = jsonschema.validators.validator_for(schema) # by default jsonschema does not consider a immutabledict to be an object so # we need to use a custom type checker @@ -268,4 +290,4 @@ def _create_power_level_validator() -> Type[jsonschema.Draft7Validator]: return jsonschema.validators.extend(validator, type_checker=type_checker) -plValidator = _create_power_level_validator() +POWER_LEVELS_VALIDATOR = _create_validator(POWER_LEVELS_SCHEMA) diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index 0070bd2940ea..fc6279362881 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -778,17 +778,13 @@ def parse_json_object_from_request( Model = TypeVar("Model", bound=BaseModel) -def parse_and_validate_json_object_from_request( - request: Request, model_type: Type[Model] -) -> Model: - """Parse a JSON object from the body of a twisted HTTP request, then deserialise and - validate using the given pydantic model. +def validate_json_object(content: JsonDict, model_type: Type[Model]) -> Model: + """Validate a deserialized JSON object using the given pydantic model. Raises: SynapseError if the request body couldn't be decoded as JSON or if it wasn't a JSON object. """ - content = parse_json_object_from_request(request, allow_empty_body=False) try: instance = model_type.parse_obj(content) except ValidationError as e: @@ -811,6 +807,20 @@ def parse_and_validate_json_object_from_request( return instance +def parse_and_validate_json_object_from_request( + request: Request, model_type: Type[Model] +) -> Model: + """Parse a JSON object from the body of a twisted HTTP request, then deserialise and + validate using the given pydantic model. + + Raises: + SynapseError if the request body couldn't be decoded as JSON or + if it wasn't a JSON object. + """ + content = parse_json_object_from_request(request, allow_empty_body=False) + return validate_json_object(content, model_type) + + def assert_params_in_dict(body: JsonDict, required: Iterable[str]) -> None: absent = [] for k in required: diff --git a/tests/push/test_bulk_push_rule_evaluator.py b/tests/push/test_bulk_push_rule_evaluator.py index 46df0102f77a..9501096a7732 100644 --- a/tests/push/test_bulk_push_rule_evaluator.py +++ b/tests/push/test_bulk_push_rule_evaluator.py @@ -243,22 +243,28 @@ def test_user_mentions(self) -> None: ) # Non-dict mentions should be ignored. - mentions: Any - for mentions in (None, True, False, 1, "foo", []): - self.assertFalse( - self._create_and_process( - bulk_evaluator, {EventContentFields.MSC3952_MENTIONS: mentions} + # + # Avoid C-S validation as these aren't expected. + with patch( + "synapse.events.validator.EventValidator.validate_new", + new=lambda s, event, config: True, + ): + mentions: Any + for mentions in (None, True, False, 1, "foo", []): + self.assertFalse( + self._create_and_process( + bulk_evaluator, {EventContentFields.MSC3952_MENTIONS: mentions} + ) ) - ) - # A non-list should be ignored. - for mentions in (None, True, False, 1, "foo", {}): - self.assertFalse( - self._create_and_process( - bulk_evaluator, - {EventContentFields.MSC3952_MENTIONS: {"user_ids": mentions}}, + # A non-list should be ignored. 
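+            # ("user_ids" must be a list of Matrix user IDs, so e.g. a bare
+            # string or a dict in its place should not cause a notification.)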
+ for mentions in (None, True, False, 1, "foo", {}): + self.assertFalse( + self._create_and_process( + bulk_evaluator, + {EventContentFields.MSC3952_MENTIONS: {"user_ids": mentions}}, + ) ) - ) # The Matrix ID appearing anywhere in the list should notify. self.assertTrue( @@ -291,26 +297,32 @@ def test_user_mentions(self) -> None: ) # Invalid entries in the list are ignored. - self.assertFalse( - self._create_and_process( - bulk_evaluator, - { - EventContentFields.MSC3952_MENTIONS: { - "user_ids": [None, True, False, {}, []] - } - }, + # + # Avoid C-S validation as these aren't expected. + with patch( + "synapse.events.validator.EventValidator.validate_new", + new=lambda s, event, config: True, + ): + self.assertFalse( + self._create_and_process( + bulk_evaluator, + { + EventContentFields.MSC3952_MENTIONS: { + "user_ids": [None, True, False, {}, []] + } + }, + ) ) - ) - self.assertTrue( - self._create_and_process( - bulk_evaluator, - { - EventContentFields.MSC3952_MENTIONS: { - "user_ids": [None, True, False, {}, [], self.alice] - } - }, + self.assertTrue( + self._create_and_process( + bulk_evaluator, + { + EventContentFields.MSC3952_MENTIONS: { + "user_ids": [None, True, False, {}, [], self.alice] + } + }, + ) ) - ) # The legacy push rule should not mention if the mentions field exists. self.assertFalse( @@ -351,14 +363,20 @@ def test_room_mentions(self) -> None: ) # Invalid data should not notify. - mentions: Any - for mentions in (None, False, 1, "foo", [], {}): - self.assertFalse( - self._create_and_process( - bulk_evaluator, - {EventContentFields.MSC3952_MENTIONS: {"room": mentions}}, + # + # Avoid C-S validation as these aren't expected. + with patch( + "synapse.events.validator.EventValidator.validate_new", + new=lambda s, event, config: True, + ): + mentions: Any + for mentions in (None, False, 1, "foo", [], {}): + self.assertFalse( + self._create_and_process( + bulk_evaluator, + {EventContentFields.MSC3952_MENTIONS: {"room": mentions}}, + ) ) - ) # The legacy push rule should not mention if the mentions field exists. self.assertFalse( From 5b70f240cf70b390db7e74ab614ace108fc08d70 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 24 Mar 2023 16:09:39 +0100 Subject: [PATCH 014/106] Make cleaning up pushers depend on the device_id instead of the token_id (#15280) This makes it so that we rely on the `device_id` to delete pushers on logout, instead of relying on the `access_token_id`. This ensures we're not removing pushers on token refresh, and prepares for a world without access token IDs (also known as the OIDC). This actually runs the `set_device_id_for_pushers` background update, which was forgotten in #13831. Note that for backwards compatibility it still deletes pushers based on the `access_token` until the background update finishes. 
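
To illustrate the shape of the change for callers (this sketch is abridged
from the register handler diff in this patch; the remaining arguments are
unchanged), the device ID is now resolved from the access token and passed
in place of the token ID:

    user_tuple = await self.store.get_user_by_access_token(token)
    # The token better still exist.
    assert user_tuple
    device_id = user_tuple.device_id

    await self.pusher_pool.add_or_update_pusher(
        user_id=user_id,
        device_id=device_id,  # previously: access_token=user_tuple.token_id
        kind="email",
        app_id="m.email",
        app_display_name="Email Notifications",
        # ...remaining arguments as before...
    )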
--- changelog.d/15280.misc | 1 + synapse/_scripts/synapse_port_db.py | 6 +- synapse/handlers/auth.py | 8 ++- synapse/handlers/device.py | 2 + synapse/handlers/register.py | 4 +- synapse/push/__init__.py | 7 ++- synapse/push/pusherpool.py | 58 ++++++++++++++----- synapse/rest/admin/users.py | 1 - synapse/rest/client/pusher.py | 1 - synapse/storage/databases/main/pusher.py | 40 ++++++++++--- ...02_set_device_id_for_pushers_bg_update.sql | 19 ++++++ tests/push/test_email.py | 6 +- tests/push/test_http.py | 46 +++++++-------- tests/replication/test_pusher_shard.py | 4 +- tests/rest/admin/test_user.py | 4 +- 15 files changed, 142 insertions(+), 65 deletions(-) create mode 100644 changelog.d/15280.misc create mode 100644 synapse/storage/schema/main/delta/74/02_set_device_id_for_pushers_bg_update.sql diff --git a/changelog.d/15280.misc b/changelog.d/15280.misc new file mode 100644 index 000000000000..41d56b0cf0b4 --- /dev/null +++ b/changelog.d/15280.misc @@ -0,0 +1 @@ +Make the pushers rely on the `device_id` instead of the `access_token_id` for various operations. diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index 94b86c1d6ff5..1dcb397ba456 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -68,7 +68,10 @@ MediaRepositoryBackgroundUpdateStore, ) from synapse.storage.databases.main.presence import PresenceBackgroundUpdateStore -from synapse.storage.databases.main.pusher import PusherWorkerStore +from synapse.storage.databases.main.pusher import ( + PusherBackgroundUpdatesStore, + PusherWorkerStore, +) from synapse.storage.databases.main.receipts import ReceiptsBackgroundUpdateStore from synapse.storage.databases.main.registration import ( RegistrationBackgroundUpdateStore, @@ -226,6 +229,7 @@ class Store( AccountDataWorkerStore, PushRuleStore, PusherWorkerStore, + PusherBackgroundUpdatesStore, PresenceBackgroundUpdateStore, ReceiptsBackgroundUpdateStore, RelationsWorkerStore, diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 308e38edea2d..1e89447044f5 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -1504,8 +1504,10 @@ async def delete_access_token(self, access_token: str) -> None: ) # delete pushers associated with this access token + # XXX(quenting): This is only needed until the 'set_device_id_for_pushers' + # background update completes. if token.token_id is not None: - await self.hs.get_pusherpool().remove_pushers_by_access_token( + await self.hs.get_pusherpool().remove_pushers_by_access_tokens( token.user_id, (token.token_id,) ) @@ -1535,7 +1537,9 @@ async def delete_access_tokens_for_user( ) # delete pushers associated with the access tokens - await self.hs.get_pusherpool().remove_pushers_by_access_token( + # XXX(quenting): This is only needed until the 'set_device_id_for_pushers' + # background update completes. + await self.hs.get_pusherpool().remove_pushers_by_access_tokens( user_id, (token_id for _, token_id, _ in tokens_and_devices) ) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 6f7963df43ae..9ded6389acdb 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -503,6 +503,8 @@ async def delete_devices(self, user_id: str, device_ids: List[str]) -> None: else: raise + await self.hs.get_pusherpool().remove_pushers_by_devices(user_id, device_ids) + # Delete data specific to each device. Not optimised as it is not # considered as part of a critical path. 
for device_id in device_ids: diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 6b110dcb6e4d..c8bf2439afb5 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -1013,11 +1013,11 @@ async def _register_email_threepid( user_tuple = await self.store.get_user_by_access_token(token) # The token better still exist. assert user_tuple - token_id = user_tuple.token_id + device_id = user_tuple.device_id await self.pusher_pool.add_or_update_pusher( user_id=user_id, - access_token=token_id, + device_id=device_id, kind="email", app_id="m.email", app_display_name="Email Notifications", diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py index a0c760239db8..9e3a98741a4f 100644 --- a/synapse/push/__init__.py +++ b/synapse/push/__init__.py @@ -103,7 +103,7 @@ class PusherConfig: id: Optional[str] user_name: str - access_token: Optional[int] + profile_tag: str kind: str app_id: str @@ -119,6 +119,11 @@ class PusherConfig: enabled: bool device_id: Optional[str] + # XXX(quenting): The access_token is not persisted anymore for new pushers, but we + # keep it when reading from the database, so that we don't get stale pushers + # while the "set_device_id_for_pushers" background update is running. + access_token: Optional[int] + def as_dict(self) -> Dict[str, Any]: """Information that can be retrieved about a pusher after creation.""" return { diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index e2648cbc93c9..6517e3566fae 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -25,7 +25,7 @@ from synapse.push import Pusher, PusherConfig, PusherConfigException from synapse.push.pusher import PusherFactory from synapse.replication.http.push import ReplicationRemovePusherRestServlet -from synapse.types import JsonDict, RoomStreamToken +from synapse.types import JsonDict, RoomStreamToken, StrCollection from synapse.util.async_helpers import concurrently_execute from synapse.util.threepids import canonicalise_email @@ -97,7 +97,6 @@ def start(self) -> None: async def add_or_update_pusher( self, user_id: str, - access_token: Optional[int], kind: str, app_id: str, app_display_name: str, @@ -128,6 +127,22 @@ async def add_or_update_pusher( # stream ordering, so it will process pushes from this point onwards. last_stream_ordering = self.store.get_room_max_stream_ordering() + # Before we actually persist the pusher, we check if the user already has one + # for this app ID and pushkey. If so, we want to keep the access token and + # device ID in place, since this could be one device modifying + # (e.g. enabling/disabling) another device's pusher. + # XXX(quenting): Even though we're not persisting the access_token_id for new + # pushers anymore, we still need to copy existing access_token_ids over when + # updating a pusher, in case the "set_device_id_for_pushers" background update + # hasn't run yet. 
+ access_token_id = None + existing_config = await self._get_pusher_config_for_user_by_app_id_and_pushkey( + user_id, app_id, pushkey + ) + if existing_config: + device_id = existing_config.device_id + access_token_id = existing_config.access_token + # we try to create the pusher just to validate the config: it # will then get pulled out of the database, # recreated, added and started: this means we have only one @@ -136,7 +151,6 @@ async def add_or_update_pusher( PusherConfig( id=None, user_name=user_id, - access_token=access_token, profile_tag=profile_tag, kind=kind, app_id=app_id, @@ -151,23 +165,12 @@ async def add_or_update_pusher( failing_since=None, enabled=enabled, device_id=device_id, + access_token=access_token_id, ) ) - # Before we actually persist the pusher, we check if the user already has one - # this app ID and pushkey. If so, we want to keep the access token and device ID - # in place, since this could be one device modifying (e.g. enabling/disabling) - # another device's pusher. - existing_config = await self._get_pusher_config_for_user_by_app_id_and_pushkey( - user_id, app_id, pushkey - ) - if existing_config: - access_token = existing_config.access_token - device_id = existing_config.device_id - await self.store.add_pusher( user_id=user_id, - access_token=access_token, kind=kind, app_id=app_id, app_display_name=app_display_name, @@ -180,6 +183,7 @@ async def add_or_update_pusher( profile_tag=profile_tag, enabled=enabled, device_id=device_id, + access_token_id=access_token_id, ) pusher = await self.process_pusher_change_by_id(app_id, pushkey, user_id) @@ -199,7 +203,7 @@ async def remove_pushers_by_app_id_and_pushkey_not_user( ) await self.remove_pusher(p.app_id, p.pushkey, p.user_name) - async def remove_pushers_by_access_token( + async def remove_pushers_by_access_tokens( self, user_id: str, access_tokens: Iterable[int] ) -> None: """Remove the pushers for a given user corresponding to a set of @@ -209,6 +213,8 @@ async def remove_pushers_by_access_token( user_id: user to remove pushers for access_tokens: access token *ids* to remove pushers for """ + # XXX(quenting): This is only needed until the "set_device_id_for_pushers" + # background update finishes tokens = set(access_tokens) for p in await self.store.get_pushers_by_user_id(user_id): if p.access_token in tokens: @@ -220,6 +226,26 @@ async def remove_pushers_by_access_token( ) await self.remove_pusher(p.app_id, p.pushkey, p.user_name) + async def remove_pushers_by_devices( + self, user_id: str, devices: StrCollection + ) -> None: + """Remove the pushers for a given user corresponding to a set of devices + + Args: + user_id: user to remove pushers for + devices: device IDs to remove pushers for + """ + device_ids = set(devices) + for p in await self.store.get_pushers_by_user_id(user_id): + if p.device_id in device_ids: + logger.info( + "Removing pusher for app id %s, pushkey %s, user %s", + p.app_id, + p.pushkey, + p.user_name, + ) + await self.remove_pusher(p.app_id, p.pushkey, p.user_name) + def on_new_notifications(self, max_token: RoomStreamToken) -> None: if not self.pushers: # nothing to do here. 
diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py
index 281e8fd0adc2..331f22511618 100644
--- a/synapse/rest/admin/users.py
+++ b/synapse/rest/admin/users.py
@@ -425,7 +425,6 @@ async def on_PUT(
         ):
             await self.pusher_pool.add_or_update_pusher(
                 user_id=user_id,
-                access_token=None,
                 kind="email",
                 app_id="m.email",
                 app_display_name="Email Notifications",
diff --git a/synapse/rest/client/pusher.py b/synapse/rest/client/pusher.py
index 975eef2144b6..1a8f5292ac5c 100644
--- a/synapse/rest/client/pusher.py
+++ b/synapse/rest/client/pusher.py
@@ -126,7 +126,6 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         try:
             await self.pusher_pool.add_or_update_pusher(
                 user_id=user.to_string(),
-                access_token=requester.access_token_id,
                 kind=content["kind"],
                 app_id=content["app_id"],
                 app_display_name=content["app_display_name"],
diff --git a/synapse/storage/databases/main/pusher.py b/synapse/storage/databases/main/pusher.py
index 9a24f7a65526..ab76b754e0e8 100644
--- a/synapse/storage/databases/main/pusher.py
+++ b/synapse/storage/databases/main/pusher.py
@@ -509,19 +509,24 @@ def __init__(
     async def _set_device_id_for_pushers(
         self, progress: JsonDict, batch_size: int
     ) -> int:
-        """Background update to populate the device_id column of the pushers table."""
+        """
+        Background update to populate the device_id column and clear the access_token
+        column for the pushers table.
+        """
         last_pusher_id = progress.get("pusher_id", 0)

         def set_device_id_for_pushers_txn(txn: LoggingTransaction) -> int:
             txn.execute(
                 """
-                SELECT p.id, at.device_id
+                SELECT
+                    p.id AS pusher_id,
+                    p.device_id AS pusher_device_id,
+                    at.device_id AS token_device_id
                 FROM pushers AS p
-                INNER JOIN access_tokens AS at
+                LEFT JOIN access_tokens AS at
                     ON p.access_token = at.id
                 WHERE
                     p.access_token IS NOT NULL
-                    AND at.device_id IS NOT NULL
                     AND p.id > ?
                 ORDER BY p.id
                 LIMIT ?
@@ -533,13 +538,27 @@ def set_device_id_for_pushers_txn(txn: LoggingTransaction) -> int:
             if len(rows) == 0:
                 return 0

+            # The reason we're clearing the access_token column here is a bit subtle.
+            # When a user logs out, we:
+            # (1) delete the access token
+            # (2) delete the device
+            #
+            # Ideally, we would delete the pushers only via their link to the device
+            # during (2), but since this background update might not have fully run yet,
+            # we're still deleting the pushers via the access token during (1).
             self.db_pool.simple_update_many_txn(
                 txn=txn,
                 table="pushers",
                 key_names=("id",),
-                key_values=[(row["id"],) for row in rows],
-                value_names=("device_id",),
-                value_values=[(row["device_id"],) for row in rows],
+                key_values=[(row["pusher_id"],) for row in rows],
+                value_names=("device_id", "access_token"),
+                # If there was already a device_id on the pusher, we only want to clear
+                # the access_token column, so we keep the existing device_id. Otherwise,
+                # we set the device_id we got from joining the access_tokens table.
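+                # (This is effectively COALESCE(p.device_id, at.device_id) over
+                # the SELECT above, computed here in Python via `or`.)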
+ value_values=[ + (row["pusher_device_id"] or row["token_device_id"], None) + for row in rows + ], ) self.db_pool.updates._background_update_progress_txn( @@ -568,7 +587,6 @@ class PusherStore(PusherWorkerStore, PusherBackgroundUpdatesStore): async def add_pusher( self, user_id: str, - access_token: Optional[int], kind: str, app_id: str, app_display_name: str, @@ -581,13 +599,13 @@ async def add_pusher( profile_tag: str = "", enabled: bool = True, device_id: Optional[str] = None, + access_token_id: Optional[int] = None, ) -> None: async with self._pushers_id_gen.get_next() as stream_id: await self.db_pool.simple_upsert( table="pushers", keyvalues={"app_id": app_id, "pushkey": pushkey, "user_name": user_id}, values={ - "access_token": access_token, "kind": kind, "app_display_name": app_display_name, "device_display_name": device_display_name, @@ -599,6 +617,10 @@ async def add_pusher( "id": stream_id, "enabled": enabled, "device_id": device_id, + # XXX(quenting): We're only really persisting the access token ID + # when updating an existing pusher. This is in case the + # 'set_device_id_for_pushers' background update hasn't finished yet. + "access_token": access_token_id, }, desc="add_pusher", ) diff --git a/synapse/storage/schema/main/delta/74/02_set_device_id_for_pushers_bg_update.sql b/synapse/storage/schema/main/delta/74/02_set_device_id_for_pushers_bg_update.sql new file mode 100644 index 000000000000..1367fb626733 --- /dev/null +++ b/synapse/storage/schema/main/delta/74/02_set_device_id_for_pushers_bg_update.sql @@ -0,0 +1,19 @@ +/* Copyright 2023 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- Triggers the background update to set the device_id for pushers +-- that don't have one, and clear the access_token column. +INSERT INTO background_updates (ordering, update_name, progress_json) VALUES + (7402, 'set_device_id_for_pushers', '{}'); diff --git a/tests/push/test_email.py b/tests/push/test_email.py index 4ea5472eb475..4b5c96aeaea8 100644 --- a/tests/push/test_email.py +++ b/tests/push/test_email.py @@ -105,7 +105,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.hs.get_datastores().main.get_user_by_access_token(self.access_token) ) assert user_tuple is not None - self.token_id = user_tuple.token_id + self.device_id = user_tuple.device_id # We need to add email to account before we can create a pusher. 
self.get_success( @@ -117,7 +117,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: pusher = self.get_success( self.hs.get_pusherpool().add_or_update_pusher( user_id=self.user_id, - access_token=self.token_id, + device_id=self.device_id, kind="email", app_id="m.email", app_display_name="Email Notifications", @@ -141,7 +141,7 @@ def test_need_validated_email(self) -> None: self.get_success_or_raise( self.hs.get_pusherpool().add_or_update_pusher( user_id=self.user_id, - access_token=self.token_id, + device_id=self.device_id, kind="email", app_id="m.email", app_display_name="Email Notifications", diff --git a/tests/push/test_http.py b/tests/push/test_http.py index c280ddcdf6a9..99cec0836b1d 100644 --- a/tests/push/test_http.py +++ b/tests/push/test_http.py @@ -67,13 +67,13 @@ def test_invalid_configuration(self) -> None: self.hs.get_datastores().main.get_user_by_access_token(access_token) ) assert user_tuple is not None - token_id = user_tuple.token_id + device_id = user_tuple.device_id def test_data(data: Any) -> None: self.get_failure( self.hs.get_pusherpool().add_or_update_pusher( user_id=user_id, - access_token=token_id, + device_id=device_id, kind="http", app_id="m.http", app_display_name="HTTP Push Notifications", @@ -114,12 +114,12 @@ def test_sends_http(self) -> None: self.hs.get_datastores().main.get_user_by_access_token(access_token) ) assert user_tuple is not None - token_id = user_tuple.token_id + device_id = user_tuple.device_id self.get_success( self.hs.get_pusherpool().add_or_update_pusher( user_id=user_id, - access_token=token_id, + device_id=device_id, kind="http", app_id="m.http", app_display_name="HTTP Push Notifications", @@ -235,12 +235,12 @@ def test_sends_high_priority_for_encrypted(self) -> None: self.hs.get_datastores().main.get_user_by_access_token(access_token) ) assert user_tuple is not None - token_id = user_tuple.token_id + device_id = user_tuple.device_id self.get_success( self.hs.get_pusherpool().add_or_update_pusher( user_id=user_id, - access_token=token_id, + device_id=device_id, kind="http", app_id="m.http", app_display_name="HTTP Push Notifications", @@ -356,12 +356,12 @@ def test_sends_high_priority_for_one_to_one_only(self) -> None: self.hs.get_datastores().main.get_user_by_access_token(access_token) ) assert user_tuple is not None - token_id = user_tuple.token_id + device_id = user_tuple.device_id self.get_success( self.hs.get_pusherpool().add_or_update_pusher( user_id=user_id, - access_token=token_id, + device_id=device_id, kind="http", app_id="m.http", app_display_name="HTTP Push Notifications", @@ -443,12 +443,12 @@ def test_sends_high_priority_for_mention(self) -> None: self.hs.get_datastores().main.get_user_by_access_token(access_token) ) assert user_tuple is not None - token_id = user_tuple.token_id + device_id = user_tuple.device_id self.get_success( self.hs.get_pusherpool().add_or_update_pusher( user_id=user_id, - access_token=token_id, + device_id=device_id, kind="http", app_id="m.http", app_display_name="HTTP Push Notifications", @@ -521,12 +521,12 @@ def test_sends_high_priority_for_atroom(self) -> None: self.hs.get_datastores().main.get_user_by_access_token(access_token) ) assert user_tuple is not None - token_id = user_tuple.token_id + device_id = user_tuple.device_id self.get_success( self.hs.get_pusherpool().add_or_update_pusher( user_id=user_id, - access_token=token_id, + device_id=device_id, kind="http", app_id="m.http", app_display_name="HTTP Push Notifications", @@ -628,12 +628,12 @@ def 
_test_push_unread_count(self) -> None:
             self.hs.get_datastores().main.get_user_by_access_token(access_token)
         )
         assert user_tuple is not None
-        token_id = user_tuple.token_id
+        device_id = user_tuple.device_id

         self.get_success(
             self.hs.get_pusherpool().add_or_update_pusher(
                 user_id=user_id,
-                access_token=token_id,
+                device_id=device_id,
                 kind="http",
                 app_id="m.http",
                 app_display_name="HTTP Push Notifications",
@@ -764,12 +764,12 @@ def _set_pusher(self, user_id: str, access_token: str, enabled: bool) -> None:
             self.hs.get_datastores().main.get_user_by_access_token(access_token)
         )
         assert user_tuple is not None
-        token_id = user_tuple.token_id
+        device_id = user_tuple.device_id

         self.get_success(
             self.hs.get_pusherpool().add_or_update_pusher(
                 user_id=user_id,
-                access_token=token_id,
+                device_id=device_id,
                 kind="http",
                 app_id="m.http",
                 app_display_name="HTTP Push Notifications",
@@ -778,7 +778,6 @@ def _set_pusher(self, user_id: str, access_token: str, enabled: bool) -> None:
                 lang=None,
                 data={"url": "http://example.com/_matrix/push/v1/notify"},
                 enabled=enabled,
-                device_id=user_tuple.device_id,
             )
         )

@@ -895,19 +894,17 @@ def test_null_enabled(self) -> None:

     def test_update_different_device_access_token_device_id(self) -> None:
         """Tests that if we create a pusher from one device, then update it from another
-        device, the access token and device ID associated with the pusher stays the
-        same.
+        device, the device ID associated with the pusher stays the same.
         """
         # Create a user with a pusher.
         user_id, access_token = self._make_user_with_pusher("user")

-        # Get the token ID for the current access token, since that's what we store in
-        # the pushers table. Also get the device ID from it.
+        # Get the device ID for the current access token, since that's what we store in
+        # the pushers table.
         user_tuple = self.get_success(
             self.hs.get_datastores().main.get_user_by_access_token(access_token)
         )
         assert user_tuple is not None
-        token_id = user_tuple.token_id
         device_id = user_tuple.device_id

         # Generate a new access token, and update the pusher with it.
@@ -920,10 +917,9 @@ def test_update_different_device_access_token_device_id(self) -> None:
         )
         pushers: List[PusherConfig] = list(ret)

-        # Check that we still have one pusher, and that the access token and device ID
-        # associated with it didn't change.
+        # Check that we still have one pusher, and that the device ID associated with
+        # it didn't change.
self.assertEqual(len(pushers), 1) - self.assertEqual(pushers[0].access_token, token_id) self.assertEqual(pushers[0].device_id, device_id) @override_config({"experimental_features": {"msc3881_enabled": True}}) diff --git a/tests/replication/test_pusher_shard.py b/tests/replication/test_pusher_shard.py index 0798b021c3d0..dcb3e6669bb9 100644 --- a/tests/replication/test_pusher_shard.py +++ b/tests/replication/test_pusher_shard.py @@ -51,12 +51,12 @@ def _create_pusher_and_send_msg(self, localpart: str) -> str: self.hs.get_datastores().main.get_user_by_access_token(access_token) ) assert user_dict is not None - token_id = user_dict.token_id + device_id = user_dict.device_id self.get_success( self.hs.get_pusherpool().add_or_update_pusher( user_id=user_id, - access_token=token_id, + device_id=device_id, kind="http", app_id="m.http", app_display_name="HTTP Push Notifications", diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py index 4b8f889a71ba..b4241ceaf023 100644 --- a/tests/rest/admin/test_user.py +++ b/tests/rest/admin/test_user.py @@ -3047,12 +3047,12 @@ def test_get_pushers(self) -> None: self.store.get_user_by_access_token(other_user_token) ) assert user_tuple is not None - token_id = user_tuple.token_id + device_id = user_tuple.device_id self.get_success( self.hs.get_pusherpool().add_or_update_pusher( user_id=self.other_user, - access_token=token_id, + device_id=device_id, kind="http", app_id="m.http", app_display_name="HTTP Push Notifications", From 5f7c9082805846cc07bfef2d48c6f6cfc9f723e9 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Fri, 24 Mar 2023 15:31:12 +0000 Subject: [PATCH 015/106] As an optimisation, use `TRUNCATE` on Postgres when clearing the user directory tables. (#15316) --- changelog.d/15316.misc | 1 + synapse/storage/databases/main/user_directory.py | 15 +++++++++++---- 2 files changed, 12 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15316.misc diff --git a/changelog.d/15316.misc b/changelog.d/15316.misc new file mode 100644 index 000000000000..1f408739f0bc --- /dev/null +++ b/changelog.d/15316.misc @@ -0,0 +1 @@ +As an optimisation, use `TRUNCATE` on Postgres when clearing the user directory tables. \ No newline at end of file diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py index 97f09b73dde6..9fced4b99718 100644 --- a/synapse/storage/databases/main/user_directory.py +++ b/synapse/storage/databases/main/user_directory.py @@ -698,10 +698,17 @@ async def delete_all_from_user_dir(self) -> None: """Delete the entire user directory""" def _delete_all_from_user_dir_txn(txn: LoggingTransaction) -> None: - txn.execute("DELETE FROM user_directory") - txn.execute("DELETE FROM user_directory_search") - txn.execute("DELETE FROM users_in_public_rooms") - txn.execute("DELETE FROM users_who_share_private_rooms") + # SQLite doesn't support TRUNCATE. + # On Postgres, DELETE FROM does a table scan but TRUNCATE is more efficient. 
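+            # (TRUNCATE also reclaims the tables' disk space immediately,
+            # rather than leaving dead rows behind for VACUUM to collect.)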
+ truncate = ( + "DELETE FROM" + if isinstance(self.database_engine, Sqlite3Engine) + else "TRUNCATE" + ) + txn.execute(f"{truncate} user_directory") + txn.execute(f"{truncate} user_directory_search") + txn.execute(f"{truncate} users_in_public_rooms") + txn.execute(f"{truncate} users_who_share_private_rooms") txn.call_after(self.get_user_in_directory.invalidate_all) await self.db_pool.runInteraction( From d5324ee111ea56fa466eab7e3974dc4894a64d46 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Fri, 24 Mar 2023 16:41:10 +0000 Subject: [PATCH 016/106] Add developer documentation for the Federation Sender and add a documentation mechanism using Sphinx. (#15265) Co-authored-by: Patrick Cloke --- .ci/scripts/prepare_old_deps.sh | 7 +- .github/workflows/docs.yaml | 77 +- .gitignore | 1 + changelog.d/15265.misc | 1 + dev-docs/Makefile | 20 + dev-docs/conf.py | 50 + dev-docs/index.rst | 22 + dev-docs/modules/federation_sender.md | 5 + poetry.lock | 1480 +++++++++++++++++-------- pyproject.toml | 12 + scripts-dev/lint.sh | 1 + synapse/federation/sender/__init__.py | 113 ++ 12 files changed, 1280 insertions(+), 509 deletions(-) create mode 100644 changelog.d/15265.misc create mode 100644 dev-docs/Makefile create mode 100644 dev-docs/conf.py create mode 100644 dev-docs/index.rst create mode 100644 dev-docs/modules/federation_sender.md diff --git a/.ci/scripts/prepare_old_deps.sh b/.ci/scripts/prepare_old_deps.sh index 3398193ee565..e536a9db8b33 100755 --- a/.ci/scripts/prepare_old_deps.sh +++ b/.ci/scripts/prepare_old_deps.sh @@ -35,9 +35,9 @@ sed -i \ # compatible (as far the package metadata declares, anyway); pip's package resolver # is more lax. # -# Rather than `poetry install --no-dev`, we drop all dev dependencies from the -# toml file. This means we don't have to ensure compatibility between old deps and -# dev tools. +# Rather than `poetry install --no-dev`, we drop all dev dependencies and the dev-docs +# group from the toml file. This means we don't have to ensure compatibility between +# old deps and dev tools. pip install toml wheel @@ -47,6 +47,7 @@ with open('pyproject.toml', 'r') as f: data = toml.loads(f.read()) del data['tool']['poetry']['dev-dependencies'] +del data['tool']['poetry']['group']['dev-docs'] with open('pyproject.toml', 'w') as f: toml.dump(data, f) diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 55b4b287f677..20a29e7cbf60 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -13,25 +13,10 @@ on: workflow_dispatch: jobs: - pages: - name: GitHub Pages + pre: + name: Calculate variables for GitHub Pages deployment runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - - name: Setup mdbook - uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0 - with: - mdbook-version: '0.4.17' - - - name: Build the documentation - # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md. - # However, we're using docs/README.md for other purposes and need to pick a new page - # as the default. Let's opt for the welcome page instead. - run: | - mdbook build - cp book/welcome_and_overview.html book/index.html - # Figure out the target directory. # # The target directory depends on the name of the branch @@ -55,11 +40,65 @@ jobs: # finally, set the 'branch-version' var. 
echo "branch-version=$branch" >> "$GITHUB_OUTPUT" - + outputs: + branch-version: ${{ steps.vars.outputs.branch-version }} + +################################################################################ + pages-docs: + name: GitHub Pages + runs-on: ubuntu-latest + needs: + - pre + steps: + - uses: actions/checkout@v3 + + - name: Setup mdbook + uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0 + with: + mdbook-version: '0.4.17' + + - name: Build the documentation + # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md. + # However, we're using docs/README.md for other purposes and need to pick a new page + # as the default. Let's opt for the welcome page instead. + run: | + mdbook build + cp book/welcome_and_overview.html book/index.html + # Deploy to the target directory. - name: Deploy to gh pages uses: peaceiris/actions-gh-pages@bd8c6b06eba6b3d25d72b7a1767993c0aeee42e7 # v3.9.2 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./book - destination_dir: ./${{ steps.vars.outputs.branch-version }} + destination_dir: ./${{ needs.pre.outputs.branch-version }} + +################################################################################ + pages-devdocs: + name: GitHub Pages (developer docs) + runs-on: ubuntu-latest + needs: + - pre + steps: + - uses: action/checkout@v3 + + - name: "Set up Sphinx" + uses: matrix-org/setup-python-poetry@v1 + with: + python-version: "3.x" + poetry-version: "1.3.2" + groups: "dev-docs" + extras: "" + + - name: Build the documentation + run: | + cd dev-docs + poetry run make html + + # Deploy to the target directory. + - name: Deploy to gh pages + uses: peaceiris/actions-gh-pages@bd8c6b06eba6b3d25d72b7a1767993c0aeee42e7 # v3.9.2 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./dev-docs/_build/html + destination_dir: ./dev-docs/${{ needs.pre.outputs.branch-version }} diff --git a/.gitignore b/.gitignore index 6937de88bcd1..96c451258ee6 100644 --- a/.gitignore +++ b/.gitignore @@ -53,6 +53,7 @@ __pycache__/ /coverage.* /dist/ /docs/build/ +/dev-docs/_build/ /htmlcov /pip-wheel-metadata/ diff --git a/changelog.d/15265.misc b/changelog.d/15265.misc new file mode 100644 index 000000000000..355c3cae2b7f --- /dev/null +++ b/changelog.d/15265.misc @@ -0,0 +1 @@ +Add developer documentation for the Federation Sender and add a documentation mechanism using Sphinx. \ No newline at end of file diff --git a/dev-docs/Makefile b/dev-docs/Makefile new file mode 100644 index 000000000000..d4bb2cbb9edd --- /dev/null +++ b/dev-docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/dev-docs/conf.py b/dev-docs/conf.py new file mode 100644 index 000000000000..826d578c0bec --- /dev/null +++ b/dev-docs/conf.py @@ -0,0 +1,50 @@ +# Configuration file for the Sphinx documentation builder. 
+# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "Synapse development" +copyright = "2023, The Matrix.org Foundation C.I.C." +author = "The Synapse Maintainers and Community" + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + "autodoc2", + "myst_parser", +] + +templates_path = ["_templates"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + + +# -- Options for Autodoc2 ---------------------------------------------------- + +autodoc2_docstring_parser_regexes = [ + # this will render all docstrings as 'MyST' Markdown + (r".*", "myst"), +] + +autodoc2_packages = [ + { + "path": "../synapse", + # Don't render documentation for everything as a matter of course + "auto_mode": False, + }, +] + + +# -- Options for MyST (Markdown) --------------------------------------------- + +# myst_heading_anchors = 2 + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = "furo" +html_static_path = ["_static"] diff --git a/dev-docs/index.rst b/dev-docs/index.rst new file mode 100644 index 000000000000..1ef210460abc --- /dev/null +++ b/dev-docs/index.rst @@ -0,0 +1,22 @@ +.. Synapse Developer Documentation documentation master file, created by + sphinx-quickstart on Mon Mar 13 08:59:51 2023. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to the Synapse Developer Documentation! +=========================================================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + modules/federation_sender + + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/dev-docs/modules/federation_sender.md b/dev-docs/modules/federation_sender.md new file mode 100644 index 000000000000..dac6852c1664 --- /dev/null +++ b/dev-docs/modules/federation_sender.md @@ -0,0 +1,5 @@ +Federation Sender +================= + +```{autodoc2-docstring} synapse.federation.sender +``` diff --git a/poetry.lock b/poetry.lock index 76fbfafcf9eb..d42b3a57102e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,37 @@ # This file is automatically @generated by Poetry and should not be changed by hand. +[[package]] +name = "alabaster" +version = "0.7.13" +description = "A configurable sidebar-enabled Sphinx theme" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] + +[[package]] +name = "astroid" +version = "2.15.0" +description = "An abstract syntax tree for Python with inference support." 
+category = "dev" +optional = false +python-versions = ">=3.7.2" +files = [ + {file = "astroid-2.15.0-py3-none-any.whl", hash = "sha256:e3e4d0ffc2d15d954065579689c36aac57a339a4679a679579af6401db4d3fdb"}, + {file = "astroid-2.15.0.tar.gz", hash = "sha256:525f126d5dc1b8b0b6ee398b33159105615d92dc4a17f2cd064125d57f6186fa"}, +] + +[package.dependencies] +lazy-object-proxy = ">=1.4.0" +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} +wrapt = [ + {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, + {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, +] + [[package]] name = "attrs" version = "22.2.0" @@ -53,6 +85,21 @@ six = "*" [package.extras] visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] +[[package]] +name = "babel" +version = "2.12.1" +description = "Internationalization utilities" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, + {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, +] + +[package.dependencies] +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} + [[package]] name = "bcrypt" version = "4.0.1" @@ -88,6 +135,25 @@ files = [ tests = ["pytest (>=3.2.1,!=3.3.0)"] typecheck = ["mypy"] +[[package]] +name = "beautifulsoup4" +version = "4.12.0" +description = "Screen-scraping library" +category = "dev" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.0-py3-none-any.whl", hash = "sha256:2130a5ad7f513200fae61a17abb5e338ca980fa28c439c0571014bc0217e9591"}, + {file = "beautifulsoup4-4.12.0.tar.gz", hash = "sha256:c5fceeaec29d09c84970e47c65f2f0efe57872f7cff494c9691a26ec0ff13234"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + [[package]] name = "black" version = "23.1.0" @@ -261,19 +327,89 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "2.0.12" +version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "main" optional = false -python-versions = ">=3.5.0" +python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, - {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, ] -[package.extras] -unicode-backport = ["unicodedata2"] - [[package]] name = "click" version = "8.1.3" @@ -306,14 +442,14 @@ click = "*" [[package]] name = "colorama" -version = "0.4.4" +version = "0.4.6" description = "Cross-platform colored terminal text." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] [[package]] @@ -345,35 +481,31 @@ files = [ [[package]] name = "cryptography" -version = "39.0.2" +version = "40.0.0" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "cryptography-39.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:2725672bb53bb92dc7b4150d233cd4b8c59615cd8288d495eaa86db00d4e5c06"}, - {file = "cryptography-39.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:23df8ca3f24699167daf3e23e51f7ba7334d504af63a94af468f468b975b7dd7"}, - {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:eb40fe69cfc6f5cdab9a5ebd022131ba21453cf7b8a7fd3631f45bbf52bed612"}, - {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc0521cce2c1d541634b19f3ac661d7a64f9555135e9d8af3980965be717fd4a"}, - {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffd394c7896ed7821a6d13b24657c6a34b6e2650bd84ae063cf11ccffa4f1a97"}, - {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:e8a0772016feeb106efd28d4a328e77dc2edae84dfbac06061319fdb669ff828"}, - {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8f35c17bd4faed2bc7797d2a66cbb4f986242ce2e30340ab832e5d99ae60e011"}, - {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b49a88ff802e1993b7f749b1eeb31134f03c8d5c956e3c125c75558955cda536"}, - {file = "cryptography-39.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c682e736513db7d04349b4f6693690170f95aac449c56f97415c6980edef5"}, - {file = "cryptography-39.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:d7d84a512a59f4412ca8549b01f94be4161c94efc598bf09d027d67826beddc0"}, - {file = "cryptography-39.0.2-cp36-abi3-win32.whl", hash = "sha256:c43ac224aabcbf83a947eeb8b17eaf1547bce3767ee2d70093b461f31729a480"}, - {file = "cryptography-39.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:788b3921d763ee35dfdb04248d0e3de11e3ca8eb22e2e48fef880c42e1f3c8f9"}, - {file = "cryptography-39.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d15809e0dbdad486f4ad0979753518f47980020b7a34e9fc56e8be4f60702fac"}, - {file = "cryptography-39.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:50cadb9b2f961757e712a9737ef33d89b8190c3ea34d0fb6675e00edbe35d074"}, - {file = "cryptography-39.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:103e8f7155f3ce2ffa0049fe60169878d47a4364b277906386f8de21c9234aa1"}, - {file = "cryptography-39.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6236a9610c912b129610eb1a274bdc1350b5df834d124fa84729ebeaf7da42c3"}, - {file = "cryptography-39.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e944fe07b6f229f4c1a06a7ef906a19652bdd9fd54c761b0ff87e83ae7a30354"}, - {file = "cryptography-39.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:35d658536b0a4117c885728d1a7032bdc9a5974722ae298d6c533755a6ee3915"}, - {file = "cryptography-39.0.2-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:30b1d1bfd00f6fc80d11300a29f1d8ab2b8d9febb6ed4a38a76880ec564fae84"}, - {file = "cryptography-39.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e029b844c21116564b8b61216befabca4b500e6816fa9f0ba49527653cae2108"}, - {file = "cryptography-39.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fa507318e427169ade4e9eccef39e9011cdc19534f55ca2f36ec3f388c1f70f3"}, - {file = "cryptography-39.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:8bc0008ef798231fac03fe7d26e82d601d15bd16f3afaad1c6113771566570f3"}, - {file = "cryptography-39.0.2.tar.gz", hash = "sha256:bc5b871e977c8ee5a1bbc42fa8d19bcc08baf0c51cbf1586b0e87a2694dde42f"}, + {file = "cryptography-40.0.0-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:168ded448fb5d82dfa911156ab8b13b1716de65bd50ff977f4657643f998fa05"}, + {file = "cryptography-40.0.0-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:71cb346b9dd1537102e7466a2d629385b01847f8d96cd7405f0e717d91cebc8e"}, + {file = "cryptography-40.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e63fb48e2615cfab5a9c4bb457d35e7ae03ea8593996bfbe257e78244d12d0"}, + {file = "cryptography-40.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2c4134d29cdce0735c16abf48fa8435f001a7b0031e68dd9a9ee1c80a29374a"}, + {file = "cryptography-40.0.0-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:754dc5ab648113dc54197f242db43234a04e4d61193fb5d3ebb42bd569dca571"}, + {file = "cryptography-40.0.0-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43089be365c0ca4235c6e4e781f3bc125bc1fff576c9dd22cdfb585309b9bb9d"}, + {file = "cryptography-40.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:23c42c59c2b5b9ddc6a85b5c46b8fabc4d63a1714f4dbea4bf20d25690bf2365"}, + {file = "cryptography-40.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6b36e2864e04c82634879c7e7aad48824b1847fdb06b64cd410d2ec5e51d1b31"}, + {file = "cryptography-40.0.0-cp36-abi3-win32.whl", hash = "sha256:e917a07094217edeefe8f6ea960b45d7aab650b982e4209da078332cc9d3ac3a"}, + {file = "cryptography-40.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:fba36ec552794a06a07ac8bdc5ad83a587f6959d98547f373d401975d55c7c9e"}, + {file = "cryptography-40.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fafa997b9e6818db333ded4b379f5b7679b48bd88ac878428cea2a1aa6e79fd8"}, + {file = "cryptography-40.0.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b05c9f25a1ea42e427085230815bbdebe15a53bb6163c4c06022e5630645046b"}, + {file = "cryptography-40.0.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:14da8c26755ffa5c7863ffa5e8b87cb9596a21b6c34852cb19e0f48c226c64fb"}, + {file = "cryptography-40.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7162ae4530958114ca2eee30a56eca46527def33493f622f059dc2e825fd0913"}, + {file = "cryptography-40.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e5855a80c77565fe2464e88e0095764e25d8ddb2d24df2b1d31773e80be94435"}, + {file = "cryptography-40.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:34f502619964210939bb7ee7cd5df53178534eb08d3526f941695a8f7aa0efe4"}, + {file = "cryptography-40.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:722cfddae79684166840be2cbbae154f44a455519e644b60bf274a50ccb834db"}, + {file = "cryptography-40.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7cc9fc3ffcb766c313ed0515d77d0deabb4f36bdcff3a9f115c43e5ec611b82a"}, + {file = "cryptography-40.0.0.tar.gz", hash = "sha256:f421f6777592eb199ca8abac7c20b9ecef27c50ad63546e6c614b29771b46d0d"}, ] [package.dependencies] @@ -382,10 +514,10 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "check-manifest", "mypy", "ruff", "types-pytz", "types-requests"] +pep8test = ["black", "check-manifest", "mypy", "ruff"] sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = 
["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"] test-randomorder = ["pytest-randomly"] tox = ["tox"] @@ -421,41 +553,59 @@ dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version [[package]] name = "docutils" -version = "0.18.1" +version = "0.19" description = "Docutils -- Python Documentation Utilities" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" files = [ - {file = "docutils-0.18.1-py2.py3-none-any.whl", hash = "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c"}, - {file = "docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"}, + {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, + {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, ] [[package]] name = "elementpath" -version = "2.5.0" -description = "XPath 1.0/2.0 parsers and selectors for ElementTree and lxml" +version = "4.1.0" +description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and lxml" category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "elementpath-2.5.0-py3-none-any.whl", hash = "sha256:2a432775e37a19e4362443078130a7dbfc457d7d093cd421c03958d9034cc08b"}, - {file = "elementpath-2.5.0.tar.gz", hash = "sha256:3a27aaf3399929fccda013899cb76d3ff111734abf4281e5f9d3721ba0b9ffa3"}, + {file = "elementpath-4.1.0-py3-none-any.whl", hash = "sha256:2b1b524223d70fd6dd63a36b9bc32e4919c96a272c2d1454094c4d85086bc6f8"}, + {file = "elementpath-4.1.0.tar.gz", hash = "sha256:dbd7eba3cf0b3b4934f627ba24851a3e0798ef2bc9104555a4cd831f2e6e8e14"}, ] [package.extras] -dev = ["Sphinx", "coverage", "flake8", "lxml", "memory-profiler", "mypy (==0.910)", "tox", "xmlschema (>=1.8.0)"] +dev = ["Sphinx", "coverage", "flake8", "lxml", "lxml-stubs", "memory-profiler", "memray", "mypy", "tox", "xmlschema (>=2.0.0)"] + +[[package]] +name = "furo" +version = "2022.12.7" +description = "A clean customisable Sphinx documentation theme." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "furo-2022.12.7-py3-none-any.whl", hash = "sha256:7cb76c12a25ef65db85ab0743df907573d03027a33631f17d267e598ebb191f7"}, + {file = "furo-2022.12.7.tar.gz", hash = "sha256:d8008f8efbe7587a97ba533c8b2df1f9c21ee9b3e5cad0d27f61193d38b1a986"}, +] + +[package.dependencies] +beautifulsoup4 = "*" +pygments = ">=2.7" +sphinx = ">=5.0,<7.0" +sphinx-basic-ng = "*" [[package]] name = "gitdb" -version = "4.0.9" +version = "4.0.10" description = "Git Object Database" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, ] [package.dependencies] @@ -691,6 +841,18 @@ files = [ {file = "ijson-3.2.0.post0.tar.gz", hash = "sha256:80a5bd7e9923cab200701f67ad2372104328b99ddf249dbbe8834102c852d316"}, ] +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + [[package]] name = "immutabledict" version = "2.2.3" @@ -705,14 +867,14 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.0.0" +version = "6.1.0" description = "Read metadata from Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, - {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, + {file = "importlib_metadata-6.1.0-py3-none-any.whl", hash = "sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09"}, + {file = "importlib_metadata-6.1.0.tar.gz", hash = "sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20"}, ] [package.dependencies] @@ -726,36 +888,37 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag [[package]] name = "importlib-resources" -version = "5.4.0" +version = "5.12.0" description = "Read resources from Python packages" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, - {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, + {file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"}, + {file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"}, ] [package.dependencies] zipp = {version = ">=3.1.0", 
markers = "python_version < \"3.10\""} [package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [[package]] name = "incremental" -version = "21.3.0" -description = "A small library that versions your Python projects." +version = "22.10.0" +description = "\"A small library that versions your Python projects.\"" category = "main" optional = false python-versions = "*" files = [ - {file = "incremental-21.3.0-py2.py3-none-any.whl", hash = "sha256:92014aebc6a20b78a8084cdd5645eeaa7f74b8933f70fa3ada2cfbd1e3b54321"}, - {file = "incremental-21.3.0.tar.gz", hash = "sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57"}, + {file = "incremental-22.10.0-py2.py3-none-any.whl", hash = "sha256:b864a1f30885ee72c5ac2835a761b8fe8aa9c28b9395cacf27286602688d3e51"}, + {file = "incremental-22.10.0.tar.gz", hash = "sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0"}, ] [package.extras] +mypy = ["click (>=6.0)", "mypy (==0.812)", "twisted (>=16.4.0)"] scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] [[package]] @@ -796,20 +959,39 @@ tornado = ">=4.3" [package.extras] tests = ["codecov", "coverage", "flake8", "flake8-quotes", "flake8-typing-imports", "mock", "mypy", "opentracing_instrumentation (>=3,<4)", "prometheus_client (==0.11.0)", "pycurl", "pytest", "pytest-benchmark[histogram]", "pytest-cov", "pytest-localserver", "pytest-timeout", "pytest-tornado", "tchannel (==2.1.0)"] +[[package]] +name = "jaraco-classes" +version = "3.2.3" +description = "Utility functions for Python class constructs" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jaraco.classes-3.2.3-py3-none-any.whl", hash = "sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158"}, + {file = "jaraco.classes-3.2.3.tar.gz", hash = "sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a"}, +] + +[package.dependencies] +more-itertools = "*" + +[package.extras] +docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + [[package]] name = "jeepney" -version = "0.7.1" +version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." 
category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "jeepney-0.7.1-py3-none-any.whl", hash = "sha256:1b5a0ea5c0e7b166b2f5895b91a08c14de8915afda4407fb5022a195224958ac"}, - {file = "jeepney-0.7.1.tar.gz", hash = "sha256:fa9e232dfa0c498bd0b8a3a73b8d8a31978304dcef0515adc859d4e096f96f4f"}, + {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, + {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, ] [package.extras] -test = ["async-timeout", "pytest", "pytest-asyncio", "pytest-trio", "testpath", "trio"] +test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] trio = ["async_generator", "trio"] [[package]] @@ -856,25 +1038,74 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "keyring" -version = "23.5.0" +version = "23.13.1" description = "Store and access your passwords safely." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "keyring-23.5.0-py3-none-any.whl", hash = "sha256:b0d28928ac3ec8e42ef4cc227822647a19f1d544f21f96457965dc01cf555261"}, - {file = "keyring-23.5.0.tar.gz", hash = "sha256:9012508e141a80bd1c0b6778d5c610dd9f8c464d75ac6774248500503f972fb9"}, + {file = "keyring-23.13.1-py3-none-any.whl", hash = "sha256:771ed2a91909389ed6148631de678f82ddc73737d85a927f382a8a1b157898cd"}, + {file = "keyring-23.13.1.tar.gz", hash = "sha256:ba2e15a9b35e21908d0aaf4e0a47acc52d6ae33444df0da2b49d41a46ef6d678"}, ] [package.dependencies] -importlib-metadata = ">=3.6" +importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} +importlib-resources = {version = "*", markers = "python_version < \"3.9\""} +"jaraco.classes" = "*" jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""} -pywin32-ctypes = {version = "<0.1.0 || >0.1.0,<0.1.1 || >0.1.1", markers = "sys_platform == \"win32\""} +pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} [package.extras] -docs = ["jaraco.packaging (>=8.2)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] +completion = ["shtab"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[[package]] +name = "lazy-object-proxy" +version = "1.9.0" +description = "A fast and thorough lazy object proxy." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, + {file = 
"lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, +] [[package]] name = "ldap3" @@ -984,54 +1215,90 @@ html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] source = ["Cython (>=0.29.7)"] +[[package]] +name = "markdown-it-py" +version = "2.2.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"}, + {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" +typing_extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" -version = "2.1.0" +version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:290b02bab3c9e216da57c1d11d2ba73a9f73a614bbdcc027d299a60cdfabb11a"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e104c0c2b4cd765b4e83909cde7ec61a1e313f8a75775897db321450e928cce"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24c3be29abb6b34052fd26fc7a8e0a49b1ee9d282e3665e8ad09a0a68faee5b3"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204730fd5fe2fe3b1e9ccadb2bd18ba8712b111dcabce185af0b3b5285a7c989"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d3b64c65328cb4cd252c94f83e66e3d7acf8891e60ebf588d7b493a55a1dbf26"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:96de1932237abe0a13ba68b63e94113678c379dca45afa040a17b6e1ad7ed076"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75bb36f134883fdbe13d8e63b8675f5f12b80bb6627f7714c7d6c5becf22719f"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-win32.whl", hash = "sha256:4056f752015dfa9828dce3140dbadd543b555afb3252507348c493def166d454"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:d4e702eea4a2903441f2735799d217f4ac1b55f7d8ad96ab7d4e25417cb0827c"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f0eddfcabd6936558ec020130f932d479930581171368fd728efcfb6ef0dd357"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ddea4c352a488b5e1069069f2f501006b1a4362cb906bee9a193ef1245a7a61"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09c86c9643cceb1d87ca08cdc30160d1b7ab49a8a21564868921959bd16441b8"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a0abef2ca47b33fb615b491ce31b055ef2430de52c5b3fb19a4042dbc5cadb"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:736895a020e31b428b3382a7887bfea96102c529530299f426bf2e636aacec9e"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:679cbb78914ab212c49c67ba2c7396dc599a8479de51b9a87b174700abd9ea49"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:84ad5e29bf8bab3ad70fd707d3c05524862bddc54dc040982b0dbcff36481de7"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-win32.whl", hash = "sha256:8da5924cb1f9064589767b0f3fc39d03e3d0fb5aa29e0cb21d43106519bd624a"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:454ffc1cbb75227d15667c09f164a0099159da0c1f3d2636aa648f12675491ad"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:142119fb14a1ef6d758912b25c4e803c3ff66920635c44078666fe7cc3f8f759"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b2a5a856019d2833c56a3dcac1b80fe795c95f401818ea963594b345929dffa7"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d1fb9b2eec3c9714dd936860850300b51dbaa37404209c8d4cb66547884b7ed"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62c0285e91414f5c8f621a17b69fc0088394ccdaa961ef469e833dbff64bd5ea"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc3150f85e2dbcf99e65238c842d1cfe69d3e7649b19864c1cc043213d9cd730"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f02cf7221d5cd915d7fa58ab64f7ee6dd0f6cddbb48683debf5d04ae9b1c2cc1"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5653619b3eb5cbd35bfba3c12d575db2a74d15e0e1c08bf1db788069d410ce8"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7d2f5d97fcbd004c03df8d8fe2b973fe2b14e7bfeb2cfa012eaa8759ce9a762f"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-win32.whl", hash = "sha256:3cace1837bc84e63b3fd2dfce37f08f8c18aeb81ef5cf6bb9b51f625cb4e6cd8"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:fabbe18087c3d33c5824cb145ffca52eccd053061df1d79d4b66dafa5ad2a5ea"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:023af8c54fe63530545f70dd2a2a7eed18d07a9a77b94e8bf1e2ff7f252db9a3"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d66624f04de4af8bbf1c7f21cc06649c1c69a7f84109179add573ce35e46d448"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c532d5ab79be0199fa2658e24a02fce8542df196e60665dd322409a03db6a52c"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ec74fada3841b8c5f4c4f197bea916025cb9aa3fe5abf7d52b655d042f956"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c653fde75a6e5eb814d2a0a89378f83d1d3f502ab710904ee585c38888816c"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:961eb86e5be7d0973789f30ebcf6caab60b844203f4396ece27310295a6082c7"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:598b65d74615c021423bd45c2bc5e9b59539c875a9bdb7e5f2a6b92dfcfc268d"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:599941da468f2cf22bf90a84f6e2a65524e87be2fce844f96f2dd9a6c9d1e635"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-win32.whl", hash = 
"sha256:e6f7f3f41faffaea6596da86ecc2389672fa949bd035251eab26dc6697451d05"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:b8811d48078d1cf2a6863dafb896e68406c5f513048451cd2ded0473133473c7"}, - {file = "MarkupSafe-2.1.0.tar.gz", hash = "sha256:80beaf63ddfbc64a0452b841d8036ca0611e049650e20afcb882f5d3c266d65f"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, + {file = 
"MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, + {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, ] [[package]] @@ -1074,6 +1341,50 @@ Twisted = ">=15.1.0" [package.extras] dev = ["black (==22.3.0)", "flake8 (==4.0.1)", "isort (==5.9.3)", "ldaptor", "matrix-synapse", "mypy (==0.910)", "tox", "types-setuptools"] +[[package]] +name = "mdit-py-plugins" +version = "0.3.5" +description = "Collection of plugins for markdown-it-py" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdit-py-plugins-0.3.5.tar.gz", hash = "sha256:eee0adc7195e5827e17e02d2a258a2ba159944a0748f59c5099a4a27f78fcf6a"}, + {file = "mdit_py_plugins-0.3.5-py3-none-any.whl", hash = "sha256:ca9a0714ea59a24b2b044a1831f48d817dd0c817e84339f20e7889f392d77c4e"}, +] + +[package.dependencies] +markdown-it-py = ">=1.0.0,<3.0.0" + +[package.extras] +code-style = ["pre-commit"] +rtd = ["attrs", "myst-parser (>=0.16.1,<0.17.0)", "sphinx-book-theme (>=0.1.0,<0.2.0)"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "more-itertools" +version = "9.1.0" +description = "More routines for operating on iterables, beyond itertools" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "more-itertools-9.1.0.tar.gz", hash = "sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d"}, + {file = "more_itertools-9.1.0-py3-none-any.whl", hash = "sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3"}, +] + [[package]] name = "msgpack" version = "1.0.5" @@ -1197,14 +1508,14 @@ reports = ["lxml"] [[package]] name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] @@ -1227,6 +1538,33 @@ mypy = "1.0.0" [package.extras] test = ["lxml", "pytest (>=4.6)", "pytest-cov"] +[[package]] +name = "myst-parser" +version = "1.0.0" +description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "myst-parser-1.0.0.tar.gz", hash = "sha256:502845659313099542bd38a2ae62f01360e7dd4b1310f025dd014dfc0439cdae"}, + {file = "myst_parser-1.0.0-py3-none-any.whl", hash = "sha256:69fb40a586c6fa68995e6521ac0a525793935db7e724ca9bac1d33be51be9a4c"}, +] + +[package.dependencies] +docutils = ">=0.15,<0.20" +jinja2 = "*" +markdown-it-py = ">=1.0.0,<3.0.0" +mdit-py-plugins = ">=0.3.4,<0.4.0" +pyyaml = "*" +sphinx = ">=5,<7" + +[package.extras] +code-style = ["pre-commit (>=3.0,<4.0)"] +linkify = ["linkify-it-py (>=1.0,<2.0)"] +rtd = ["ipython", "pydata-sphinx-theme (==v0.13.0rc4)", "sphinx-autodoc2 (>=0.4.2,<0.5.0)", "sphinx-book-theme (==1.0.0rc2)", "sphinx-copybutton", "sphinx-design2", "sphinx-pyscript", "sphinx-tippy (>=0.3.1)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.7.5,<0.8.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] +testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=7,<8)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions", "sphinx-pytest"] +testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4,<0.4.0)"] + [[package]] name = "netaddr" version = "0.8.0" @@ -1282,26 +1620,26 @@ dev = ["jinja2"] [[package]] name = "pathspec" -version = "0.9.0" +version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.7" files = [ - {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, + {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, + {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, ] [[package]] name = "phonenumbers" -version = "8.13.5" +version = "8.13.7" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." 
category = "main" optional = false python-versions = "*" files = [ - {file = "phonenumbers-8.13.5-py2.py3-none-any.whl", hash = "sha256:2e3fd1f3fde226b289489275517c76edf223eafd9f43a2c2c36498a44b73d4b0"}, - {file = "phonenumbers-8.13.5.tar.gz", hash = "sha256:6eb2faf29c19f946baf10f1c977a1f856cab90819fe7735b8e141d5407420c4a"}, + {file = "phonenumbers-8.13.7-py2.py3-none-any.whl", hash = "sha256:d3e3555b38c89b121f5b2e917847003bdd07027569d758d5f40156c01aeac089"}, + {file = "phonenumbers-8.13.7.tar.gz", hash = "sha256:253bb0e01250d21a11f2b42b3e6e161b7f6cb2ac440e2e2a95c1da71d221ee1a"}, ] [[package]] @@ -1397,21 +1735,21 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa [[package]] name = "pkginfo" -version = "1.8.2" -description = "Query metadatdata from sdists / bdists / installed packages." +version = "1.9.6" +description = "Query metadata from sdists / bdists / installed packages." category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" files = [ - {file = "pkginfo-1.8.2-py2.py3-none-any.whl", hash = "sha256:c24c487c6a7f72c66e816ab1796b96ac6c3d14d49338293d2141664330b55ffc"}, - {file = "pkginfo-1.8.2.tar.gz", hash = "sha256:542e0d0b6750e2e21c20179803e40ab50598d8066d51097a0e382cba9eb02bff"}, + {file = "pkginfo-1.9.6-py3-none-any.whl", hash = "sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546"}, + {file = "pkginfo-1.9.6.tar.gz", hash = "sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046"}, ] [package.extras] -testing = ["coverage", "nose"] +testing = ["pytest", "pytest-cov"] [[package]] -name = "pkgutil_resolve_name" +name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." category = "main" @@ -1424,19 +1762,22 @@ files = [ [[package]] name = "platformdirs" -version = "2.5.1" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "3.1.1" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-2.5.1-py3-none-any.whl", hash = "sha256:bcae7cab893c2d310a711b70b24efb93334febe65f8de776ee320b517471e227"}, - {file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"}, + {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"}, + {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.4", markers = "python_version < \"3.8\""} + [package.extras] -docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "prometheus-client" @@ -1546,48 +1887,48 @@ files = [ [[package]] name = "pydantic" -version = "1.10.6" +version = "1.10.7" description = "Data validation and settings management using python type hints" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9289065611c48147c1dd1fd344e9d57ab45f1d99b0fb26c51f1cf72cd9bcd31"}, - {file = "pydantic-1.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c32b6bba301490d9bb2bf5f631907803135e8085b6aa3e5fe5a770d46dd0160"}, - {file = "pydantic-1.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd9b9e98068fa1068edfc9eabde70a7132017bdd4f362f8b4fd0abed79c33083"}, - {file = "pydantic-1.10.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c84583b9df62522829cbc46e2b22e0ec11445625b5acd70c5681ce09c9b11c4"}, - {file = "pydantic-1.10.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b41822064585fea56d0116aa431fbd5137ce69dfe837b599e310034171996084"}, - {file = "pydantic-1.10.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61f1f08adfaa9cc02e0cbc94f478140385cbd52d5b3c5a657c2fceb15de8d1fb"}, - {file = "pydantic-1.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:32937835e525d92c98a1512218db4eed9ddc8f4ee2a78382d77f54341972c0e7"}, - {file = "pydantic-1.10.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bbd5c531b22928e63d0cb1868dee76123456e1de2f1cb45879e9e7a3f3f1779b"}, - {file = "pydantic-1.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e277bd18339177daa62a294256869bbe84df1fb592be2716ec62627bb8d7c81d"}, - {file = "pydantic-1.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f15277d720aa57e173954d237628a8d304896364b9de745dcb722f584812c7"}, - {file = "pydantic-1.10.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b243b564cea2576725e77aeeda54e3e0229a168bc587d536cd69941e6797543d"}, - {file = "pydantic-1.10.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3ce13a558b484c9ae48a6a7c184b1ba0e5588c5525482681db418268e5f86186"}, - {file = "pydantic-1.10.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3ac1cd4deed871dfe0c5f63721e29debf03e2deefa41b3ed5eb5f5df287c7b70"}, - {file = "pydantic-1.10.6-cp311-cp311-win_amd64.whl", 
hash = "sha256:b1eb6610330a1dfba9ce142ada792f26bbef1255b75f538196a39e9e90388bf4"}, - {file = "pydantic-1.10.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4ca83739c1263a044ec8b79df4eefc34bbac87191f0a513d00dd47d46e307a65"}, - {file = "pydantic-1.10.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea4e2a7cb409951988e79a469f609bba998a576e6d7b9791ae5d1e0619e1c0f2"}, - {file = "pydantic-1.10.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53de12b4608290992a943801d7756f18a37b7aee284b9ffa794ee8ea8153f8e2"}, - {file = "pydantic-1.10.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:60184e80aac3b56933c71c48d6181e630b0fbc61ae455a63322a66a23c14731a"}, - {file = "pydantic-1.10.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:415a3f719ce518e95a92effc7ee30118a25c3d032455d13e121e3840985f2efd"}, - {file = "pydantic-1.10.6-cp37-cp37m-win_amd64.whl", hash = "sha256:72cb30894a34d3a7ab6d959b45a70abac8a2a93b6480fc5a7bfbd9c935bdc4fb"}, - {file = "pydantic-1.10.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3091d2eaeda25391405e36c2fc2ed102b48bac4b384d42b2267310abae350ca6"}, - {file = "pydantic-1.10.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:751f008cd2afe812a781fd6aa2fb66c620ca2e1a13b6a2152b1ad51553cb4b77"}, - {file = "pydantic-1.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12e837fd320dd30bd625be1b101e3b62edc096a49835392dcf418f1a5ac2b832"}, - {file = "pydantic-1.10.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d92831d0115874d766b1f5fddcdde0c5b6c60f8c6111a394078ec227fca6d"}, - {file = "pydantic-1.10.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:476f6674303ae7965730a382a8e8d7fae18b8004b7b69a56c3d8fa93968aa21c"}, - {file = "pydantic-1.10.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3a2be0a0f32c83265fd71a45027201e1278beaa82ea88ea5b345eea6afa9ac7f"}, - {file = "pydantic-1.10.6-cp38-cp38-win_amd64.whl", hash = "sha256:0abd9c60eee6201b853b6c4be104edfba4f8f6c5f3623f8e1dba90634d63eb35"}, - {file = "pydantic-1.10.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6195ca908045054dd2d57eb9c39a5fe86409968b8040de8c2240186da0769da7"}, - {file = "pydantic-1.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43cdeca8d30de9a897440e3fb8866f827c4c31f6c73838e3a01a14b03b067b1d"}, - {file = "pydantic-1.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c19eb5163167489cb1e0161ae9220dadd4fc609a42649e7e84a8fa8fff7a80f"}, - {file = "pydantic-1.10.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:012c99a9c0d18cfde7469aa1ebff922e24b0c706d03ead96940f5465f2c9cf62"}, - {file = "pydantic-1.10.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:528dcf7ec49fb5a84bf6fe346c1cc3c55b0e7603c2123881996ca3ad79db5bfc"}, - {file = "pydantic-1.10.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:163e79386c3547c49366e959d01e37fc30252285a70619ffc1b10ede4758250a"}, - {file = "pydantic-1.10.6-cp39-cp39-win_amd64.whl", hash = "sha256:189318051c3d57821f7233ecc94708767dd67687a614a4e8f92b4a020d4ffd06"}, - {file = "pydantic-1.10.6-py3-none-any.whl", hash = "sha256:acc6783751ac9c9bc4680379edd6d286468a1dc8d7d9906cd6f1186ed682b2b0"}, - {file = "pydantic-1.10.6.tar.gz", hash = "sha256:cf95adb0d1671fc38d8c43dd921ad5814a735e7d9b4d9e437c088002863854fd"}, + {file = "pydantic-1.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d"}, + {file = "pydantic-1.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e"}, + {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a"}, + {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f"}, + {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209"}, + {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af"}, + {file = "pydantic-1.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a"}, + {file = "pydantic-1.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1"}, + {file = "pydantic-1.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe"}, + {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd"}, + {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb"}, + {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b"}, + {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca"}, + {file = "pydantic-1.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d"}, + {file = "pydantic-1.10.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918"}, + {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe"}, + {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee"}, + {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1"}, + {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a"}, + {file = "pydantic-1.10.7-cp37-cp37m-win_amd64.whl", hash = "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914"}, + {file = "pydantic-1.10.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd"}, + {file = "pydantic-1.10.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245"}, + {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d"}, + {file = 
"pydantic-1.10.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3"}, + {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52"}, + {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209"}, + {file = "pydantic-1.10.7-cp38-cp38-win_amd64.whl", hash = "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e"}, + {file = "pydantic-1.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143"}, + {file = "pydantic-1.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e"}, + {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d"}, + {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f"}, + {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd"}, + {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5"}, + {file = "pydantic-1.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e"}, + {file = "pydantic-1.10.7-py3-none-any.whl", hash = "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6"}, + {file = "pydantic-1.10.7.tar.gz", hash = "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e"}, ] [package.dependencies] @@ -1617,16 +1958,19 @@ requests = ">=2.14.0" [[package]] name = "pygments" -version = "2.11.2" +version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." 
category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, - {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, ] +[package.extras] +plugins = ["importlib-metadata"] + [[package]] name = "pyicu" version = "2.10.2" @@ -1640,23 +1984,23 @@ files = [ [[package]] name = "pyjwt" -version = "2.4.0" +version = "2.6.0" description = "JSON Web Token implementation in Python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"}, - {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"}, + {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, + {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, ] [package.dependencies] -cryptography = {version = ">=3.3.1", optional = true, markers = "extra == \"crypto\""} +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} [package.extras] -crypto = ["cryptography (>=3.3.1)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.3.1)", "mypy", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] @@ -1716,18 +2060,18 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyopenssl" -version = "23.0.0" +version = "23.1.0" description = "Python wrapper module around the OpenSSL library" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "pyOpenSSL-23.0.0-py3-none-any.whl", hash = "sha256:df5fc28af899e74e19fccb5510df423581047e10ab6f1f4ba1763ff5fde844c0"}, - {file = "pyOpenSSL-23.0.0.tar.gz", hash = "sha256:c1cc5f86bcacefc84dada7d31175cae1b1518d5f60d3d0bb595a67822a868a6f"}, + {file = "pyOpenSSL-23.1.0-py3-none-any.whl", hash = "sha256:fb96e936866ad65662c22d0de84ca0fba58397893cdfe0f01334fa93382af23c"}, + {file = "pyOpenSSL-23.1.0.tar.gz", hash = "sha256:8cb78010a1eb2c8e24b851693b7b04dfe9b1dc0a5ab3843927b10a85b1dfbb2e"}, ] [package.dependencies] -cryptography = ">=38.0.0,<40" +cryptography = ">=38.0.0,<41" [package.extras] docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"] @@ -1735,33 +2079,39 @@ test = ["flaky", "pretend", "pytest (>=3.0.1)"] [[package]] name = "pyrsistent" -version = "0.18.1" +version = "0.19.3" description = "Persistent/Functional/Immutable data structures" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = 
"pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, - {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, - {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, - {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, + {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, + {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, + {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, + {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = 
"sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, + {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, + {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, ] [[package]] @@ -1807,14 +2157,14 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2021.3" +version = "2022.7.1" description = "World timezone definitions, modern and historical" category = "main" -optional = true +optional = false python-versions = "*" files = [ - {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, - {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, + {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, + {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, ] [[package]] @@ -1881,14 +2231,14 @@ files = [ [[package]] name = "readme-renderer" -version = "37.2" +version = "37.3" description = "readme_renderer is a library for rendering \"readme\" descriptions for Warehouse" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "readme_renderer-37.2-py3-none-any.whl", hash = "sha256:d3f06a69e8c40fca9ab3174eca48f96d9771eddb43517b17d96583418427b106"}, - {file = "readme_renderer-37.2.tar.gz", hash = "sha256:e8ad25293c98f781dbc2c5a36a309929390009f902f99e1798c761aaf04a7923"}, + {file = "readme_renderer-37.3-py3-none-any.whl", hash = "sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343"}, + {file = "readme_renderer-37.3.tar.gz", hash = "sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273"}, ] [package.dependencies] @@ -1901,36 +2251,36 @@ md = ["cmarkgfm (>=0.8.0)"] [[package]] name = "requests" -version = "2.27.1" +version = "2.28.2" description = "Python HTTP for Humans." 
category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7, <4" files = [ - {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} -idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" [package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<5)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-toolbelt" -version = "0.9.1" +version = "0.10.1" description = "A utility belt for advanced users of python-requests" category = "dev" optional = false -python-versions = "*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"}, - {file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"}, + {file = "requests-toolbelt-0.10.1.tar.gz", hash = "sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d"}, + {file = "requests_toolbelt-0.10.1-py2.py3-none-any.whl", hash = "sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7"}, ] [package.dependencies] @@ -1953,23 +2303,23 @@ idna2008 = ["idna"] [[package]] name = "rich" -version = "12.6.0" +version = "13.3.2" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" category = "dev" optional = false -python-versions = ">=3.6.3,<4.0.0" +python-versions = ">=3.7.0" files = [ - {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"}, - {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"}, + {file = "rich-13.3.2-py3-none-any.whl", hash = "sha256:a104f37270bf677148d8acb07d33be1569eeee87e2d1beb286a4e9113caf6f2f"}, + {file = "rich-13.3.2.tar.gz", hash = "sha256:91954fe80cfb7985727a467ca98a7618e5dd15178cc2da10f553b36a93859001"}, ] [package.dependencies] -commonmark = ">=0.9.0,<0.10.0" -pygments = ">=2.6.0,<3.0.0" +markdown-it-py = ">=2.2.0,<3.0.0" +pygments = ">=2.13.0,<3.0.0" typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} [package.extras] -jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] +jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" @@ -2000,14 +2350,14 @@ files = [ [[package]] name = "secretstorage" -version = "3.3.1" +version = "3.3.3" description = "Python bindings to FreeDesktop.org Secret Service API" category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "SecretStorage-3.3.1-py3-none-any.whl", hash = "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f"}, - {file = 
"SecretStorage-3.3.1.tar.gz", hash = "sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195"}, + {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, + {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, ] [package.dependencies] @@ -2098,18 +2448,18 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setuptools" -version = "65.5.1" +version = "67.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-65.5.1-py3-none-any.whl", hash = "sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31"}, - {file = "setuptools-65.5.1.tar.gz", hash = "sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f"}, + {file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"}, + {file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] @@ -2176,6 +2526,18 @@ files = [ {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, ] +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + [[package]] name = "sortedcontainers" version = "2.4.0" @@ -2188,6 +2550,190 @@ files = [ {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, ] +[[package]] +name = "soupsieve" +version = "2.4" +description = "A modern CSS selector implementation for Beautiful Soup." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "soupsieve-2.4-py3-none-any.whl", hash = "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955"}, + {file = "soupsieve-2.4.tar.gz", hash = "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a"}, +] + +[[package]] +name = "sphinx" +version = "6.1.3" +description = "Python documentation generator" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Sphinx-6.1.3.tar.gz", hash = "sha256:0dac3b698538ffef41716cf97ba26c1c7788dba73ce6f150c1ff5b4720786dd2"}, + {file = "sphinx-6.1.3-py3-none-any.whl", hash = "sha256:807d1cb3d6be87eb78a381c3e70ebd8d346b9a25f3753e9947e866b2786865fc"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.18,<0.20" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.13" +requests = ">=2.25.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] +test = ["cython", "html5lib", "pytest (>=4.6)"] + +[[package]] +name = "sphinx-autodoc2" +version = "0.4.2" +description = "Analyse a python project and create documentation for it." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinx-autodoc2-0.4.2.tar.gz", hash = "sha256:06da226a25a4339e173b34bb0e590e0ba9b4570b414796140aee1939d09acb3a"}, + {file = "sphinx_autodoc2-0.4.2-py3-none-any.whl", hash = "sha256:00835ba8c980b9c510ea794c3e2060e5a254a74c6c22badc9bfd3642dc1034b4"}, +] + +[package.dependencies] +astroid = ">=2.7" +tomli = {version = "*", markers = "python_version < \"3.11\""} +typing-extensions = "*" + +[package.extras] +cli = ["typer[all]"] +docs = ["furo", "myst-parser", "sphinx (>=4.0.0)"] +sphinx = ["sphinx (>=4.0.0)"] +testing = ["pytest", "pytest-cov", "pytest-regressions", "sphinx (>=4.0.0)"] + +[[package]] +name = "sphinx-basic-ng" +version = "1.0.0b1" +description = "A modern skeleton for Sphinx themes." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sphinx_basic_ng-1.0.0b1-py3-none-any.whl", hash = "sha256:ade597a3029c7865b24ad0eda88318766bcc2f9f4cef60df7e28126fde94db2a"}, + {file = "sphinx_basic_ng-1.0.0b1.tar.gz", hash = "sha256:89374bd3ccd9452a301786781e28c8718e99960f2d4f411845ea75fc7bb5a9b0"}, +] + +[package.dependencies] +sphinx = ">=4.0" + +[package.extras] +docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.1" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
+category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + [[package]] name = "systemd-python" version = "235" @@ -2216,13 +2762,13 @@ tornado = "*" [[package]] name = "thrift" -version = "0.15.0" +version = "0.16.0" description = "Python bindings for the Apache Thrift RPC system" category = "main" optional = true python-versions = "*" files = [ - {file = "thrift-0.15.0.tar.gz", hash = "sha256:87c8205a71cf8bbb111cb99b1f7495070fbc9cabb671669568854210da5b3e29"}, + {file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"}, ] [package.dependencies] @@ -2235,65 +2781,35 @@ twisted = ["twisted"] [[package]] name = "tomli" -version = "1.2.3" +version = "2.0.1" description = "A lil' TOML parser" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"}, - {file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"}, + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] [[package]] name = "tornado" -version = "6.1" +version = "6.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
category = "main" optional = true -python-versions = ">= 3.5" -files = [ - {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"}, - {file = "tornado-6.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0d321a39c36e5f2c4ff12b4ed58d41390460f798422c4504e09eb5678e09998c"}, - {file = "tornado-6.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9de9e5188a782be6b1ce866e8a51bc76a0fbaa0e16613823fc38e4fc2556ad05"}, - {file = "tornado-6.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:61b32d06ae8a036a6607805e6720ef00a3c98207038444ba7fd3d169cd998910"}, - {file = "tornado-6.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:3e63498f680547ed24d2c71e6497f24bca791aca2fe116dbc2bd0ac7f191691b"}, - {file = "tornado-6.1-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:6c77c9937962577a6a76917845d06af6ab9197702a42e1346d8ae2e76b5e3675"}, - {file = "tornado-6.1-cp35-cp35m-win32.whl", hash = "sha256:6286efab1ed6e74b7028327365cf7346b1d777d63ab30e21a0f4d5b275fc17d5"}, - {file = "tornado-6.1-cp35-cp35m-win_amd64.whl", hash = "sha256:fa2ba70284fa42c2a5ecb35e322e68823288a4251f9ba9cc77be04ae15eada68"}, - {file = "tornado-6.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a00ff4561e2929a2c37ce706cb8233b7907e0cdc22eab98888aca5dd3775feb"}, - {file = "tornado-6.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:748290bf9112b581c525e6e6d3820621ff020ed95af6f17fedef416b27ed564c"}, - {file = "tornado-6.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e385b637ac3acaae8022e7e47dfa7b83d3620e432e3ecb9a3f7f58f150e50921"}, - {file = "tornado-6.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:25ad220258349a12ae87ede08a7b04aca51237721f63b1808d39bdb4b2164558"}, - {file = "tornado-6.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:65d98939f1a2e74b58839f8c4dab3b6b3c1ce84972ae712be02845e65391ac7c"}, - {file = "tornado-6.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e519d64089b0876c7b467274468709dadf11e41d65f63bba207e04217f47c085"}, - {file = "tornado-6.1-cp36-cp36m-win32.whl", hash = "sha256:b87936fd2c317b6ee08a5741ea06b9d11a6074ef4cc42e031bc6403f82a32575"}, - {file = "tornado-6.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cc0ee35043162abbf717b7df924597ade8e5395e7b66d18270116f8745ceb795"}, - {file = "tornado-6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7250a3fa399f08ec9cb3f7b1b987955d17e044f1ade821b32e5f435130250d7f"}, - {file = "tornado-6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ed3ad863b1b40cd1d4bd21e7498329ccaece75db5a5bf58cd3c9f130843e7102"}, - {file = "tornado-6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:dcef026f608f678c118779cd6591c8af6e9b4155c44e0d1bc0c87c036fb8c8c4"}, - {file = "tornado-6.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:70dec29e8ac485dbf57481baee40781c63e381bebea080991893cd297742b8fd"}, - {file = "tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d3f7594930c423fd9f5d1a76bee85a2c36fd8b4b16921cae7e965f22575e9c01"}, - {file = "tornado-6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3447475585bae2e77ecb832fc0300c3695516a47d46cefa0528181a34c5b9d3d"}, - {file = "tornado-6.1-cp37-cp37m-win32.whl", hash = "sha256:e7229e60ac41a1202444497ddde70a48d33909e484f96eb0da9baf8dc68541df"}, - {file = "tornado-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:cb5ec8eead331e3bb4ce8066cf06d2dfef1bfb1b2a73082dfe8a161301b76e37"}, - {file = "tornado-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:20241b3cb4f425e971cb0a8e4ffc9b0a861530ae3c52f2b0434e6c1b57e9fd95"}, - {file = "tornado-6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c77da1263aa361938476f04c4b6c8916001b90b2c2fdd92d8d535e1af48fba5a"}, - {file = "tornado-6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fba85b6cd9c39be262fcd23865652920832b61583de2a2ca907dbd8e8a8c81e5"}, - {file = "tornado-6.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:1e8225a1070cd8eec59a996c43229fe8f95689cb16e552d130b9793cb570a288"}, - {file = "tornado-6.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d14d30e7f46a0476efb0deb5b61343b1526f73ebb5ed84f23dc794bdb88f9d9f"}, - {file = "tornado-6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f959b26f2634a091bb42241c3ed8d3cedb506e7c27b8dd5c7b9f745318ddbb6"}, - {file = "tornado-6.1-cp38-cp38-win32.whl", hash = "sha256:34ca2dac9e4d7afb0bed4677512e36a52f09caa6fded70b4e3e1c89dbd92c326"}, - {file = "tornado-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:6196a5c39286cc37c024cd78834fb9345e464525d8991c21e908cc046d1cc02c"}, - {file = "tornado-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ba29bafd8e7e22920567ce0d232c26d4d47c8b5cf4ed7b562b5db39fa199c5"}, - {file = "tornado-6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:33892118b165401f291070100d6d09359ca74addda679b60390b09f8ef325ffe"}, - {file = "tornado-6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7da13da6f985aab7f6f28debab00c67ff9cbacd588e8477034c0652ac141feea"}, - {file = "tornado-6.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e0791ac58d91ac58f694d8d2957884df8e4e2f6687cdf367ef7eb7497f79eaa2"}, - {file = "tornado-6.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:66324e4e1beede9ac79e60f88de548da58b1f8ab4b2f1354d8375774f997e6c0"}, - {file = "tornado-6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a48900ecea1cbb71b8c71c620dee15b62f85f7c14189bdeee54966fbd9a0c5bd"}, - {file = "tornado-6.1-cp39-cp39-win32.whl", hash = "sha256:d3d20ea5782ba63ed13bc2b8c291a053c8d807a8fa927d941bd718468f7b950c"}, - {file = "tornado-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:548430be2740e327b3fe0201abe471f314741efcb0067ec4f2d7dcfb4825f3e4"}, - {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, +python-versions = ">= 3.7" +files = [ + {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, + {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8150f721c101abdef99073bf66d3903e292d851bee51910839831caba341a75"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a2f5999215a3a06a4fc218026cd84c61b8b2b40ac5296a6db1f1451ef04c1e"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c52d219d4995388119af7ccaa0bcec289535747620116a58d830e7c25d8a8"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_i686.whl", hash = "sha256:6fdfabffd8dfcb6cf887428849d30cf19a3ea34c2c248461e1f7d718ad30b66b"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:1d54d13ab8414ed44de07efecb97d4ef7c39f7438cf5e976ccd356bebb1b5fca"}, + {file = "tornado-6.2-cp37-abi3-win32.whl", hash = "sha256:5c87076709343557ef8032934ce5f637dbb552efa7b21d08e89ae7619ed0eb23"}, + {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, + {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, ] [[package]] @@ -2447,48 +2963,48 @@ twisted = "*" [[package]] name = "typed-ast" -version = "1.5.2" +version = "1.5.4" description = "a fork of Python 2 and 3 ast modules with type comment support" category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "typed_ast-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:183b183b7771a508395d2cbffd6db67d6ad52958a5fdc99f450d954003900266"}, - {file = "typed_ast-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:676d051b1da67a852c0447621fdd11c4e104827417bf216092ec3e286f7da596"}, - {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc2542e83ac8399752bc16e0b35e038bdb659ba237f4222616b4e83fb9654985"}, - {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74cac86cc586db8dfda0ce65d8bcd2bf17b58668dfcc3652762f3ef0e6677e76"}, - {file = "typed_ast-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:18fe320f354d6f9ad3147859b6e16649a0781425268c4dde596093177660e71a"}, - {file = "typed_ast-1.5.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:31d8c6b2df19a777bc8826770b872a45a1f30cfefcfd729491baa5237faae837"}, - {file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:963a0ccc9a4188524e6e6d39b12c9ca24cc2d45a71cfdd04a26d883c922b4b78"}, - {file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0eb77764ea470f14fcbb89d51bc6bbf5e7623446ac4ed06cbd9ca9495b62e36e"}, - {file = "typed_ast-1.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:294a6903a4d087db805a7656989f613371915fc45c8cc0ddc5c5a0a8ad9bea4d"}, - {file = "typed_ast-1.5.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26a432dc219c6b6f38be20a958cbe1abffcc5492821d7e27f08606ef99e0dffd"}, - {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7407cfcad702f0b6c0e0f3e7ab876cd1d2c13b14ce770e412c0c4b9728a0f88"}, - {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f30ddd110634c2d7534b2d4e0e22967e88366b0d356b24de87419cc4410c41b7"}, - {file = "typed_ast-1.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8c08d6625bb258179b6e512f55ad20f9dfef019bbfbe3095247401e053a3ea30"}, - {file = "typed_ast-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:90904d889ab8e81a956f2c0935a523cc4e077c7847a836abee832f868d5c26a4"}, - {file = "typed_ast-1.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bbebc31bf11762b63bf61aaae232becb41c5bf6b3461b80a4df7e791fabb3aca"}, - {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29dd9a3a9d259c9fa19d19738d021632d673f6ed9b35a739f48e5f807f264fb"}, - {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:58ae097a325e9bb7a684572d20eb3e1809802c5c9ec7108e85da1eb6c1a3331b"}, - {file = 
"typed_ast-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:da0a98d458010bf4fe535f2d1e367a2e2060e105978873c04c04212fb20543f7"}, - {file = "typed_ast-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:33b4a19ddc9fc551ebabca9765d54d04600c4a50eda13893dadf67ed81d9a098"}, - {file = "typed_ast-1.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1098df9a0592dd4c8c0ccfc2e98931278a6c6c53cb3a3e2cf7e9ee3b06153344"}, - {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42c47c3b43fe3a39ddf8de1d40dbbfca60ac8530a36c9b198ea5b9efac75c09e"}, - {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f290617f74a610849bd8f5514e34ae3d09eafd521dceaa6cf68b3f4414266d4e"}, - {file = "typed_ast-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:df05aa5b241e2e8045f5f4367a9f6187b09c4cdf8578bb219861c4e27c443db5"}, - {file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"}, + {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, + {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, + {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, + {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, + {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, + {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, + {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, + {file = 
"typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, + {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, + {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, + {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, ] [[package]] name = "types-bleach" -version = "6.0.0.0" +version = "6.0.0.1" description = "Typing stubs for bleach" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-bleach-6.0.0.0.tar.gz", hash = "sha256:770ce9c7ea6173743ef1a4a70f2619bb1819bf53c7cd0336d939af93f488fbe2"}, - {file = "types_bleach-6.0.0.0-py3-none-any.whl", hash = "sha256:75f55f035837c5fce2cd0bd5162a2a90057680a89c9275588a5c12f5f597a14a"}, + {file = "types-bleach-6.0.0.1.tar.gz", hash = "sha256:43d9129deb9e82918747437edf78f09ff440f2973f4702625b61994f3e698518"}, + {file = "types_bleach-6.0.0.1-py3-none-any.whl", hash = "sha256:440df967254007be80bb0f4d851f026c29c709cc48359bf4935d2b2f3a6f9f90"}, ] [[package]] @@ -2505,14 +3021,14 @@ files = [ [[package]] name = "types-jsonschema" -version = "4.17.0.5" +version = "4.17.0.6" description = "Typing stubs for jsonschema" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-jsonschema-4.17.0.5.tar.gz", hash = "sha256:7adc7bfca4afe291de0c93eca9367aa72a4fbe8ce87fe15642c600ad97d45dd6"}, - {file = "types_jsonschema-4.17.0.5-py3-none-any.whl", hash = "sha256:79ac8a7763fe728947af90a24168b91621edf7e8425bf3670abd4ea0d4758fba"}, + {file = "types-jsonschema-4.17.0.6.tar.gz", hash = "sha256:e9b15e34b4f2fd5587bd68530fa0eb2a17c73ead212f4471d71eea032d231c46"}, + {file = "types_jsonschema-4.17.0.6-py3-none-any.whl", hash = "sha256:ecef99bc64848f3798ad18922dfb2b40da25f17796fafcee50da984a21c5d6e6"}, ] [[package]] @@ -2580,26 +3096,26 @@ cryptography = ">=35.0.0" [[package]] name = "types-pyyaml" -version = "6.0.12.3" +version = "6.0.12.8" description = "Typing stubs for PyYAML" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-PyYAML-6.0.12.3.tar.gz", hash = "sha256:17ce17b3ead8f06e416a3b1d5b8ddc6cb82a422bb200254dd8b469434b045ffc"}, - {file = "types_PyYAML-6.0.12.3-py3-none-any.whl", hash = "sha256:879700e9f215afb20ab5f849590418ab500989f83a57e635689e1d50ccc63f0c"}, + {file = "types-PyYAML-6.0.12.8.tar.gz", hash = "sha256:19304869a89d49af00be681e7b267414df213f4eb89634c4495fa62e8f942b9f"}, + {file = 
"types_PyYAML-6.0.12.8-py3-none-any.whl", hash = "sha256:5314a4b2580999b2ea06b2e5f9a7763d860d6e09cdf21c0e9561daa9cbd60178"}, ] [[package]] name = "types-requests" -version = "2.28.11.15" +version = "2.28.11.16" description = "Typing stubs for requests" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-requests-2.28.11.15.tar.gz", hash = "sha256:fc8eaa09cc014699c6b63c60c2e3add0c8b09a410c818b5ac6e65f92a26dde09"}, - {file = "types_requests-2.28.11.15-py3-none-any.whl", hash = "sha256:a05e4c7bc967518fba5789c341ea8b0c942776ee474c7873129a61161978e586"}, + {file = "types-requests-2.28.11.16.tar.gz", hash = "sha256:9d4002056df7ebc4ec1f28fd701fba82c5c22549c4477116cb2656aa30ace6db"}, + {file = "types_requests-2.28.11.16-py3-none-any.whl", hash = "sha256:a86921028335fdcc3aaf676c9d3463f867db6af2303fc65aa309b13ae1e6dd53"}, ] [package.dependencies] @@ -2607,26 +3123,26 @@ types-urllib3 = "<1.27" [[package]] name = "types-setuptools" -version = "67.5.0.0" +version = "67.6.0.5" description = "Typing stubs for setuptools" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-setuptools-67.5.0.0.tar.gz", hash = "sha256:fa6f231eeb27e86b1d6e8260f73de300e91f99c205b9a5e21debd49f3726a849"}, - {file = "types_setuptools-67.5.0.0-py3-none-any.whl", hash = "sha256:f7f4bf4ab777e88631d3a387bbfdd4d480a2a4693ca896130f8ef738370377b8"}, + {file = "types-setuptools-67.6.0.5.tar.gz", hash = "sha256:3a708e66c7bdc620e4d0439f344c750c57a4340c895a4c3ed2d0fc4ae8eb9962"}, + {file = "types_setuptools-67.6.0.5-py3-none-any.whl", hash = "sha256:dae5a4a659dbb6dba57773440f6e2dbdd8ef282dc136a174a8a59bd33d949945"}, ] [[package]] name = "types-urllib3" -version = "1.26.10" +version = "1.26.25.8" description = "Typing stubs for urllib3" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-urllib3-1.26.10.tar.gz", hash = "sha256:a26898f530e6c3f43f25b907f2b884486868ffd56a9faa94cbf9b3eb6e165d6a"}, - {file = "types_urllib3-1.26.10-py3-none-any.whl", hash = "sha256:d755278d5ecd7a7a6479a190e54230f241f1a99c19b81518b756b19dc69e518c"}, + {file = "types-urllib3-1.26.25.8.tar.gz", hash = "sha256:ecf43c42d8ee439d732a1110b4901e9017a79a38daca26f08e42c8460069392c"}, + {file = "types_urllib3-1.26.25.8-py3-none-any.whl", hash = "sha256:95ea847fbf0bf675f50c8ae19a665baedcf07e6b4641662c4c3c72e7b2edf1a9"}, ] [[package]] @@ -2655,14 +3171,14 @@ files = [ [[package]] name = "urllib3" -version = "1.26.12" +version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, ] [package.extras] @@ -2684,124 +3200,135 @@ files = [ [[package]] name = "wrapt" -version = "1.14.1" +version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ - {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, - {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, - {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, - {file = 
"wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, - {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, - {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, - {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, - {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, - {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, - {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, - {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, - {file = 
"wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, - {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, - {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, - {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, - {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, - {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, + {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = 
"sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, + {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, + {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, + {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, + {file = 
"wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, + {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, + {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, + {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, + {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, + {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, + {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, + {file = 
"wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, + {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, + {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, + {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, + {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, + {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, + {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, + {file = "wrapt-1.15.0.tar.gz", hash = 
"sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, ] [[package]] name = "xmlschema" -version = "1.10.0" +version = "2.2.2" description = "An XML Schema validator and decoder" category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "xmlschema-1.10.0-py3-none-any.whl", hash = "sha256:dbd68bded2fef00c19cf37110ca0565eca34cf0b6c9e1d3b62ad0de8cbb582ca"}, - {file = "xmlschema-1.10.0.tar.gz", hash = "sha256:be1eedce6a4b911fd3a7f4060d0811951820a13410e61f0454b30e9f4e7cf197"}, + {file = "xmlschema-2.2.2-py3-none-any.whl", hash = "sha256:557f3632b54b6ff10576736bba62e43db84eb60f6465a83818576cd9ffcc1799"}, + {file = "xmlschema-2.2.2.tar.gz", hash = "sha256:0caa96668807b4b51c42a0fe2b6610752bc59f069615df3e34dcfffb962973fd"}, ] [package.dependencies] -elementpath = ">=2.5.0,<3.0.0" +elementpath = ">=4.0.0,<5.0.0" [package.extras] -codegen = ["elementpath (>=2.5.0,<3.0.0)", "jinja2"] -dev = ["Sphinx", "coverage", "elementpath (>=2.5.0,<3.0.0)", "flake8", "jinja2", "lxml", "lxml-stubs", "memory-profiler", "mypy", "sphinx-rtd-theme", "tox"] -docs = ["Sphinx", "elementpath (>=2.5.0,<3.0.0)", "jinja2", "sphinx-rtd-theme"] +codegen = ["elementpath (>=4.0.0,<5.0.0)", "jinja2"] +dev = ["Sphinx", "coverage", "elementpath (>=4.0.0,<5.0.0)", "flake8", "jinja2", "lxml", "lxml-stubs", "memory-profiler", "mypy", "sphinx-rtd-theme", "tox"] +docs = ["Sphinx", "elementpath (>=4.0.0,<5.0.0)", "jinja2", "sphinx-rtd-theme"] [[package]] name = "zipp" -version = "3.7.0" +version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, - {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, ] [package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [[package]] -name = "zope.event" -version = "4.5.0" +name = "zope-event" +version = "4.6" description = "Very basic event publishing system" category = "dev" optional = false python-versions = "*" files = [ - {file = "zope.event-4.5.0-py2.py3-none-any.whl", hash = "sha256:2666401939cdaa5f4e0c08cf7f20c9b21423b95e88f4675b1443973bdb080c42"}, - {file = "zope.event-4.5.0.tar.gz", hash = "sha256:5e76517f5b9b119acf37ca8819781db6c16ea433f7e2062c4afc2b6fbedb1330"}, + {file = "zope.event-4.6-py2.py3-none-any.whl", hash = "sha256:73d9e3ef750cca14816a9c322c7250b0d7c9dbc337df5d1b807ff8d3d0b9e97c"}, + {file = "zope.event-4.6.tar.gz", hash = "sha256:81d98813046fc86cc4136e3698fee628a3282f9c320db18658c21749235fce80"}, ] [package.dependencies] @@ -2812,64 
+3339,43 @@ docs = ["Sphinx"] test = ["zope.testrunner"] [[package]] -name = "zope.interface" -version = "5.4.0" +name = "zope-interface" +version = "6.0" description = "Interfaces for Python" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" files = [ - {file = "zope.interface-5.4.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:7df1e1c05304f26faa49fa752a8c690126cf98b40b91d54e6e9cc3b7d6ffe8b7"}, - {file = "zope.interface-5.4.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:2c98384b254b37ce50eddd55db8d381a5c53b4c10ee66e1e7fe749824f894021"}, - {file = "zope.interface-5.4.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:08f9636e99a9d5410181ba0729e0408d3d8748026ea938f3b970a0249daa8192"}, - {file = "zope.interface-5.4.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:0ea1d73b7c9dcbc5080bb8aaffb776f1c68e807767069b9ccdd06f27a161914a"}, - {file = "zope.interface-5.4.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:273f158fabc5ea33cbc936da0ab3d4ba80ede5351babc4f577d768e057651531"}, - {file = "zope.interface-5.4.0-cp27-cp27m-win32.whl", hash = "sha256:a1e6e96217a0f72e2b8629e271e1b280c6fa3fe6e59fa8f6701bec14e3354325"}, - {file = "zope.interface-5.4.0-cp27-cp27m-win_amd64.whl", hash = "sha256:877473e675fdcc113c138813a5dd440da0769a2d81f4d86614e5d62b69497155"}, - {file = "zope.interface-5.4.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f7ee479e96f7ee350db1cf24afa5685a5899e2b34992fb99e1f7c1b0b758d263"}, - {file = "zope.interface-5.4.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:b0297b1e05fd128d26cc2460c810d42e205d16d76799526dfa8c8ccd50e74959"}, - {file = "zope.interface-5.4.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:af310ec8335016b5e52cae60cda4a4f2a60a788cbb949a4fbea13d441aa5a09e"}, - {file = "zope.interface-5.4.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:9a9845c4c6bb56e508651f005c4aeb0404e518c6f000d5a1123ab077ab769f5c"}, - {file = "zope.interface-5.4.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0b465ae0962d49c68aa9733ba92a001b2a0933c317780435f00be7ecb959c702"}, - {file = "zope.interface-5.4.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5dd9ca406499444f4c8299f803d4a14edf7890ecc595c8b1c7115c2342cadc5f"}, - {file = "zope.interface-5.4.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:469e2407e0fe9880ac690a3666f03eb4c3c444411a5a5fddfdabc5d184a79f05"}, - {file = "zope.interface-5.4.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:52de7fc6c21b419078008f697fd4103dbc763288b1406b4562554bd47514c004"}, - {file = "zope.interface-5.4.0-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:3dd4952748521205697bc2802e4afac5ed4b02909bb799ba1fe239f77fd4e117"}, - {file = "zope.interface-5.4.0-cp35-cp35m-win32.whl", hash = "sha256:dd93ea5c0c7f3e25335ab7d22a507b1dc43976e1345508f845efc573d3d779d8"}, - {file = "zope.interface-5.4.0-cp35-cp35m-win_amd64.whl", hash = "sha256:3748fac0d0f6a304e674955ab1365d515993b3a0a865e16a11ec9d86fb307f63"}, - {file = "zope.interface-5.4.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:66c0061c91b3b9cf542131148ef7ecbecb2690d48d1612ec386de9d36766058f"}, - {file = "zope.interface-5.4.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:d0c1bc2fa9a7285719e5678584f6b92572a5b639d0e471bb8d4b650a1a910920"}, - {file = "zope.interface-5.4.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2876246527c91e101184f63ccd1d716ec9c46519cc5f3d5375a3351c46467c46"}, - {file = "zope.interface-5.4.0-cp36-cp36m-manylinux2010_i686.whl", hash = 
"sha256:334701327f37c47fa628fc8b8d28c7d7730ce7daaf4bda1efb741679c2b087fc"}, - {file = "zope.interface-5.4.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:71aace0c42d53abe6fc7f726c5d3b60d90f3c5c055a447950ad6ea9cec2e37d9"}, - {file = "zope.interface-5.4.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:5bb3489b4558e49ad2c5118137cfeaf59434f9737fa9c5deefc72d22c23822e2"}, - {file = "zope.interface-5.4.0-cp36-cp36m-win32.whl", hash = "sha256:1c0e316c9add0db48a5b703833881351444398b04111188069a26a61cfb4df78"}, - {file = "zope.interface-5.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f0c02cbb9691b7c91d5009108f975f8ffeab5dff8f26d62e21c493060eff2a1"}, - {file = "zope.interface-5.4.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:7d97a4306898b05404a0dcdc32d9709b7d8832c0c542b861d9a826301719794e"}, - {file = "zope.interface-5.4.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:867a5ad16892bf20e6c4ea2aab1971f45645ff3102ad29bd84c86027fa99997b"}, - {file = "zope.interface-5.4.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5f931a1c21dfa7a9c573ec1f50a31135ccce84e32507c54e1ea404894c5eb96f"}, - {file = "zope.interface-5.4.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:194d0bcb1374ac3e1e023961610dc8f2c78a0f5f634d0c737691e215569e640d"}, - {file = "zope.interface-5.4.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:8270252effc60b9642b423189a2fe90eb6b59e87cbee54549db3f5562ff8d1b8"}, - {file = "zope.interface-5.4.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:15e7d1f7a6ee16572e21e3576d2012b2778cbacf75eb4b7400be37455f5ca8bf"}, - {file = "zope.interface-5.4.0-cp37-cp37m-win32.whl", hash = "sha256:8892f89999ffd992208754851e5a052f6b5db70a1e3f7d54b17c5211e37a98c7"}, - {file = "zope.interface-5.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2e5a26f16503be6c826abca904e45f1a44ff275fdb7e9d1b75c10671c26f8b94"}, - {file = "zope.interface-5.4.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:0f91b5b948686659a8e28b728ff5e74b1be6bf40cb04704453617e5f1e945ef3"}, - {file = "zope.interface-5.4.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:4de4bc9b6d35c5af65b454d3e9bc98c50eb3960d5a3762c9438df57427134b8e"}, - {file = "zope.interface-5.4.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:bf68f4b2b6683e52bec69273562df15af352e5ed25d1b6641e7efddc5951d1a7"}, - {file = "zope.interface-5.4.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:63b82bb63de7c821428d513607e84c6d97d58afd1fe2eb645030bdc185440120"}, - {file = "zope.interface-5.4.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:db1fa631737dab9fa0b37f3979d8d2631e348c3b4e8325d6873c2541d0ae5a48"}, - {file = "zope.interface-5.4.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f44e517131a98f7a76696a7b21b164bcb85291cee106a23beccce454e1f433a4"}, - {file = "zope.interface-5.4.0-cp38-cp38-win32.whl", hash = "sha256:a9506a7e80bcf6eacfff7f804c0ad5350c8c95b9010e4356a4b36f5322f09abb"}, - {file = "zope.interface-5.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:3c02411a3b62668200910090a0dff17c0b25aaa36145082a5a6adf08fa281e54"}, - {file = "zope.interface-5.4.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:0cee5187b60ed26d56eb2960136288ce91bcf61e2a9405660d271d1f122a69a4"}, - {file = "zope.interface-5.4.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:a8156e6a7f5e2a0ff0c5b21d6bcb45145efece1909efcbbbf48c56f8da68221d"}, - {file = "zope.interface-5.4.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:205e40ccde0f37496904572035deea747390a8b7dc65146d30b96e2dd1359a83"}, - {file = "zope.interface-5.4.0-cp39-cp39-manylinux2010_i686.whl", hash = 
"sha256:3f24df7124c323fceb53ff6168da70dbfbae1442b4f3da439cd441681f54fe25"}, - {file = "zope.interface-5.4.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:5208ebd5152e040640518a77827bdfcc73773a15a33d6644015b763b9c9febc1"}, - {file = "zope.interface-5.4.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:17776ecd3a1fdd2b2cd5373e5ef8b307162f581c693575ec62e7c5399d80794c"}, - {file = "zope.interface-5.4.0-cp39-cp39-win32.whl", hash = "sha256:d4d9d6c1a455d4babd320203b918ccc7fcbefe308615c521062bc2ba1aa4d26e"}, - {file = "zope.interface-5.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:0cba8477e300d64a11a9789ed40ee8932b59f9ee05f85276dbb4b59acee5dd09"}, - {file = "zope.interface-5.4.0.tar.gz", hash = "sha256:5dba5f530fec3f0988d83b78cc591b58c0b6eb8431a85edd1569a0539a8a5a0e"}, + {file = "zope.interface-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f299c020c6679cb389814a3b81200fe55d428012c5e76da7e722491f5d205990"}, + {file = "zope.interface-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee4b43f35f5dc15e1fec55ccb53c130adb1d11e8ad8263d68b1284b66a04190d"}, + {file = "zope.interface-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a158846d0fca0a908c1afb281ddba88744d403f2550dc34405c3691769cdd85"}, + {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f72f23bab1848edb7472309e9898603141644faec9fd57a823ea6b4d1c4c8995"}, + {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f4d38cf4b462e75fac78b6f11ad47b06b1c568eb59896db5b6ec1094eb467f"}, + {file = "zope.interface-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:87b690bbee9876163210fd3f500ee59f5803e4a6607d1b1238833b8885ebd410"}, + {file = "zope.interface-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2363e5fd81afb650085c6686f2ee3706975c54f331b426800b53531191fdf28"}, + {file = "zope.interface-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af169ba897692e9cd984a81cb0f02e46dacdc07d6cf9fd5c91e81f8efaf93d52"}, + {file = "zope.interface-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa90bac61c9dc3e1a563e5babb3fd2c0c1c80567e815442ddbe561eadc803b30"}, + {file = "zope.interface-6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89086c9d3490a0f265a3c4b794037a84541ff5ffa28bb9c24cc9f66566968464"}, + {file = "zope.interface-6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:809fe3bf1a91393abc7e92d607976bbb8586512913a79f2bf7d7ec15bd8ea518"}, + {file = "zope.interface-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:0ec9653825f837fbddc4e4b603d90269b501486c11800d7c761eee7ce46d1bbb"}, + {file = "zope.interface-6.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:790c1d9d8f9c92819c31ea660cd43c3d5451df1df61e2e814a6f99cebb292788"}, + {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39b8711578dcfd45fc0140993403b8a81e879ec25d53189f3faa1f006087dca"}, + {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eba51599370c87088d8882ab74f637de0c4f04a6d08a312dce49368ba9ed5c2a"}, + {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6ee934f023f875ec2cfd2b05a937bd817efcc6c4c3f55c5778cbf78e58362ddc"}, + {file = "zope.interface-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:042f2381118b093714081fd82c98e3b189b68db38ee7d35b63c327c470ef8373"}, + {file = "zope.interface-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dfbbbf0809a3606046a41f8561c3eada9db811be94138f42d9135a5c47e75f6f"}, + {file = "zope.interface-6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:424d23b97fa1542d7be882eae0c0fc3d6827784105264a8169a26ce16db260d8"}, + {file = "zope.interface-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e538f2d4a6ffb6edfb303ce70ae7e88629ac6e5581870e66c306d9ad7b564a58"}, + {file = "zope.interface-6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12175ca6b4db7621aedd7c30aa7cfa0a2d65ea3a0105393e05482d7a2d367446"}, + {file = "zope.interface-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3d7dfd897a588ec27e391edbe3dd320a03684457470415870254e714126b1f"}, + {file = "zope.interface-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b3f543ae9d3408549a9900720f18c0194ac0fe810cecda2a584fd4dca2eb3bb8"}, + {file = "zope.interface-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0583b75f2e70ec93f100931660328965bb9ff65ae54695fb3fa0a1255daa6f2"}, + {file = "zope.interface-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:23ac41d52fd15dd8be77e3257bc51bbb82469cf7f5e9a30b75e903e21439d16c"}, + {file = "zope.interface-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99856d6c98a326abbcc2363827e16bd6044f70f2ef42f453c0bd5440c4ce24e5"}, + {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1592f68ae11e557b9ff2bc96ac8fc30b187e77c45a3c9cd876e3368c53dc5ba8"}, + {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4407b1435572e3e1610797c9203ad2753666c62883b921318c5403fb7139dec2"}, + {file = "zope.interface-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:5171eb073474a5038321409a630904fd61f12dd1856dd7e9d19cd6fe092cbbc5"}, + {file = "zope.interface-6.0.tar.gz", hash = "sha256:aab584725afd10c710b8f1e6e208dbee2d0ad009f57d674cb9d1b3964037275d"}, ] [package.dependencies] @@ -2881,15 +3387,15 @@ test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [[package]] -name = "zope.schema" -version = "6.2.0" +name = "zope-schema" +version = "7.0.1" description = "zope.interface extension for defining data schemas" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" files = [ - {file = "zope.schema-6.2.0-py2.py3-none-any.whl", hash = "sha256:03150d8670549590b45109e06b7b964f4e751fa9cb5297ec4985c3bc38641b07"}, - {file = "zope.schema-6.2.0.tar.gz", hash = "sha256:2201aef8ad75ee5a881284d7a6acd384661d6dca7bde5e80a22839a77124595b"}, + {file = "zope.schema-7.0.1-py3-none-any.whl", hash = "sha256:cf006c678793b00e0075ad54d55281c8785ea21e5bc1f5ec0584787719c2aab2"}, + {file = "zope.schema-7.0.1.tar.gz", hash = "sha256:ead4dbcb03354d4e410c9a3b904451eb44d90254751b1cbdedf4a61aede9fbb9"}, ] [package.dependencies] @@ -2920,4 +3426,4 @@ user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.7.1" -content-hash = 
"0ca92e52a1952f9485172efe25a039351280c28f0a158869557dc2f8855786fe" +content-hash = "1455d7b3de98a85a0361ae7018c5cde92264aa35d0bf842d79fe4d778787bbf6" diff --git a/pyproject.toml b/pyproject.toml index c0111dd79696..d10c390043e3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -350,6 +350,18 @@ towncrier = ">=18.6.0rc1" # Used for checking the Poetry lockfile tomli = ">=1.2.3" + +# Dependencies for building the development documentation +[tool.poetry.group.dev-docs] +optional = true + +[tool.poetry.group.dev-docs.dependencies] +sphinx = {version = "^6.1", python = "^3.8"} +sphinx-autodoc2 = {version = "^0.4.2", python = "^3.8"} +myst-parser = {version = "^1.0.0", python = "^3.8"} +furo = "^2022.12.7" + + [build-system] # The upper bounds here are defensive, intended to prevent situations like # #13849 and #14079 where we see buildtime or runtime errors caused by build diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh index 9e4ed3246e7a..1c0e6582f684 100755 --- a/scripts-dev/lint.sh +++ b/scripts-dev/lint.sh @@ -91,6 +91,7 @@ else "synapse" "docker" "tests" "scripts-dev" "contrib" "synmark" "stubs" ".ci" + "dev-docs" ) fi fi diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py index 106daa91844f..edc4b1768c5a 100644 --- a/synapse/federation/sender/__init__.py +++ b/synapse/federation/sender/__init__.py @@ -11,6 +11,119 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +""" +The Federation Sender is responsible for sending Persistent Data Units (PDUs) +and Ephemeral Data Units (EDUs) to other homeservers using +the `/send` Federation API. + + +## How do PDUs get sent? + +The Federation Sender is made aware of new PDUs due to `FederationSender.notify_new_events`. +When the sender is notified about a newly-persisted PDU that originates from this homeserver +and is not an out-of-band event, we pass the PDU to the `_PerDestinationQueue` for each +remote homeserver that is in the room at that point in the DAG. + + +### Per-Destination Queues + +There is one `PerDestinationQueue` per 'destination' homeserver. +The `PerDestinationQueue` maintains the following information about the destination: + +- whether the destination is currently in [catch-up mode (see below)](#catch-up-mode); +- a queue of PDUs to be sent to the destination; and +- a queue of EDUs to be sent to the destination (not considered in this section). + +Upon a new PDU being enqueued, `attempt_new_transaction` is called to start a new +transaction if there is not already one in progress. + + +### Transactions and the Transaction Transmission Loop + +Each federation HTTP request to the `/send` endpoint is referred to as a 'transaction'. +The body of the HTTP request contains a list of PDUs and EDUs to send to the destination. + +The *Transaction Transmission Loop* (`_transaction_transmission_loop`) is responsible +for emptying the queued PDUs (and EDUs) from a `PerDestinationQueue` by sending +them to the destination. + +There can only be one transaction in flight for a given destination at any time. +(Other than preventing us from overloading the destination, this also makes it easier to +reason about because we process events sequentially for each destination. +This is useful for *Catch-Up Mode*, described later.) + +The loop continues so long as there is anything to send. At each iteration of the loop, we: + +- dequeue up to 50 PDUs (and up to 100 EDUs). 
+- make the `/send` request to the destination homeserver with the dequeued PDUs and EDUs.
+- if successful, make note of the fact that we succeeded in transmitting PDUs up to
+  the given `stream_ordering` of the latest PDU, by advancing the destination's
+  `last_successful_stream_ordering`.
+- if unsuccessful, back off from the remote homeserver for some time.
+  If we have been unsuccessful for too long (when the backoff interval grows to exceed 1 hour),
+  the in-memory queues are emptied and we enter [*Catch-Up Mode*, described below](#catch-up-mode).
+
+
+### Catch-Up Mode
+
+When the `PerDestinationQueue` has the catch-up flag set, the *Catch-Up Transmission Loop*
+(`_catch_up_transmission_loop`) is used in lieu of the regular `_transaction_transmission_loop`.
+(Only once the catch-up mode has been exited can the regular transaction transmission behaviour
+be resumed.)
+
+*Catch-Up Mode*, entered upon Synapse startup or once a homeserver has fallen behind due to
+connection problems, is responsible for sending PDUs that have been missed by the destination
+homeserver. (PDUs can be missed because the `PerDestinationQueue` is volatile — i.e. resets
+on startup — and it does not hold PDUs forever if `/send` requests to the destination fail.)
+
+The catch-up mechanism makes use of the `last_successful_stream_ordering` column in the
+`destinations` table (which gives the `stream_ordering` of the most recent successfully
+sent PDU) and the `stream_ordering` column in the `destination_rooms` table (which gives,
+for each room, the `stream_ordering` of the most recent PDU that needs to be sent to this
+destination).
+
+Each iteration of the loop pulls out 50 `destination_rooms` entries with the oldest
+`stream_ordering`s that are greater than the `last_successful_stream_ordering`.
+In other words, from the set of latest PDUs in each room to be sent to the destination,
+the 50 oldest such PDUs are pulled out.
+
+These PDUs could, in principle, now be directly sent to the destination. However, as an
+optimisation intended to prevent overloading destination homeservers, we instead attempt
+to send the latest forward extremities so long as the destination homeserver is still
+eligible to receive those.
+This reduces load on the destination **in aggregate** because all Synapse homeservers
+will behave according to this principle and therefore avoid sending lots of different PDUs
+at different points in the DAG to a recovering homeserver.
+*This optimisation is not currently valid in rooms which are partial-state on this homeserver,
+since we are unable to determine whether the destination homeserver is eligible to receive
+the latest forward extremities unless this homeserver sent those PDUs — in this case, we
+just send the latest PDUs originating from this server and skip this optimisation.*
+
+Whilst PDUs are sent through this mechanism, the position of `last_successful_stream_ordering`
+is advanced as normal.
+Once there are no longer any rooms containing outstanding PDUs to be sent to the destination
+*that are not already in the `PerDestinationQueue` because they arrived since Catch-Up Mode
+was enabled*, Catch-Up Mode is exited and we return to `_transaction_transmission_loop`.
+
+
+#### A note on failures and back-offs
+
+If a remote server is unreachable over federation, we back off from that server,
+with an exponentially-increasing retry interval.
+Whilst we don't automatically retry after the interval, we prevent making new attempts
+until such time as the back-off has cleared.
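+
+As a rough illustration of that gating and escalation logic, here is a minimal
+sketch (illustrative only: `DestinationStateSketch`, `retry_interval_ms`,
+`retry_last_ts_ms` and `pdu_queue` are hypothetical names, not the real
+attributes, and the 1-second base interval is an arbitrary placeholder):
+
+```python
+MAX_RETRY_INTERVAL_MS = 60 * 60 * 1000  # back-offs beyond 1 hour trigger catch-up
+
+
+class DestinationStateSketch:
+    def __init__(self) -> None:
+        self.retry_interval_ms = 0  # 0 means "not currently backing off"
+        self.retry_last_ts_ms = 0  # timestamp of the most recent failure
+        self.pdu_queue: list = []  # in-memory queue of pending PDUs
+        self.catching_up = False
+
+    def may_attempt(self, now_ms: int) -> bool:
+        # New attempts are blocked until the back-off window has elapsed;
+        # nothing is retried automatically when the window expires.
+        return now_ms >= self.retry_last_ts_ms + self.retry_interval_ms
+
+    def on_success(self) -> None:
+        # A successful transaction clears the back-off entirely.
+        self.retry_interval_ms = 0
+
+    def on_failure(self, now_ms: int) -> None:
+        # Each failure doubles the retry interval (exponential back-off),
+        # starting from a placeholder base of one second.
+        self.retry_last_ts_ms = now_ms
+        self.retry_interval_ms = max(2 * self.retry_interval_ms, 1000)
+        if self.retry_interval_ms > MAX_RETRY_INTERVAL_MS:
+            # Backed off for too long: empty the in-memory queue to bound
+            # memory use, and rely on catch-up mode (driven by the durable
+            # `last_successful_stream_ordering`) to replay the missed PDUs.
+            self.pdu_queue.clear()
+            self.catching_up = True
+```
+
+This in-memory state is exactly what is lost on restart, which is why the
+durable `last_successful_stream_ordering` bookkeeping described above is needed.
+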
+Once the back-off is cleared and a new PDU or EDU arrives for transmission, the transmission +loop resumes and empties the queue by making federation requests. + +If the backoff grows too large (> 1 hour), the in-memory queue is emptied (to prevent +unbounded growth) and Catch-Up Mode is entered. + +It is worth noting that the back-off for a remote server is cleared once an inbound +request from that remote server is received (see `notify_remote_server_up`). +At this point, the transaction transmission loop is also started up, to proactively +send missed PDUs and EDUs to the destination (i.e. you don't need to wait for a new PDU +or EDU, destined for that destination, to be created in order to send out missed PDUs and +EDUs). +""" import abc import logging From ce00e57a2acc7c06d0e3bf4c9880dc830c6938ef Mon Sep 17 00:00:00 2001 From: reivilibre Date: Fri, 24 Mar 2023 17:54:58 +0000 Subject: [PATCH 017/106] Fix `.gitignore` rule for the Complement source tarball downloaded automatically by `complement.sh`. (#15319) --- .gitignore | 2 +- changelog.d/15319.misc | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15319.misc diff --git a/.gitignore b/.gitignore index 96c451258ee6..9d037f28e758 100644 --- a/.gitignore +++ b/.gitignore @@ -62,7 +62,7 @@ book/ # complement /complement-* -/master.tar.gz +/main.tar.gz # rust /target/ diff --git a/changelog.d/15319.misc b/changelog.d/15319.misc new file mode 100644 index 000000000000..339e5b347dcf --- /dev/null +++ b/changelog.d/15319.misc @@ -0,0 +1 @@ +Fix `.gitignore` rule for the Complement source tarball downloaded automatically by `complement.sh`. \ No newline at end of file From 43411a0fd891235dc10e195e88d44794918e957a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 11:03:02 +0100 Subject: [PATCH 018/106] Bump serde from 1.0.157 to 1.0.158 (#15324) * Bump serde from 1.0.157 to 1.0.158 Bumps [serde](https://github.com/serde-rs/serde) from 1.0.157 to 1.0.158. - [Release notes](https://github.com/serde-rs/serde/releases) - [Commits](https://github.com/serde-rs/serde/compare/v1.0.157...v1.0.158) --- updated-dependencies: - dependency-name: serde dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- Cargo.lock | 14 +++++++------- changelog.d/15324.misc | 1 + 2 files changed, 8 insertions(+), 7 deletions(-) create mode 100644 changelog.d/15324.misc diff --git a/Cargo.lock b/Cargo.lock index 2bad27154391..910c4f225040 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -323,22 +323,22 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "serde" -version = "1.0.157" +version = "1.0.158" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707de5fcf5df2b5788fca98dd7eab490bc2fd9b7ef1404defc462833b83f25ca" +checksum = "771d4d9c4163ee138805e12c710dd365e4f44be8be0503cb1bb9eb989425d9c9" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.157" +version = "1.0.158" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78997f4555c22a7971214540c4a661291970619afd56de19f77e0de86296e1e5" +checksum = "e801c1712f48475582b7696ac71e0ca34ebb30e09338425384269d9717c62cad" dependencies = [ "proc-macro2", "quote", - "syn 2.0.2", + "syn 2.0.10", ] [[package]] @@ -377,9 +377,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.2" +version = "2.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59d3276aee1fa0c33612917969b5172b5be2db051232a6e4826f1a1a9191b045" +checksum = "5aad1363ed6d37b84299588d62d3a7d95b5a5c2d9aad5c85609fda12afaa1f40" dependencies = [ "proc-macro2", "quote", diff --git a/changelog.d/15324.misc b/changelog.d/15324.misc new file mode 100644 index 000000000000..2908c82a2a4f --- /dev/null +++ b/changelog.d/15324.misc @@ -0,0 +1 @@ +Bump serde from 1.0.157 to 1.0.158. From 1ad142782aad26d2393cfe22946f2d020f9761a0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 11:03:58 +0100 Subject: [PATCH 019/106] Bump regex from 1.7.1 to 1.7.3 (#15325) * Bump regex from 1.7.1 to 1.7.3 Bumps [regex](https://github.com/rust-lang/regex) from 1.7.1 to 1.7.3. - [Release notes](https://github.com/rust-lang/regex/releases) - [Changelog](https://github.com/rust-lang/regex/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/regex/compare/1.7.1...1.7.3) --- updated-dependencies: - dependency-name: regex dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- Cargo.lock | 8 ++++---- changelog.d/15325.misc | 1 + 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15325.misc diff --git a/Cargo.lock b/Cargo.lock index 910c4f225040..68246b9b28a2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -294,9 +294,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.7.1" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" +checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d" dependencies = [ "aho-corasick", "memchr", @@ -305,9 +305,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.27" +version = "0.6.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "ryu" diff --git a/changelog.d/15325.misc b/changelog.d/15325.misc new file mode 100644 index 000000000000..51af16f7e897 --- /dev/null +++ b/changelog.d/15325.misc @@ -0,0 +1 @@ +Bump regex from 1.7.1 to 1.7.3. From fae4a2c066ecdadc05362c8be166461e4e024320 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 11:04:09 +0100 Subject: [PATCH 020/106] Bump types-pyopenssl from 23.0.0.4 to 23.1.0.0 (#15326) * Bump types-pyopenssl from 23.0.0.4 to 23.1.0.0 Bumps [types-pyopenssl](https://github.com/python/typeshed) from 23.0.0.4 to 23.1.0.0. - [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-pyopenssl dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/15326.misc | 1 + poetry.lock | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15326.misc diff --git a/changelog.d/15326.misc b/changelog.d/15326.misc new file mode 100644 index 000000000000..6238b7ff804d --- /dev/null +++ b/changelog.d/15326.misc @@ -0,0 +1 @@ +Bump types-pyopenssl from 23.0.0.4 to 23.1.0.0. 
diff --git a/poetry.lock b/poetry.lock index d42b3a57102e..73929b7d8b4c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3081,14 +3081,14 @@ files = [ [[package]] name = "types-pyopenssl" -version = "23.0.0.4" +version = "23.1.0.0" description = "Typing stubs for pyOpenSSL" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-pyOpenSSL-23.0.0.4.tar.gz", hash = "sha256:8b3550b6e19d51ce78aabd724b0d8ebd962081a5fce95e7f85a592dfcdbc16bf"}, - {file = "types_pyOpenSSL-23.0.0.4-py3-none-any.whl", hash = "sha256:ad49e15bb8bb2f251b8fc24776f414d877629e44b1b049240063ab013b5a6a7d"}, + {file = "types-pyOpenSSL-23.1.0.0.tar.gz", hash = "sha256:acc153718bff497e8f6ca3beecb5ea7a3087c796e40d569fded8bafbfca73605"}, + {file = "types_pyOpenSSL-23.1.0.0-py3-none-any.whl", hash = "sha256:9dacec020a3484ef5e4ea4bd9d403a981765b80821d5a40b790b2ba2f09d58db"}, ] [package.dependencies] From 316044d6fa57251e6c24d2e0057310855341dcda Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 11:05:10 +0100 Subject: [PATCH 021/106] Bump furo from 2022.12.7 to 2023.3.23 (#15327) * Bump furo from 2022.12.7 to 2023.3.23 Bumps [furo](https://github.com/pradyunsg/furo) from 2022.12.7 to 2023.3.23. - [Release notes](https://github.com/pradyunsg/furo/releases) - [Changelog](https://github.com/pradyunsg/furo/blob/main/docs/changelog.md) - [Commits](https://github.com/pradyunsg/furo/compare/2022.12.07...2023.03.23) --- updated-dependencies: - dependency-name: furo dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/15327.misc | 1 + poetry.lock | 8 ++++---- pyproject.toml | 2 +- 3 files changed, 6 insertions(+), 5 deletions(-) create mode 100644 changelog.d/15327.misc diff --git a/changelog.d/15327.misc b/changelog.d/15327.misc new file mode 100644 index 000000000000..e5813f1aa985 --- /dev/null +++ b/changelog.d/15327.misc @@ -0,0 +1 @@ +Bump furo from 2022.12.7 to 2023.3.23. diff --git a/poetry.lock b/poetry.lock index 73929b7d8b4c..169008888607 100644 --- a/poetry.lock +++ b/poetry.lock @@ -580,14 +580,14 @@ dev = ["Sphinx", "coverage", "flake8", "lxml", "lxml-stubs", "memory-profiler", [[package]] name = "furo" -version = "2022.12.7" +version = "2023.3.23" description = "A clean customisable Sphinx documentation theme." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "furo-2022.12.7-py3-none-any.whl", hash = "sha256:7cb76c12a25ef65db85ab0743df907573d03027a33631f17d267e598ebb191f7"}, - {file = "furo-2022.12.7.tar.gz", hash = "sha256:d8008f8efbe7587a97ba533c8b2df1f9c21ee9b3e5cad0d27f61193d38b1a986"}, + {file = "furo-2023.3.23-py3-none-any.whl", hash = "sha256:1cdf0730496f6ac0ecf394845fe55010539d987a3085f29d819e49a8e87da60a"}, + {file = "furo-2023.3.23.tar.gz", hash = "sha256:6cf9a59fe2919693ecff6509a08229afa081583cbb5b81f59c3e755f3bd81d26"}, ] [package.dependencies] @@ -3426,4 +3426,4 @@ user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.7.1" -content-hash = "1455d7b3de98a85a0361ae7018c5cde92264aa35d0bf842d79fe4d778787bbf6" +content-hash = "0a1dd4be3dff3c8cc71bd57a4eb48e1d92f155db7230e61fbb54f8af03619509" diff --git a/pyproject.toml b/pyproject.toml index d10c390043e3..b04edb611de8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -359,7 +359,7 @@ optional = true sphinx = {version = "^6.1", python = "^3.8"} sphinx-autodoc2 = {version = "^0.4.2", python = "^3.8"} myst-parser = {version = "^1.0.0", python = "^3.8"} -furo = "^2022.12.7" +furo = ">=2022.12.7,<2024.0.0" [build-system] From 7d3ea4886c09c3c3557a876521ed99e58f6f13f9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 11:05:43 +0100 Subject: [PATCH 022/106] Bump mypy-zope from 0.9.0 to 0.9.1 (#15330) * Bump mypy-zope from 0.9.0 to 0.9.1 Bumps [mypy-zope](https://github.com/Shoobx/mypy-zope) from 0.9.0 to 0.9.1. - [Release notes](https://github.com/Shoobx/mypy-zope/releases) - [Changelog](https://github.com/Shoobx/mypy-zope/blob/master/CHANGELOG.md) - [Commits](https://github.com/Shoobx/mypy-zope/compare/0.9.0...0.9.1) --- updated-dependencies: - dependency-name: mypy-zope dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/15330.misc | 1 + poetry.lock | 8 ++++---- 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15330.misc diff --git a/changelog.d/15330.misc b/changelog.d/15330.misc new file mode 100644 index 000000000000..d60e63483e1e --- /dev/null +++ b/changelog.d/15330.misc @@ -0,0 +1 @@ +Bump mypy-zope from 0.9.0 to 0.9.1. 
diff --git a/poetry.lock b/poetry.lock index 169008888607..01b48edc4f01 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1520,18 +1520,18 @@ files = [ [[package]] name = "mypy-zope" -version = "0.9.0" +version = "0.9.1" description = "Plugin for mypy to support zope interfaces" category = "dev" optional = false python-versions = "*" files = [ - {file = "mypy-zope-0.9.0.tar.gz", hash = "sha256:88bf6cd056e38b338e6956055958a7805b4ff84404ccd99e29883a3647a1aeb3"}, - {file = "mypy_zope-0.9.0-py3-none-any.whl", hash = "sha256:e1bb4b57084f76ff8a154a3e07880a1af2ac6536c491dad4b143d529f72c5d15"}, + {file = "mypy-zope-0.9.1.tar.gz", hash = "sha256:4c87dbc71fec35f6533746ecdf9d400cd9281338d71c16b5676bb5ed00a97ca2"}, + {file = "mypy_zope-0.9.1-py3-none-any.whl", hash = "sha256:733d4399affe9e61e332ce9c4049418d6775c39b473e4b9f409d51c207c1b71a"}, ] [package.dependencies] -mypy = "1.0.0" +mypy = ">=1.0.0,<1.1.0" "zope.interface" = "*" "zope.schema" = "*" From 7a892ce7936c9323116b18e6d4e28bca7004dee8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 11:23:51 +0100 Subject: [PATCH 023/106] Bump cryptography from 40.0.0 to 40.0.1 (#15329) * Bump cryptography from 40.0.0 to 40.0.1 Bumps [cryptography](https://github.com/pyca/cryptography) from 40.0.0 to 40.0.1. - [Release notes](https://github.com/pyca/cryptography/releases) - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/40.0.0...40.0.1) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/15329.misc | 1 + poetry.lock | 40 ++++++++++++++++++++-------------------- 2 files changed, 21 insertions(+), 20 deletions(-) create mode 100644 changelog.d/15329.misc diff --git a/changelog.d/15329.misc b/changelog.d/15329.misc new file mode 100644 index 000000000000..eea38b7a42da --- /dev/null +++ b/changelog.d/15329.misc @@ -0,0 +1 @@ +Bump cryptography from 40.0.0 to 40.0.1. diff --git a/poetry.lock b/poetry.lock index 01b48edc4f01..294ce49a8d9c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -481,31 +481,31 @@ files = [ [[package]] name = "cryptography" -version = "40.0.0" +version = "40.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "cryptography-40.0.0-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:168ded448fb5d82dfa911156ab8b13b1716de65bd50ff977f4657643f998fa05"}, - {file = "cryptography-40.0.0-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:71cb346b9dd1537102e7466a2d629385b01847f8d96cd7405f0e717d91cebc8e"}, - {file = "cryptography-40.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e63fb48e2615cfab5a9c4bb457d35e7ae03ea8593996bfbe257e78244d12d0"}, - {file = "cryptography-40.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2c4134d29cdce0735c16abf48fa8435f001a7b0031e68dd9a9ee1c80a29374a"}, - {file = "cryptography-40.0.0-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:754dc5ab648113dc54197f242db43234a04e4d61193fb5d3ebb42bd569dca571"}, - {file = "cryptography-40.0.0-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43089be365c0ca4235c6e4e781f3bc125bc1fff576c9dd22cdfb585309b9bb9d"}, - {file = "cryptography-40.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:23c42c59c2b5b9ddc6a85b5c46b8fabc4d63a1714f4dbea4bf20d25690bf2365"}, - {file = "cryptography-40.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6b36e2864e04c82634879c7e7aad48824b1847fdb06b64cd410d2ec5e51d1b31"}, - {file = "cryptography-40.0.0-cp36-abi3-win32.whl", hash = "sha256:e917a07094217edeefe8f6ea960b45d7aab650b982e4209da078332cc9d3ac3a"}, - {file = "cryptography-40.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:fba36ec552794a06a07ac8bdc5ad83a587f6959d98547f373d401975d55c7c9e"}, - {file = "cryptography-40.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fafa997b9e6818db333ded4b379f5b7679b48bd88ac878428cea2a1aa6e79fd8"}, - {file = "cryptography-40.0.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b05c9f25a1ea42e427085230815bbdebe15a53bb6163c4c06022e5630645046b"}, - {file = "cryptography-40.0.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:14da8c26755ffa5c7863ffa5e8b87cb9596a21b6c34852cb19e0f48c226c64fb"}, - {file = "cryptography-40.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7162ae4530958114ca2eee30a56eca46527def33493f622f059dc2e825fd0913"}, - {file = "cryptography-40.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e5855a80c77565fe2464e88e0095764e25d8ddb2d24df2b1d31773e80be94435"}, - {file = "cryptography-40.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:34f502619964210939bb7ee7cd5df53178534eb08d3526f941695a8f7aa0efe4"}, - {file = "cryptography-40.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:722cfddae79684166840be2cbbae154f44a455519e644b60bf274a50ccb834db"}, - {file = "cryptography-40.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7cc9fc3ffcb766c313ed0515d77d0deabb4f36bdcff3a9f115c43e5ec611b82a"}, - {file = "cryptography-40.0.0.tar.gz", hash = "sha256:f421f6777592eb199ca8abac7c20b9ecef27c50ad63546e6c614b29771b46d0d"}, + {file = "cryptography-40.0.1-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:918cb89086c7d98b1b86b9fdb70c712e5a9325ba6f7d7cfb509e784e0cfc6917"}, + {file = "cryptography-40.0.1-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:9618a87212cb5200500e304e43691111570e1f10ec3f35569fdfcd17e28fd797"}, + {file = "cryptography-40.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4805a4ca729d65570a1b7cac84eac1e431085d40387b7d3bbaa47e39890b88"}, + {file = "cryptography-40.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:63dac2d25c47f12a7b8aa60e528bfb3c51c5a6c5a9f7c86987909c6c79765554"}, + {file = "cryptography-40.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:0a4e3406cfed6b1f6d6e87ed243363652b2586b2d917b0609ca4f97072994405"}, + {file = "cryptography-40.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1e0af458515d5e4028aad75f3bb3fe7a31e46ad920648cd59b64d3da842e4356"}, + {file = "cryptography-40.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d8aa3609d337ad85e4eb9bb0f8bcf6e4409bfb86e706efa9a027912169e89122"}, + {file = "cryptography-40.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cf91e428c51ef692b82ce786583e214f58392399cf65c341bc7301d096fa3ba2"}, + {file = "cryptography-40.0.1-cp36-abi3-win32.whl", hash = "sha256:650883cc064297ef3676b1db1b7b1df6081794c4ada96fa457253c4cc40f97db"}, + {file = "cryptography-40.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:a805a7bce4a77d51696410005b3e85ae2839bad9aa38894afc0aa99d8e0c3160"}, + {file = "cryptography-40.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd033d74067d8928ef00a6b1327c8ea0452523967ca4463666eeba65ca350d4c"}, + {file = "cryptography-40.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d36bbeb99704aabefdca5aee4eba04455d7a27ceabd16f3b3ba9bdcc31da86c4"}, + {file = "cryptography-40.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:32057d3d0ab7d4453778367ca43e99ddb711770477c4f072a51b3ca69602780a"}, + {file = "cryptography-40.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f5d7b79fa56bc29580faafc2ff736ce05ba31feaa9d4735048b0de7d9ceb2b94"}, + {file = "cryptography-40.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7c872413353c70e0263a9368c4993710070e70ab3e5318d85510cc91cce77e7c"}, + {file = "cryptography-40.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:28d63d75bf7ae4045b10de5413fb1d6338616e79015999ad9cf6fc538f772d41"}, + {file = "cryptography-40.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6f2bbd72f717ce33100e6467572abaedc61f1acb87b8d546001328d7f466b778"}, + {file = "cryptography-40.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cc3a621076d824d75ab1e1e530e66e7e8564e357dd723f2533225d40fe35c60c"}, + {file = "cryptography-40.0.1.tar.gz", hash = "sha256:2803f2f8b1e95f614419926c7e6f55d828afc614ca5ed61543877ae668cc3472"}, ] [package.dependencies] From 4fc85e5a921c7200a54a7fd1e9b56b5d2fedc453 Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Mon, 27 Mar 2023 13:37:17 +0200 Subject: [PATCH 024/106] Load `/password_policy` endpoint on workers. (#15331) --- changelog.d/15331.feature | 1 + docker/configure_workers_and_start.py | 1 + docs/workers.md | 1 + synapse/rest/__init__.py | 3 +-- synapse/rest/client/password_policy.py | 1 + 5 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 changelog.d/15331.feature diff --git a/changelog.d/15331.feature b/changelog.d/15331.feature new file mode 100644 index 000000000000..b4c2eddc48ce --- /dev/null +++ b/changelog.d/15331.feature @@ -0,0 +1 @@ +Allow loading `/password_policy` endpoint on workers. 
\ No newline at end of file diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py index 3f2f5c2daf85..2a50ee1e4b4e 100755 --- a/docker/configure_workers_and_start.py +++ b/docker/configure_workers_and_start.py @@ -172,6 +172,7 @@ "^/_matrix/client/v1/rooms/.*/timestamp_to_event$", "^/_matrix/client/(api/v1|r0|v3|unstable)/search", "^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)", + "^/_matrix/client/(r0|v3|unstable)/password_policy$", ], "shared_extra_conf": {}, "worker_extra_conf": "", diff --git a/docs/workers.md b/docs/workers.md index bf7690f5aff2..e9a477d32c89 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -247,6 +247,7 @@ information. ^/_matrix/client/(r0|v3|unstable)/register$ ^/_matrix/client/(r0|v3|unstable)/register/available$ ^/_matrix/client/v1/register/m.login.registration_token/validity$ + ^/_matrix/client/(r0|v3|unstable)/password_policy$ # Event sending requests ^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/redact diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py index 55b448adfdba..1d7c11b42d09 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -138,8 +138,7 @@ def register_servlets(client_resource: HttpServer, hs: "HomeServer") -> None: capabilities.register_servlets(hs, client_resource) account_validity.register_servlets(hs, client_resource) relations.register_servlets(hs, client_resource) - if is_main_process: - password_policy.register_servlets(hs, client_resource) + password_policy.register_servlets(hs, client_resource) knock.register_servlets(hs, client_resource) appservice_ping.register_servlets(hs, client_resource) diff --git a/synapse/rest/client/password_policy.py b/synapse/rest/client/password_policy.py index 9f1908004b9d..0ee4f9da16f1 100644 --- a/synapse/rest/client/password_policy.py +++ b/synapse/rest/client/password_policy.py @@ -31,6 +31,7 @@ class PasswordPolicyServlet(RestServlet): PATTERNS = client_patterns("/password_policy$") + CATEGORY = "Registration/login requests" def __init__(self, hs: "HomeServer"): super().__init__() From 96f163d932626e2c95a9ebdda293a14ee5b1dfea Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 27 Mar 2023 14:32:36 +0100 Subject: [PATCH 025/106] Prune old typing notifications (#15332) Rather than keeping them around forever in memory, slowing things down. Fixes #11750. --- changelog.d/15332.bugfix | 1 + synapse/handlers/typing.py | 25 +++++++++++++++++++++++++ 2 files changed, 26 insertions(+) create mode 100644 changelog.d/15332.bugfix diff --git a/changelog.d/15332.bugfix b/changelog.d/15332.bugfix new file mode 100644 index 000000000000..ca6fb1d2fd79 --- /dev/null +++ b/changelog.d/15332.bugfix @@ -0,0 +1 @@ +Fix bug in worker mode where on a rolling restart of workers the "typing" worker would consume 100% CPU until it got restarted. diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py index 3f656ea4f508..39ae44ea95fc 100644 --- a/synapse/handlers/typing.py +++ b/synapse/handlers/typing.py @@ -52,6 +52,11 @@ class RoomMember: FEDERATION_PING_INTERVAL = 40 * 1000 +# How long to remember a typing notification happened in a room before +# forgetting about it. +FORGET_TIMEOUT = 10 * 60 * 1000 + + class FollowerTypingHandler: """A typing handler on a different process than the writer that is updated via replication. 
@@ -83,7 +88,10 @@ def __init__(self, hs: "HomeServer"): self.wheel_timer: WheelTimer[RoomMember] = WheelTimer(bucket_size=5000) self._latest_room_serial = 0 + self._rooms_updated: Set[str] = set() + self.clock.looping_call(self._handle_timeouts, 5000) + self.clock.looping_call(self._prune_old_typing, FORGET_TIMEOUT) def _reset(self) -> None: """Reset the typing handler's data caches.""" @@ -92,6 +100,8 @@ def _reset(self) -> None: # map room IDs to sets of users currently typing self._room_typing = {} + self._rooms_updated = set() + self._member_last_federation_poke = {} self.wheel_timer = WheelTimer(bucket_size=5000) @@ -178,6 +188,7 @@ def process_replication_rows( prev_typing = self._room_typing.get(row.room_id, set()) now_typing = set(row.user_ids) self._room_typing[row.room_id] = now_typing + self._rooms_updated.add(row.room_id) if self.federation: run_as_background_process( @@ -209,6 +220,19 @@ async def _send_changes_in_typing_to_remotes( def get_current_token(self) -> int: return self._latest_room_serial + def _prune_old_typing(self) -> None: + """Prune rooms that haven't seen typing updates since last time. + + This is safe to do as clients should time out old typing notifications. + """ + stale_rooms = self._room_serials.keys() - self._rooms_updated + + for room_id in stale_rooms: + self._room_serials.pop(room_id, None) + self._room_typing.pop(room_id, None) + + self._rooms_updated = set() + class TypingWriterHandler(FollowerTypingHandler): def __init__(self, hs: "HomeServer"): @@ -388,6 +412,7 @@ def _push_update_local(self, member: RoomMember, typing: bool) -> None: self._typing_stream_change_cache.entity_has_changed( member.room_id, self._latest_room_serial ) + self._rooms_updated.add(member.room_id) self.notifier.on_new_event( StreamKeyType.TYPING, self._latest_room_serial, rooms=[member.room_id] From bd4d958aaf7c2123abb3665e7a7b199cf8ce27ee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Mar 2023 09:46:47 +0100 Subject: [PATCH 026/106] Bump ruff from 0.0.252 to 0.0.259 (#15328) * Bump ruff from 0.0.252 to 0.0.259 Bumps [ruff](https://github.com/charliermarsh/ruff) from 0.0.252 to 0.0.259. - [Release notes](https://github.com/charliermarsh/ruff/releases) - [Changelog](https://github.com/charliermarsh/ruff/blob/main/BREAKING_CHANGES.md) - [Commits](https://github.com/charliermarsh/ruff/compare/v0.0.252...v0.0.259) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... 
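For context: the "Fix new warnings" changes bundled into this bump rewrite dict comprehensions that the newer ruff flags as unnecessary (its C416 check). A minimal sketch of the pattern, using a hypothetical `sigs_by_name` mapping rather than code from this repository:

    # A stand-in for an event's signatures block.
    sigs_by_name = {"example.org": {"ed25519:1": "c2lnbmF0dXJl"}}

    # Flagged: copying nested mappings with an explicit dict comprehension.
    signatures = {
        name: {sig_id: sig for sig_id, sig in sigs.items()}
        for name, sigs in sigs_by_name.items()
    }

    # Preferred: dict() over the same items builds an identical copy.
    signatures = {name: dict(sigs.items()) for name, sigs in sigs_by_name.items()}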
Signed-off-by: dependabot[bot] * Fix new warnings * Mypy * Newsfile --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Erik Johnston --- changelog.d/15328.misc | 1 + poetry.lock | 38 +++++++++---------- pyproject.toml | 2 +- synapse/events/__init__.py | 4 +- synapse/events/utils.py | 2 +- synapse/storage/database.py | 4 +- synapse/storage/databases/main/events.py | 5 +-- synapse/storage/databases/main/pusher.py | 2 +- synapse/storage/databases/main/stats.py | 14 ++++++- synapse/storage/databases/main/stream.py | 5 ++- .../replication/slave/storage/test_events.py | 2 +- tests/server.py | 10 ++++- 12 files changed, 54 insertions(+), 35 deletions(-) create mode 100644 changelog.d/15328.misc diff --git a/changelog.d/15328.misc b/changelog.d/15328.misc new file mode 100644 index 000000000000..e3e595333229 --- /dev/null +++ b/changelog.d/15328.misc @@ -0,0 +1 @@ +Bump ruff from 0.0.252 to 0.0.259. diff --git a/poetry.lock b/poetry.lock index 294ce49a8d9c..978a6e159838 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2323,29 +2323,29 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.0.252" +version = "0.0.259" description = "An extremely fast Python linter, written in Rust." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.0.252-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:349367a227c4db7abbc3a9993efea8a608b5bea4bb4a1e5fc6f0d56819524f92"}, - {file = "ruff-0.0.252-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:ce77f9106d96b4faf7865860fb5155b9deaf6f699d9c279118c5ad947739ecaf"}, - {file = "ruff-0.0.252-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edadb0b050293b4e60dab979ba6a4e734d9c899cbe316a0ee5b65e3cdd39c750"}, - {file = "ruff-0.0.252-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4efdae98937d1e4d23ab0b7fc7e8e6b6836cc7d2d42238ceeacbc793ef780542"}, - {file = "ruff-0.0.252-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8546d879f7d3f669379a03e7b103d90e11901976ab508aeda59c03dfd8a359e"}, - {file = "ruff-0.0.252-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:83fdc7169b6c1fb5fe8d1cdf345697f558c1b433ef97df9ca11defa2a8f3ee9e"}, - {file = "ruff-0.0.252-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84ed9be1a17e2a556a571a5b959398633dd10910abd8dcf8b098061e746e892d"}, - {file = "ruff-0.0.252-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f5e77bd9ba4438cf2ee32154e2673afe22f538ef29f5d65ca47e3dc46c42cf8"}, - {file = "ruff-0.0.252-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5179b94b45c0f8512eaff3ab304c14714a46df2e9ca72a9d96084adc376b71"}, - {file = "ruff-0.0.252-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:92efd8a71157595df5bc46aaaa0613d8a2fbc5cddc53ae7b749c16025c324732"}, - {file = "ruff-0.0.252-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd350fc10832cfd28e681d829a8aa83ea3e653326e0ea9d98637dfb8d46177d2"}, - {file = "ruff-0.0.252-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f119240c9631216e846166e06023b1d878e25fbac93bf20da50069e91cfbfaee"}, - {file = "ruff-0.0.252-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5c5a49f89f5ede93d16eddfeeadd7e5739ec703e8f63ac95eac30236b9e49da3"}, - {file = "ruff-0.0.252-py3-none-win32.whl", hash = "sha256:89a897dc743f2fe063483ea666097e72e848f4bbe40493fe0533e61799959f6e"}, - 
{file = "ruff-0.0.252-py3-none-win_amd64.whl", hash = "sha256:cdc89ad6ff88519b1fb1816ac82a9ad910762c90ff5fd64dda7691b72d36aff7"}, - {file = "ruff-0.0.252-py3-none-win_arm64.whl", hash = "sha256:4b594a17cf53077165429486650658a0e1b2ac6ab88954f5afd50d2b1b5657a9"}, - {file = "ruff-0.0.252.tar.gz", hash = "sha256:6992611ab7bdbe7204e4831c95ddd3febfeece2e6f5e44bbed044454c7db0f63"}, + {file = "ruff-0.0.259-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:f3938dc45e2a3f818e9cbd53007265c22246fbfded8837b2c563bf0ebde1a226"}, + {file = "ruff-0.0.259-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:22e1e35bf5f12072cd644d22afd9203641ccf258bc14ff91aa1c43dc14f6047d"}, + {file = "ruff-0.0.259-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2fb20e89e85d147c85caa807707a1488bccc1f3854dc3d53533e89b52a0c5ff"}, + {file = "ruff-0.0.259-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:49e903bcda19f6bb0725a962c058eb5d61f40d84ef52ed53b61939b69402ab4e"}, + {file = "ruff-0.0.259-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71f0ef1985e9a6696fa97da8459917fa34bdaa2c16bd33bd5edead585b7d44f7"}, + {file = "ruff-0.0.259-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7cfef26619cba184d59aa7fa17b48af5891d51fc0b755a9bc533478a10d4d066"}, + {file = "ruff-0.0.259-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79b02fa17ec1fd8d306ae302cb47fb614b71e1f539997858243769bcbe78c6d9"}, + {file = "ruff-0.0.259-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:428507fb321b386dda70d66cd1a8aa0abf51d7c197983d83bb9e4fa5ee60300b"}, + {file = "ruff-0.0.259-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5fbaea9167f1852757f02133e5daacdb8c75b3431343205395da5b10499927a"}, + {file = "ruff-0.0.259-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:40ae87f2638484b7e8a7567b04a7af719f1c484c5bf132038b702bb32e1f6577"}, + {file = "ruff-0.0.259-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:29e2b77b7d5da6a7dd5cf9b738b511355c5734ece56f78e500d4b5bffd58c1a0"}, + {file = "ruff-0.0.259-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b3c1beacf6037e7f0781d4699d9a2dd4ba2462f475be5b1f45cf84c4ba3c69d"}, + {file = "ruff-0.0.259-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:daaea322e7e85f4c13d82be9536309e1c4b8b9851bb0cbc7eeb15d490fd46bf9"}, + {file = "ruff-0.0.259-py3-none-win32.whl", hash = "sha256:38704f151323aa5858370a2f792e122cc25e5d1aabe7d42ceeab83da18f0b456"}, + {file = "ruff-0.0.259-py3-none-win_amd64.whl", hash = "sha256:aa9449b898287e621942cc71b9327eceb8f0c357e4065fecefb707ef2d978df8"}, + {file = "ruff-0.0.259-py3-none-win_arm64.whl", hash = "sha256:e4f39e18702de69faaaee3969934b92d7467285627f99a5b6ecd55a7d9f5d086"}, + {file = "ruff-0.0.259.tar.gz", hash = "sha256:8b56496063ab3bfdf72339a5fbebb8bd46e5c5fee25ef11a9f03b208fa0562ec"}, ] [[package]] @@ -3426,4 +3426,4 @@ user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.7.1" -content-hash = "0a1dd4be3dff3c8cc71bd57a4eb48e1d92f155db7230e61fbb54f8af03619509" +content-hash = "102eed4faa13eab195555ea070f235acd1e3f0ff9cf028afcac6c51b3e409071" diff --git a/pyproject.toml b/pyproject.toml index b04edb611de8..9a6306ee701b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -311,7 +311,7 @@ all = [ # We pin black so that our tests don't start failing on new releases. 
isort = ">=5.10.1" black = ">=22.3.0" -ruff = "0.0.252" +ruff = "0.0.259" # Typechecking mypy = "*" diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 91118a8d847f..d475fe7ae526 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -462,7 +462,7 @@ def __init__( # Signatures is a dict of dicts, and this is faster than doing a # copy.deepcopy signatures = { - name: {sig_id: sig for sig_id, sig in sigs.items()} + name: dict(sigs.items()) for name, sigs in event_dict.pop("signatures", {}).items() } @@ -510,7 +510,7 @@ def __init__( # Signatures is a dict of dicts, and this is faster than doing a # copy.deepcopy signatures = { - name: {sig_id: sig for sig_id, sig in sigs.items()} + name: dict(sigs.items()) for name, sigs in event_dict.pop("signatures", {}).items() } diff --git a/synapse/events/utils.py b/synapse/events/utils.py index e41c7a4b83e6..c14c7791db27 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -355,7 +355,7 @@ def serialize_event( time_now_ms = int(time_now_ms) # Should this strip out None's? - d = {k: v for k, v in e.get_dict().items()} + d = dict(e.get_dict().items()) d["event_id"] = e.event_id diff --git a/synapse/storage/database.py b/synapse/storage/database.py index fec4ae5b970d..226ccc1671ad 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -1504,8 +1504,8 @@ def simple_upsert_many_txn_emulated( self.engine.lock_table(txn, "user_ips") for keyv, valv in zip(key_values, value_values): - _keys = {x: y for x, y in zip(key_names, keyv)} - _vals = {x: y for x, y in zip(value_names, valv)} + _keys = dict(zip(key_names, keyv)) + _vals = dict(zip(value_names, valv)) self.simple_upsert_txn_emulated(txn, table, _keys, _vals, lock=False) diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index 193959b25022..ccd9f9d14189 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -27,6 +27,7 @@ Optional, Set, Tuple, + cast, ) import attr @@ -1348,9 +1349,7 @@ def _update_outliers_txn( [event.event_id for event, _ in events_and_contexts], ) - have_persisted: Dict[str, bool] = { - event_id: outlier for event_id, outlier in txn - } + have_persisted = dict(cast(Iterable[Tuple[str, bool]], txn)) logger.debug( "_update_outliers_txn: events=%s have_persisted=%s", diff --git a/synapse/storage/databases/main/pusher.py b/synapse/storage/databases/main/pusher.py index ab76b754e0e8..aeb6034f46da 100644 --- a/synapse/storage/databases/main/pusher.py +++ b/synapse/storage/databases/main/pusher.py @@ -518,7 +518,7 @@ async def _set_device_id_for_pushers( def set_device_id_for_pushers_txn(txn: LoggingTransaction) -> int: txn.execute( """ - SELECT + SELECT p.id AS pusher_id, p.device_id AS pusher_device_id, at.device_id AS token_device_id diff --git a/synapse/storage/databases/main/stats.py b/synapse/storage/databases/main/stats.py index d3393d8e493e..97c4dc2603d6 100644 --- a/synapse/storage/databases/main/stats.py +++ b/synapse/storage/databases/main/stats.py @@ -16,7 +16,17 @@ import logging from enum import Enum from itertools import chain -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterable, + List, + Optional, + Tuple, + Union, + cast, +) from typing_extensions import Counter @@ -523,7 +533,7 @@ def _fetch_current_state_stats( """, (room_id,), ) - membership_counts = {membership: cnt for membership, cnt in txn} + 
membership_counts = dict(cast(Iterable[Tuple[str, int]], txn)) txn.execute( """ diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py index 2b8779bbb8a1..92cbe262a6c9 100644 --- a/synapse/storage/databases/main/stream.py +++ b/synapse/storage/databases/main/stream.py @@ -41,6 +41,7 @@ Any, Collection, Dict, + Iterable, List, Optional, Set, @@ -1343,7 +1344,9 @@ def _reset_federation_positions_txn(self, txn: LoggingTransaction) -> None: GROUP BY type """ txn.execute(sql) - min_positions = {typ: pos for typ, pos in txn} # Map from type -> min position + min_positions = dict( + cast(Iterable[Tuple[str, int]], txn) + ) # Map from type -> min position # Ensure we do actually have some values here assert set(min_positions) == {"federation", "events"} diff --git a/tests/replication/slave/storage/test_events.py b/tests/replication/slave/storage/test_events.py index 57c781a0c3fd..b2125b1fea41 100644 --- a/tests/replication/slave/storage/test_events.py +++ b/tests/replication/slave/storage/test_events.py @@ -412,7 +412,7 @@ def build_event( self.get_success( self.master_store.add_push_actions_to_staging( event.event_id, - {user_id: actions for user_id, actions in push_actions}, + dict(push_actions), False, "main", ) diff --git a/tests/server.py b/tests/server.py index 5de97227669c..bb059630fa15 100644 --- a/tests/server.py +++ b/tests/server.py @@ -983,7 +983,9 @@ def cleanup() -> None: dropped = True except psycopg2.OperationalError as e: warnings.warn( - "Couldn't drop old db: " + str(e), category=UserWarning + "Couldn't drop old db: " + str(e), + category=UserWarning, + stacklevel=2, ) time.sleep(0.5) @@ -991,7 +993,11 @@ def cleanup() -> None: db_conn.close() if not dropped: - warnings.warn("Failed to drop old DB.", category=UserWarning) + warnings.warn( + "Failed to drop old DB.", + category=UserWarning, + stacklevel=2, + ) if not LEAVE_DB: # Register the cleanup hook From 2e936afd5fbcfceae8db91123855b03c3342e767 Mon Sep 17 00:00:00 2001 From: Sean Quah <8349537+squahtx@users.noreply.github.com> Date: Tue, 28 Mar 2023 13:31:04 +0100 Subject: [PATCH 027/106] Fix typo in developer docs GitHub workflow (#15336) Signed-off-by: Sean Quah --- .github/workflows/docs.yaml | 2 +- changelog.d/15336.misc | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15336.misc diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 20a29e7cbf60..7443fd70d45f 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -80,7 +80,7 @@ jobs: needs: - pre steps: - - uses: action/checkout@v3 + - uses: actions/checkout@v3 - name: "Set up Sphinx" uses: matrix-org/setup-python-poetry@v1 diff --git a/changelog.d/15336.misc b/changelog.d/15336.misc new file mode 100644 index 000000000000..39c9fc82e919 --- /dev/null +++ b/changelog.d/15336.misc @@ -0,0 +1 @@ +Add developer documentation for the Federation Sender and add a documentation mechanism using Sphinx. 
From 8a47bf13efae0b4d844c8fb59cf33663b185db46 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 28 Mar 2023 15:42:24 +0100 Subject: [PATCH 028/106] Speed up pydantic CI lint (#15339) --- .github/workflows/tests.yml | 8 ++++++++ changelog.d/15339.misc | 1 + 2 files changed, 9 insertions(+) create mode 100644 changelog.d/15339.misc diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 7f57f046fc87..0e736cecb845 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -95,6 +95,14 @@ jobs: - uses: actions/checkout@v3 with: ref: ${{ github.event.pull_request.head.sha }} + - name: Install Rust + # There don't seem to be versioned releases of this action per se: for each rust + # version there is a branch which gets constantly rebased on top of master. + # We pin to a specific commit for paranoia's sake. + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d + with: + toolchain: 1.58.1 + - uses: Swatinem/rust-cache@v2 - uses: matrix-org/setup-python-poetry@v1 with: poetry-version: "1.3.2" diff --git a/changelog.d/15339.misc b/changelog.d/15339.misc new file mode 100644 index 000000000000..626f7ef5c007 --- /dev/null +++ b/changelog.d/15339.misc @@ -0,0 +1 @@ +Speed up pydantic CI job. From 57481ca6941f2750a750a3f7e460e2e56ee08562 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 28 Mar 2023 16:17:15 +0100 Subject: [PATCH 029/106] Speed up generate sample config CI lint (#15340) --- .github/workflows/tests.yml | 8 ++++++++ changelog.d/15340.misc | 1 + 2 files changed, 9 insertions(+) create mode 100644 changelog.d/15340.misc diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 0e736cecb845..a2cec324a549 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -34,6 +34,14 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 + - name: Install Rust + # There don't seem to be versioned releases of this action per se: for each rust + # version there is a branch which gets constantly rebased on top of master. + # We pin to a specific commit for paranoia's sake. + uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d + with: + toolchain: 1.58.1 + - uses: Swatinem/rust-cache@v2 - uses: matrix-org/setup-python-poetry@v1 with: python-version: "3.x" diff --git a/changelog.d/15340.misc b/changelog.d/15340.misc new file mode 100644 index 000000000000..e2fe35d7c591 --- /dev/null +++ b/changelog.d/15340.misc @@ -0,0 +1 @@ +Speed up sample config CI job. From 5282ba1e2bbff2635dc09aec45fd42a56c1a4545 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 28 Mar 2023 14:26:27 -0400 Subject: [PATCH 030/106] Implement MSC3983 to proxy /keys/claim queries to appservices. (#15314) Experimental support for MSC3983 is behind a configuration flag. If enabled, for users which are exclusively owned by an application service then the appservice will be queried for one-time keys *if* there are none uploaded to Synapse. 
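In outline, the claim path becomes a three-layer fallback. The sketch below condenses the new `claim_local_one_time_keys` added by this change; the free-standing `store`, `appservice_handler` and `ask_appservices` parameters stand in for attributes the real handler reads from the homeserver:

    async def claim_local_one_time_keys(store, appservice_handler, query, ask_appservices):
        # 1. Claim one-time keys already uploaded to the homeserver. Tuples of
        #    (user ID, device ID, algorithm) that cannot be satisfied are
        #    returned as `not_found`.
        otk_results, not_found = await store.claim_e2e_one_time_keys(query)

        # 2. If MSC3983 support is enabled, ask exclusive application services
        #    to fill the remaining gaps.
        if ask_appservices:
            (
                appservice_results,
                not_found,
            ) = await appservice_handler.claim_e2e_one_time_keys(not_found)
        else:
            appservice_results = []

        # 3. Anything still missing falls back to the devices' fallback keys.
        fallback_results = await store.claim_e2e_fallback_keys(not_found)

        # Each queried tuple is answered by at most one of the three sources.
        return (otk_results, *appservice_results, fallback_results)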
--- changelog.d/15314.feature | 1 + synapse/appservice/api.py | 56 ++++++++++++++ synapse/config/experimental.py | 5 ++ synapse/federation/federation_server.py | 20 ++--- synapse/handlers/appservice.py | 74 +++++++++++++++++- synapse/handlers/e2e_keys.py | 57 ++++++++++++-- .../storage/databases/main/end_to_end_keys.py | 36 ++++++--- tests/appservice/test_api.py | 59 ++++++++++++++ tests/handlers/test_e2e_keys.py | 76 ++++++++++++++++++- 9 files changed, 355 insertions(+), 29 deletions(-) create mode 100644 changelog.d/15314.feature diff --git a/changelog.d/15314.feature b/changelog.d/15314.feature new file mode 100644 index 000000000000..68b289b0cc7c --- /dev/null +++ b/changelog.d/15314.feature @@ -0,0 +1 @@ +Experimental support for passing One Time Key requests to application services ([MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983)). diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 4812fb44961f..51ee0e79df10 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -388,6 +388,62 @@ async def push_bulk( failed_transactions_counter.labels(service.id).inc() return False + async def claim_client_keys( + self, service: "ApplicationService", query: List[Tuple[str, str, str]] + ) -> Tuple[Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str]]]: + """Claim one time keys from an application service. + + Args: + query: An iterable of tuples of (user ID, device ID, algorithm). + + Returns: + A tuple of: + A map of user ID -> a map device ID -> a map of key ID -> JSON dict. + + A copy of the input which has not been fulfilled because the + appservice doesn't support this endpoint or has not returned + data for that tuple. + """ + if service.url is None: + return {}, query + + # This is required by the configuration. + assert service.hs_token is not None + + # Create the expected payload shape. + body: Dict[str, Dict[str, List[str]]] = {} + for user_id, device, algorithm in query: + body.setdefault(user_id, {}).setdefault(device, []).append(algorithm) + + uri = f"{service.url}/_matrix/app/unstable/org.matrix.msc3983/keys/claim" + try: + response = await self.post_json_get_json( + uri, + body, + headers={"Authorization": [f"Bearer {service.hs_token}"]}, + ) + except CodeMessageException as e: + # The appservice doesn't support this endpoint. + if e.code == 404 or e.code == 405: + return {}, query + logger.warning("claim_keys to %s received %s", uri, e.code) + return {}, query + except Exception as ex: + logger.warning("claim_keys to %s threw exception %s", uri, ex) + return {}, query + + # Check if the appservice fulfilled all of the queried user/device/algorithms + # or if some are still missing. + # + # TODO This places a lot of faith in the response shape being correct. + missing = [ + (user_id, device, algorithm) + for user_id, device, algorithm in query + if algorithm not in response.get(user_id, {}).get(device, []) + ] + + return response, missing + def _serialize( self, service: "ApplicationService", events: Iterable[EventBase] ) -> List[JsonDict]: diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index 99dcd27c7426..53e6fc2b54d6 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -74,6 +74,11 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: "msc3202_transaction_extensions", False ) + # MSC3983: Proxying OTK claim requests to exclusive ASes. 
+ self.msc3983_appservice_otk_claims: bool = experimental.get( + "msc3983_appservice_otk_claims", False + ) + # MSC3706 (server-side support for partial state in /send_join responses) # Synapse will always serve partial state responses to requests using the stable # query parameter `omit_members`. If this flag is set, Synapse will also serve diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 6d99845de561..64e99292ec04 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -86,7 +86,7 @@ from synapse.storage.databases.main.roommember import extract_heroes_from_room_summary from synapse.storage.roommember import MemberSummary from synapse.types import JsonDict, StateMap, get_domain_from_id -from synapse.util import json_decoder, unwrapFirstError +from synapse.util import unwrapFirstError from synapse.util.async_helpers import Linearizer, concurrently_execute, gather_results from synapse.util.caches.response_cache import ResponseCache from synapse.util.stringutils import parse_server_name @@ -135,6 +135,7 @@ def __init__(self, hs: "HomeServer"): self.state = hs.get_state_handler() self._event_auth_handler = hs.get_event_auth_handler() self._room_member_handler = hs.get_room_member_handler() + self._e2e_keys_handler = hs.get_e2e_keys_handler() self._state_storage_controller = hs.get_storage_controllers().state @@ -1012,15 +1013,14 @@ async def on_claim_client_keys( query.append((user_id, device_id, algorithm)) log_kv({"message": "Claiming one time keys.", "user, device pairs": query}) - results = await self.store.claim_e2e_one_time_keys(query) - - json_result: Dict[str, Dict[str, dict]] = {} - for user_id, device_keys in results.items(): - for device_id, keys in device_keys.items(): - for key_id, json_str in keys.items(): - json_result.setdefault(user_id, {})[device_id] = { - key_id: json_decoder.decode(json_str) - } + results = await self._e2e_keys_handler.claim_local_one_time_keys(query) + + json_result: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + for result in results: + for user_id, device_keys in result.items(): + for device_id, keys in device_keys.items(): + for key_id, key in keys.items(): + json_result.setdefault(user_id, {})[device_id] = {key_id: key} logger.info( "Claimed one-time-keys: %s", diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index ec3ab968e925..953df4d9cd26 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -12,7 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -from typing import TYPE_CHECKING, Collection, Dict, Iterable, List, Optional, Union +from typing import ( + TYPE_CHECKING, + Collection, + Dict, + Iterable, + List, + Optional, + Tuple, + Union, +) from prometheus_client import Counter @@ -829,3 +838,66 @@ async def _check_user_exists(self, user_id: str) -> bool: if unknown_user: return await self.query_user_exists(user_id) return True + + async def claim_e2e_one_time_keys( + self, query: Iterable[Tuple[str, str, str]] + ) -> Tuple[ + Iterable[Dict[str, Dict[str, Dict[str, JsonDict]]]], List[Tuple[str, str, str]] + ]: + """Claim one time keys from application services. + + Args: + query: An iterable of tuples of (user ID, device ID, algorithm). + + Returns: + A tuple of: + An iterable of maps of user ID -> a map device ID -> a map of key ID -> JSON bytes. 
+ + A copy of the input which has not been fulfilled (either because + they are not appservice users or the appservice does not support + providing OTKs). + """ + services = self.store.get_app_services() + + # Partition the users by appservice. + query_by_appservice: Dict[str, List[Tuple[str, str, str]]] = {} + missing = [] + for user_id, device, algorithm in query: + if not self.store.get_if_app_services_interested_in_user(user_id): + missing.append((user_id, device, algorithm)) + continue + + # Find the associated appservice. + for service in services: + if service.is_exclusive_user(user_id): + query_by_appservice.setdefault(service.id, []).append( + (user_id, device, algorithm) + ) + continue + + # Query each service in parallel. + results = await make_deferred_yieldable( + defer.DeferredList( + [ + run_in_background( + self.appservice_api.claim_client_keys, + # We know this must be an app service. + self.store.get_app_service_by_id(service_id), # type: ignore[arg-type] + service_query, + ) + for service_id, service_query in query_by_appservice.items() + ], + consumeErrors=True, + ) + ) + + # Patch together the results -- they are all independent (since they + # require exclusive control over the users). They get returned as a list + # and the caller combines them. + claimed_keys: List[Dict[str, Dict[str, Dict[str, JsonDict]]]] = [] + for success, result in results: + if success: + claimed_keys.append(result[0]) + missing.extend(result[1]) + + return claimed_keys, missing diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 4e9c8d8db0c7..9e7c2c45b557 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -13,7 +13,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - import logging from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Mapping, Optional, Tuple @@ -53,6 +52,7 @@ def __init__(self, hs: "HomeServer"): self.store = hs.get_datastores().main self.federation = hs.get_federation_client() self.device_handler = hs.get_device_handler() + self._appservice_handler = hs.get_application_service_handler() self.is_mine = hs.is_mine self.clock = hs.get_clock() @@ -88,6 +88,10 @@ def __init__(self, hs: "HomeServer"): max_count=10, ) + self._query_appservices_for_otks = ( + hs.config.experimental.msc3983_appservice_otk_claims + ) + @trace @cancellable async def query_devices( @@ -542,6 +546,42 @@ async def on_federation_query_client_keys( return ret + async def claim_local_one_time_keys( + self, local_query: List[Tuple[str, str, str]] + ) -> Iterable[Dict[str, Dict[str, Dict[str, JsonDict]]]]: + """Claim one time keys for local users. + + 1. Attempt to claim OTKs from the database. + 2. Ask application services if they provide OTKs. + 3. Attempt to fetch fallback keys from the database. + + Args: + local_query: An iterable of tuples of (user ID, device ID, algorithm). + + Returns: + An iterable of maps of user ID -> a map device ID -> a map of key ID -> JSON bytes. + """ + + otk_results, not_found = await self.store.claim_e2e_one_time_keys(local_query) + + # If the application services have not provided any keys via the C-S + # API, query it directly for one-time keys. 
+        if self._query_appservices_for_otks:
+            (
+                appservice_results,
+                not_found,
+            ) = await self._appservice_handler.claim_e2e_one_time_keys(not_found)
+        else:
+            appservice_results = []
+
+        # For each user that does not have one-time keys available, see if
+        # there is a fallback key.
+        fallback_results = await self.store.claim_e2e_fallback_keys(not_found)
+
+        # Return the results in order, each item from the input query should
+        # only appear once in the combined list.
+        return (otk_results, *appservice_results, fallback_results)
+
     @trace
     async def claim_one_time_keys(
         self, query: Dict[str, Dict[str, Dict[str, str]]], timeout: Optional[int]
@@ -561,17 +601,18 @@ async def claim_one_time_keys(
         set_tag("local_key_query", str(local_query))
         set_tag("remote_key_query", str(remote_queries))

-        results = await self.store.claim_e2e_one_time_keys(local_query)
+        results = await self.claim_local_one_time_keys(local_query)

+        # A map of user ID -> device ID -> key ID -> key.
         json_result: Dict[str, Dict[str, Dict[str, JsonDict]]] = {}
+        for result in results:
+            for user_id, device_keys in result.items():
+                for device_id, keys in device_keys.items():
+                    for key_id, key in keys.items():
+                        json_result.setdefault(user_id, {})[device_id] = {key_id: key}
+
+        # Remote failures.
         failures: Dict[str, JsonDict] = {}
-        for user_id, device_keys in results.items():
-            for device_id, keys in device_keys.items():
-                for key_id, json_str in keys.items():
-                    json_result.setdefault(user_id, {})[device_id] = {
-                        key_id: json_decoder.decode(json_str)
-                    }

         @trace
         async def claim_client_keys(destination: str) -> None:
diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py
index a3b6c8ae8e82..dc7768c50cab 100644
--- a/synapse/storage/databases/main/end_to_end_keys.py
+++ b/synapse/storage/databases/main/end_to_end_keys.py
@@ -51,7 +51,7 @@
 from synapse.storage.engines import PostgresEngine
 from synapse.storage.util.id_generators import StreamIdGenerator
 from synapse.types import JsonDict
-from synapse.util import json_encoder
+from synapse.util import json_decoder, json_encoder
 from synapse.util.caches.descriptors import cached, cachedList
 from synapse.util.cancellation import cancellable
 from synapse.util.iterutils import batch_iter
@@ -1028,14 +1028,17 @@ def get_device_stream_token(self) -> int:

     async def claim_e2e_one_time_keys(
         self, query_list: Iterable[Tuple[str, str, str]]
-    ) -> Dict[str, Dict[str, Dict[str, str]]]:
+    ) -> Tuple[Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str]]]:
         """Take a list of one time keys out of the database.

         Args:
             query_list: An iterable of tuples of (user ID, device ID, algorithm).

         Returns:
-            A map of user ID -> a map device ID -> a map of key ID -> JSON bytes.
+            A tuple of:
+                A map of user ID -> a map device ID -> a map of key ID -> JSON.
+
+                A copy of the input which has not been fulfilled.
""" @trace @@ -1115,7 +1118,8 @@ def _claim_e2e_one_time_key_returning( key_id, key_json = otk_row return f"{algorithm}:{key_id}", key_json - results: Dict[str, Dict[str, Dict[str, str]]] = {} + results: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + missing: List[Tuple[str, str, str]] = [] for user_id, device_id, algorithm in query_list: if self.database_engine.supports_returning: # If we support RETURNING clause we can use a single query that @@ -1138,11 +1142,25 @@ def _claim_e2e_one_time_key_returning( device_results = results.setdefault(user_id, {}).setdefault( device_id, {} ) - device_results[claim_row[0]] = claim_row[1] - continue + device_results[claim_row[0]] = json_decoder.decode(claim_row[1]) + else: + missing.append((user_id, device_id, algorithm)) + + return results, missing + + async def claim_e2e_fallback_keys( + self, query_list: Iterable[Tuple[str, str, str]] + ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]: + """Take a list of fallback keys out of the database. - # No one-time key available, so see if there's a fallback - # key + Args: + query_list: An iterable of tuples of (user ID, device ID, algorithm). + + Returns: + A map of user ID -> a map device ID -> a map of key ID -> JSON. + """ + results: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + for user_id, device_id, algorithm in query_list: row = await self.db_pool.simple_select_one( table="e2e_fallback_keys_json", keyvalues={ @@ -1179,7 +1197,7 @@ def _claim_e2e_one_time_key_returning( ) device_results = results.setdefault(user_id, {}).setdefault(device_id, {}) - device_results[f"{algorithm}:{key_id}"] = key_json + device_results[f"{algorithm}:{key_id}"] = json_decoder.decode(key_json) return results diff --git a/tests/appservice/test_api.py b/tests/appservice/test_api.py index 9d183b733ec2..0dd02b7d5839 100644 --- a/tests/appservice/test_api.py +++ b/tests/appservice/test_api.py @@ -105,3 +105,62 @@ async def get_json( ) self.assertEqual(self.request_url, URL_LOCATION) self.assertEqual(result, SUCCESS_RESULT_LOCATION) + + def test_claim_keys(self) -> None: + """ + Tests that the /keys/claim response is properly parsed for missing + keys. + """ + + RESPONSE: JsonDict = { + "@alice:example.org": { + "DEVICE_1": { + "signed_curve25519:AAAAHg": { + # We don't really care about the content of the keys, + # they get passed back transparently. + }, + "signed_curve25519:BBBBHg": {}, + }, + "DEVICE_2": {"signed_curve25519:CCCCHg": {}}, + }, + } + + async def post_json_get_json( + uri: str, + post_json: Any, + headers: Mapping[Union[str, bytes], Sequence[Union[str, bytes]]], + ) -> JsonDict: + # Ensure the access token is passed as both a header and query arg. + if not headers.get("Authorization"): + raise RuntimeError("Access token not provided") + + self.assertEqual(headers.get("Authorization"), [f"Bearer {TOKEN}"]) + return RESPONSE + + # We assign to a method, which mypy doesn't like. + self.api.post_json_get_json = Mock(side_effect=post_json_get_json) # type: ignore[assignment] + + MISSING_KEYS = [ + # Known user, known device, missing algorithm. + ("@alice:example.org", "DEVICE_1", "signed_curve25519:DDDDHg"), + # Known user, missing device. + ("@alice:example.org", "DEVICE_3", "signed_curve25519:EEEEHg"), + # Unknown user. 
+ ("@bob:example.org", "DEVICE_4", "signed_curve25519:FFFFHg"), + ] + + claimed_keys, missing = self.get_success( + self.api.claim_client_keys( + self.service, + [ + # Found devices + ("@alice:example.org", "DEVICE_1", "signed_curve25519:AAAAHg"), + ("@alice:example.org", "DEVICE_1", "signed_curve25519:BBBBHg"), + ("@alice:example.org", "DEVICE_2", "signed_curve25519:CCCCHg"), + ] + + MISSING_KEYS, + ) + ) + + self.assertEqual(claimed_keys, RESPONSE) + self.assertEqual(missing, MISSING_KEYS) diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py index 6b4cba65d069..4ff04fc66bb7 100644 --- a/tests/handlers/test_e2e_keys.py +++ b/tests/handlers/test_e2e_keys.py @@ -23,18 +23,24 @@ from synapse.api.constants import RoomEncryptionAlgorithms from synapse.api.errors import Codes, SynapseError +from synapse.appservice import ApplicationService from synapse.handlers.device import DeviceHandler from synapse.server import HomeServer +from synapse.storage.databases.main.appservice import _make_exclusive_regex from synapse.types import JsonDict from synapse.util import Clock from tests import unittest from tests.test_utils import make_awaitable +from tests.unittest import override_config class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - return self.setup_test_homeserver(federation_client=mock.Mock()) + self.appservice_api = mock.Mock() + return self.setup_test_homeserver( + federation_client=mock.Mock(), application_service_api=self.appservice_api + ) def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.handler = hs.get_e2e_keys_handler() @@ -941,3 +947,71 @@ def test_query_all_devices_caches_result(self, device_ids: Iterable[str]) -> Non # The two requests to the local homeserver should be identical. self.assertEqual(response_1, response_2) + + @override_config({"experimental_features": {"msc3983_appservice_otk_claims": True}}) + def test_query_appservice(self) -> None: + local_user = "@boris:" + self.hs.hostname + device_id_1 = "xyz" + fallback_key = {"alg1:k1": "fallback_key1"} + device_id_2 = "abc" + otk = {"alg1:k2": "key2"} + + # Inject an appservice interested in this user. + appservice = ApplicationService( + token="i_am_an_app_service", + id="1234", + namespaces={"users": [{"regex": r"@boris:*", "exclusive": True}]}, + # Note: this user does not have to match the regex above + sender="@as_main:test", + ) + self.hs.get_datastores().main.services_cache = [appservice] + self.hs.get_datastores().main.exclusive_user_regex = _make_exclusive_regex( + [appservice] + ) + + # Setup a response, but only for device 2. + self.appservice_api.claim_client_keys.return_value = make_awaitable( + ({local_user: {device_id_2: otk}}, [(local_user, device_id_1, "alg1")]) + ) + + # we shouldn't have any unused fallback keys yet + res = self.get_success( + self.store.get_e2e_unused_fallback_key_types(local_user, device_id_1) + ) + self.assertEqual(res, []) + + self.get_success( + self.handler.upload_keys_for_user( + local_user, + device_id_1, + {"fallback_keys": fallback_key}, + ) + ) + + # we should now have an unused alg1 key + fallback_res = self.get_success( + self.store.get_e2e_unused_fallback_key_types(local_user, device_id_1) + ) + self.assertEqual(fallback_res, ["alg1"]) + + # claiming an OTK when no OTKs are available should ask the appservice, then + # query the fallback keys. 
+        claim_res = self.get_success(
+            self.handler.claim_one_time_keys(
+                {
+                    "one_time_keys": {
+                        local_user: {device_id_1: "alg1", device_id_2: "alg1"}
+                    }
+                },
+                timeout=None,
+            )
+        )
+        self.assertEqual(
+            claim_res,
+            {
+                "failures": {},
+                "one_time_keys": {
+                    local_user: {device_id_1: fallback_key, device_id_2: otk}
+                },
+            },
+        )

From 753d1d9cde08940edfd3851d230faaf18a2ba1ff Mon Sep 17 00:00:00 2001
From: "DeepBlueV7.X"
Date: Wed, 29 Mar 2023 08:37:27 +0000
Subject: [PATCH 031/106] Fix joining rooms you have been unbanned from
 (#15323)

* Fix joining rooms you have been unbanned from

Since forever, Synapse did not allow you to join a room after you had been
unbanned from it over federation. This was not actually because of the unban
event not federating. Synapse simply used outdated state to validate the join
transition. This skips the validation if we are not in the room and for that
reason won't have the current room state.

Fixes #1563

Signed-off-by: Nicolas Werner

* Add changelog

Signed-off-by: Nicolas Werner

* Update changelog.d/15323.bugfix

---------

Signed-off-by: Nicolas Werner
---
 changelog.d/15323.bugfix             |   1 +
 synapse/handlers/federation_event.py |   2 +-
 synapse/handlers/room_member.py      | 109 ++++++++++++++-------------
 3 files changed, 59 insertions(+), 53 deletions(-)
 create mode 100644 changelog.d/15323.bugfix

diff --git a/changelog.d/15323.bugfix b/changelog.d/15323.bugfix
new file mode 100644
index 000000000000..bc1ab35532fd
--- /dev/null
+++ b/changelog.d/15323.bugfix
@@ -0,0 +1 @@
+Fix a long-standing bug preventing users from joining rooms that they had been unbanned from over federation. Contributed by Nico.
diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py
index b7136f8d1ccb..648843cdbe9b 100644
--- a/synapse/handlers/federation_event.py
+++ b/synapse/handlers/federation_event.py
@@ -583,7 +583,7 @@ async def process_remote_join(
         await self._check_event_auth(origin, event, context)

         if context.rejected:
-            raise SynapseError(400, "Join event was rejected")
+            raise SynapseError(403, "Join event was rejected")

         # the remote server is responsible for sending our join event to the rest
         # of the federation. Indeed, attempting to do so will result in problems
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index 509c5578895b..1d8b0aee6ff7 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -850,63 +850,68 @@ async def update_membership_locked(
         # `is_partial_state_room` also indicates whether `partial_state_before_join` is
         # partial.
- # TODO: Refactor into dictionary of explicitly allowed transitions - # between old and new state, with specific error messages for some - # transitions and generic otherwise - old_state_id = partial_state_before_join.get( - (EventTypes.Member, target.to_string()) - ) - if old_state_id: - old_state = await self.store.get_event(old_state_id, allow_none=True) - old_membership = old_state.content.get("membership") if old_state else None - if action == "unban" and old_membership != "ban": - raise SynapseError( - 403, - "Cannot unban user who was not banned" - " (membership=%s)" % old_membership, - errcode=Codes.BAD_STATE, - ) - if old_membership == "ban" and action not in ["ban", "unban", "leave"]: - raise SynapseError( - 403, - "Cannot %s user who was banned" % (action,), - errcode=Codes.BAD_STATE, - ) - - if old_state: - same_content = content == old_state.content - same_membership = old_membership == effective_membership_state - same_sender = requester.user.to_string() == old_state.sender - if same_sender and same_membership and same_content: - # duplicate event. - # we know it was persisted, so must have a stream ordering. - assert old_state.internal_metadata.stream_ordering - return ( - old_state.event_id, - old_state.internal_metadata.stream_ordering, - ) + is_host_in_room = await self._is_host_in_room(partial_state_before_join) - if old_membership in ["ban", "leave"] and action == "kick": - raise AuthError(403, "The target user is not in the room") + # if we are not in the room, we won't have the current state + if is_host_in_room: + # TODO: Refactor into dictionary of explicitly allowed transitions + # between old and new state, with specific error messages for some + # transitions and generic otherwise + old_state_id = partial_state_before_join.get( + (EventTypes.Member, target.to_string()) + ) - # we don't allow people to reject invites to the server notice - # room, but they can leave it once they are joined. - if ( - old_membership == Membership.INVITE - and effective_membership_state == Membership.LEAVE - ): - is_blocked = await self.store.is_server_notice_room(room_id) - if is_blocked: + if old_state_id: + old_state = await self.store.get_event(old_state_id, allow_none=True) + old_membership = ( + old_state.content.get("membership") if old_state else None + ) + if action == "unban" and old_membership != "ban": raise SynapseError( - HTTPStatus.FORBIDDEN, - "You cannot reject this invite", - errcode=Codes.CANNOT_LEAVE_SERVER_NOTICE_ROOM, + 403, + "Cannot unban user who was not banned" + " (membership=%s)" % old_membership, + errcode=Codes.BAD_STATE, + ) + if old_membership == "ban" and action not in ["ban", "unban", "leave"]: + raise SynapseError( + 403, + "Cannot %s user who was banned" % (action,), + errcode=Codes.BAD_STATE, ) - else: - if action == "kick": - raise AuthError(403, "The target user is not in the room") - is_host_in_room = await self._is_host_in_room(partial_state_before_join) + if old_state: + same_content = content == old_state.content + same_membership = old_membership == effective_membership_state + same_sender = requester.user.to_string() == old_state.sender + if same_sender and same_membership and same_content: + # duplicate event. + # we know it was persisted, so must have a stream ordering. 
+                    assert old_state.internal_metadata.stream_ordering
+                    return (
+                        old_state.event_id,
+                        old_state.internal_metadata.stream_ordering,
+                    )
+
+            if old_membership in ["ban", "leave"] and action == "kick":
+                raise AuthError(403, "The target user is not in the room")
+
+            # we don't allow people to reject invites to the server notice
+            # room, but they can leave it once they are joined.
+            if (
+                old_membership == Membership.INVITE
+                and effective_membership_state == Membership.LEAVE
+            ):
+                is_blocked = await self.store.is_server_notice_room(room_id)
+                if is_blocked:
+                    raise SynapseError(
+                        HTTPStatus.FORBIDDEN,
+                        "You cannot reject this invite",
+                        errcode=Codes.CANNOT_LEAVE_SERVER_NOTICE_ROOM,
+                    )
+        else:
+            if action == "kick":
+                raise AuthError(403, "The target user is not in the room")

From d0541e36c05fd062e75fecde49ad1b9815445138 Mon Sep 17 00:00:00 2001
From: Jayesh Nirve
Date: Wed, 29 Mar 2023 14:31:23 +0530
Subject: [PATCH 032/106] doc: fix account login requests ratelimit defaults
 typo (#15341)

* doc: fix account login requests ratelimit defaults typo

Signed-off-by: td

* chore: changelog.d file

---------

Signed-off-by: td
---
 changelog.d/15341.doc                            | 1 +
 docs/usage/configuration/config_documentation.md | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
 create mode 100644 changelog.d/15341.doc

diff --git a/changelog.d/15341.doc b/changelog.d/15341.doc
new file mode 100644
index 000000000000..d85c0fd2c490
--- /dev/null
+++ b/changelog.d/15341.doc
@@ -0,0 +1 @@
+Fix a typo in login requests ratelimit defaults.
diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md
index 015855ee7ef4..060d0d5e69b4 100644
--- a/docs/usage/configuration/config_documentation.md
+++ b/docs/usage/configuration/config_documentation.md
@@ -1521,7 +1521,7 @@ This option specifies several limits for login:
    address. Defaults to `per_second: 0.003`, `burst_count: 5`.

 * `account` ratelimits login requests based on the account the
-  client is attempting to log into. Defaults to `per_second: 0.03`,
+  client is attempting to log into. Defaults to `per_second: 0.003`,
   `burst_count: 5`.

 * `failed_attempts` ratelimits login requests based on the account the

From 78cdb72cd6b0e007c314d9fed9f629dfc5b937a6 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 29 Mar 2023 12:07:14 +0100
Subject: [PATCH 033/106] Delete stale non-e2e devices for users, take 3
 (#15183)

This should help reduce the number of devices racked up by e.g. simple
bots that repeatedly log in. We only delete non-e2e devices as they
should be safe to delete, whereas if we delete e2e devices for a user we
may accidentally break their ability to receive e2e keys for a message.
---
 changelog.d/15183.misc                    |  1 +
 synapse/handlers/device.py                |  2 +-
 synapse/handlers/register.py              | 50 +++++++++++++-
 synapse/storage/databases/main/devices.py | 80 ++++++++++++++++++++++-
 tests/handlers/test_admin.py              |  2 +-
 tests/handlers/test_device.py             |  2 +-
 tests/storage/test_client_ips.py          |  4 +-
 7 files changed, 134 insertions(+), 7 deletions(-)
 create mode 100644 changelog.d/15183.misc

diff --git a/changelog.d/15183.misc b/changelog.d/15183.misc
new file mode 100644
index 000000000000..f9bfc581ad53
--- /dev/null
+++ b/changelog.d/15183.misc
@@ -0,0 +1 @@
+Prune user's old devices on login if they have too many.
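In rough outline, the policy the storage changes below implement is: users with ten or fewer devices are left alone; beyond that, non-e2e devices unseen for 14 days are pruned, and the cutoff is tightened further so that a user keeps roughly their 50 most recently seen devices. A pure-Python sketch of the selection (the real logic is SQL in `check_too_many_devices_for_user`; the tuple shape here is hypothetical):

    def devices_to_prune(devices, now_ms):
        # devices: (device_id, last_seen_ms, has_e2e_keys) per non-hidden device.
        if len(devices) <= 10:
            return []  # small device lists are never pruned

        # Always prune devices not seen in the last 14 days...
        max_last_seen = now_ms - 14 * 24 * 60 * 60 * 1000

        # ... and if the user has more than 50 devices, raise the cutoff to the
        # last_seen of the 51st most recently seen prunable device.
        if len(devices) > 50:
            non_e2e = sorted(
                (d for d in devices if not d[2]), key=lambda d: d[1], reverse=True
            )
            if len(non_e2e) > 50:
                max_last_seen = max(non_e2e[50][1], max_last_seen)

        # Only devices without e2e keys are ever deleted.
        return [
            device_id
            for device_id, last_seen, has_e2e in devices
            if not has_e2e and last_seen < max_last_seen
        ]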
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 9ded6389acdb..0fc165a8d68c 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -485,7 +485,7 @@ async def delete_all_devices_for_user( device_ids = [d for d in device_ids if d != except_device_id] await self.delete_devices(user_id, device_ids) - async def delete_devices(self, user_id: str, device_ids: List[str]) -> None: + async def delete_devices(self, user_id: str, device_ids: StrCollection) -> None: """Delete several devices Args: diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index c8bf2439afb5..bb1df1e60fc1 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -16,7 +16,7 @@ """Contains functions for registering clients.""" import logging -from typing import TYPE_CHECKING, Iterable, List, Optional, Tuple +from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple from prometheus_client import Counter from typing_extensions import TypedDict @@ -40,6 +40,7 @@ from synapse.config.server import is_threepid_reserved from synapse.handlers.device import DeviceHandler from synapse.http.servlet import assert_params_in_dict +from synapse.metrics.background_process_metrics import run_as_background_process from synapse.replication.http.login import RegisterDeviceReplicationServlet from synapse.replication.http.register import ( ReplicationPostRegisterActionsServlet, @@ -48,6 +49,7 @@ from synapse.spam_checker_api import RegistrationBehaviour from synapse.types import RoomAlias, UserID, create_requester from synapse.types.state import StateFilter +from synapse.util.iterutils import batch_iter if TYPE_CHECKING: from synapse.server import HomeServer @@ -110,6 +112,10 @@ def __init__(self, hs: "HomeServer"): self._server_notices_mxid = hs.config.servernotices.server_notices_mxid self._server_name = hs.hostname + # The set of users that we're currently pruning devices for. Ensures + # that we don't have two such jobs for the same user running at once. + self._currently_pruning_devices_for_users: Set[str] = set() + self.spam_checker = hs.get_spam_checker() if hs.config.worker.worker_app: @@ -121,7 +127,10 @@ def __init__(self, hs: "HomeServer"): ReplicationPostRegisterActionsServlet.make_client(hs) ) else: - self.device_handler = hs.get_device_handler() + device_handler = hs.get_device_handler() + assert isinstance(device_handler, DeviceHandler) + self.device_handler = device_handler + self._register_device_client = self.register_device_inner self.pusher_pool = hs.get_pusherpool() @@ -851,6 +860,9 @@ class and RegisterDeviceReplicationServlet. # This can only run on the main process. assert isinstance(self.device_handler, DeviceHandler) + # Prune the user's device list if they already have a lot of devices. + await self._maybe_prune_too_many_devices(user_id) + registered_device_id = await self.device_handler.check_device_registered( user_id, device_id, @@ -919,6 +931,40 @@ class and RegisterDeviceReplicationServlet. "refresh_token": refresh_token, } + async def _maybe_prune_too_many_devices(self, user_id: str) -> None: + """Delete any excess old devices this user may have.""" + + if user_id in self._currently_pruning_devices_for_users: + return + + # We also cap the number of users whose devices we prune at the same + # time, to avoid performance problems. 
+ if len(self._currently_pruning_devices_for_users) > 5: + return + + device_ids = await self.store.check_too_many_devices_for_user(user_id) + if not device_ids: + return + + # Now spawn a background loop that deletes said devices. + async def _prune_too_many_devices_loop() -> None: + if user_id in self._currently_pruning_devices_for_users: + return + + self._currently_pruning_devices_for_users.add(user_id) + + try: + for batch in batch_iter(device_ids, 10): + await self.device_handler.delete_devices(user_id, batch) + + await self.clock.sleep(60) + finally: + self._currently_pruning_devices_for_users.discard(user_id) + + run_as_background_process( + "_prune_too_many_devices_loop", _prune_too_many_devices_loop + ) + async def post_registration_actions( self, user_id: str, auth_result: dict, access_token: Optional[str] ) -> None: diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index 5503621ad613..7647cda2c627 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -1599,6 +1599,73 @@ def _txn(txn: LoggingTransaction) -> int: return rows + async def check_too_many_devices_for_user(self, user_id: str) -> List[str]: + """Check if the user has a lot of devices, and if so return the set of + devices we can prune. + + This does *not* return hidden devices or devices with E2E keys. + """ + + num_devices = await self.db_pool.simple_select_one_onecol( + table="devices", + keyvalues={"user_id": user_id, "hidden": False}, + retcol="COALESCE(COUNT(*), 0)", + desc="count_devices", + ) + + # We let users have up to ten devices without pruning. + if num_devices <= 10: + return [] + + # We always prune devices not seen in the last 14 days... + max_last_seen = self._clock.time_msec() - 14 * 24 * 60 * 60 * 1000 + + # ... but we also cap the maximum number of devices the user can have to + # 50. + if num_devices > 50: + # Choose a last seen that ensures we keep at most 50 devices. + sql = """ + SELECT last_seen FROM devices + LEFT JOIN e2e_device_keys_json USING (user_id, device_id) + WHERE + user_id = ? + AND NOT hidden + AND last_seen IS NOT NULL + AND key_json IS NULL + ORDER BY last_seen DESC + LIMIT 1 + OFFSET 50 + """ + + rows = await self.db_pool.execute( + "check_too_many_devices_for_user_last_seen", None, sql, (user_id,) + ) + if rows: + max_last_seen = max(rows[0][0], max_last_seen) + + # Fetch the devices to delete. + sql = """ + SELECT DISTINCT device_id FROM devices + LEFT JOIN e2e_device_keys_json USING (user_id, device_id) + WHERE + user_id = ? + AND NOT hidden + AND last_seen < ? 
+ AND key_json IS NULL + ORDER BY last_seen + """ + + def check_too_many_devices_for_user_txn( + txn: LoggingTransaction, + ) -> List[str]: + txn.execute(sql, (user_id, max_last_seen)) + return [device_id for device_id, in txn] + + return await self.db_pool.runInteraction( + "check_too_many_devices_for_user", + check_too_many_devices_for_user_txn, + ) + class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore): # Because we have write access, this will be a StreamIdGenerator @@ -1657,6 +1724,7 @@ async def store_device( values={}, insertion_values={ "display_name": initial_device_display_name, + "last_seen": self._clock.time_msec(), "hidden": False, }, desc="store_device", @@ -1702,7 +1770,15 @@ async def store_device( ) raise StoreError(500, "Problem storing device.") - async def delete_devices(self, user_id: str, device_ids: List[str]) -> None: + @cached(max_entries=0) + async def delete_device(self, user_id: str, device_id: str) -> None: + raise NotImplementedError() + + # Note: sometimes deleting rows out of `device_inbox` can take a long time, + # so we use a cache so that we deduplicate in flight requests to delete + # devices. + @cachedList(cached_method_name="delete_device", list_name="device_ids") + async def delete_devices(self, user_id: str, device_ids: Collection[str]) -> dict: """Deletes several devices. Args: @@ -1739,6 +1815,8 @@ def _delete_devices_txn(txn: LoggingTransaction) -> None: for device_id in device_ids: self.device_id_exists_cache.invalidate((user_id, device_id)) + return {} + async def update_device( self, user_id: str, device_id: str, new_display_name: Optional[str] = None ) -> None: diff --git a/tests/handlers/test_admin.py b/tests/handlers/test_admin.py index 5569ccef8aef..f0ba3775c825 100644 --- a/tests/handlers/test_admin.py +++ b/tests/handlers/test_admin.py @@ -272,7 +272,7 @@ def test_devices(self) -> None: self.assertIn("device_id", args[0][0]) self.assertIsNone(args[0][0]["display_name"]) self.assertIsNone(args[0][0]["last_seen_user_agent"]) - self.assertIsNone(args[0][0]["last_seen_ts"]) + self.assertEqual(args[0][0]["last_seen_ts"], 600) self.assertIsNone(args[0][0]["last_seen_ip"]) def test_connections(self) -> None: diff --git a/tests/handlers/test_device.py b/tests/handlers/test_device.py index ce7525e29c0a..a456bffd632f 100644 --- a/tests/handlers/test_device.py +++ b/tests/handlers/test_device.py @@ -115,7 +115,7 @@ def test_get_devices_by_user(self) -> None: "device_id": "xyz", "display_name": "display 0", "last_seen_ip": None, - "last_seen_ts": None, + "last_seen_ts": 1000000, }, device_map["xyz"], ) diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index cd0079871ca2..f98998653897 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -170,6 +170,8 @@ def test_get_last_client_ip_by_device(self, after_persisting: bool) -> None: ) ) + last_seen = self.clock.time_msec() + if after_persisting: # Trigger the storage loop self.reactor.advance(10) @@ -190,7 +192,7 @@ def test_get_last_client_ip_by_device(self, after_persisting: bool) -> None: "device_id": device_id, "ip": None, "user_agent": None, - "last_seen": None, + "last_seen": last_seen, }, ], ) From 5350b5d04da8aca80b60a6b4970020d13d789501 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Mar 2023 13:24:28 +0100 Subject: [PATCH 034/106] Revert "Reintroduce membership tables event stream ordering (#15128)" (#15347) This reverts commit e6af49fbea939d9e69ed05e0a0ced5948c722ea4. 
--- changelog.d/15128.misc | 1 - synapse/storage/databases/main/events.py | 23 ++---- .../storage/databases/main/purge_events.py | 6 +- synapse/storage/schema/__init__.py | 14 +--- ...embership_tables_event_stream_ordering.sql | 20 ----- ...p_tables_event_stream_ordering_triggers.py | 79 ------------------- 6 files changed, 12 insertions(+), 131 deletions(-) delete mode 100644 changelog.d/15128.misc delete mode 100644 synapse/storage/schema/main/delta/74/01membership_tables_event_stream_ordering.sql delete mode 100644 synapse/storage/schema/main/delta/74/02membership_tables_event_stream_ordering_triggers.py diff --git a/changelog.d/15128.misc b/changelog.d/15128.misc deleted file mode 100644 index c09911e48d2f..000000000000 --- a/changelog.d/15128.misc +++ /dev/null @@ -1 +0,0 @@ -Add denormalised event stream ordering column to membership state tables for future use. Contributed by Nick @ Beeper (@fizzadar). diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index ccd9f9d14189..9c1e506da66c 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -1127,15 +1127,11 @@ def _update_current_state_txn( # been inserted into room_memberships. txn.execute_batch( """INSERT INTO current_state_events - (room_id, type, state_key, event_id, membership, event_stream_ordering) - VALUES ( - ?, ?, ?, ?, - (SELECT membership FROM room_memberships WHERE event_id = ?), - (SELECT stream_ordering FROM events WHERE event_id = ?) - ) + (room_id, type, state_key, event_id, membership) + VALUES (?, ?, ?, ?, (SELECT membership FROM room_memberships WHERE event_id = ?)) """, [ - (room_id, key[0], key[1], ev_id, ev_id, ev_id) + (room_id, key[0], key[1], ev_id, ev_id) for key, ev_id in to_insert.items() ], ) @@ -1162,15 +1158,11 @@ def _update_current_state_txn( if to_insert: txn.execute_batch( """INSERT INTO local_current_membership - (room_id, user_id, event_id, membership, event_stream_ordering) - VALUES ( - ?, ?, ?, - (SELECT membership FROM room_memberships WHERE event_id = ?), - (SELECT stream_ordering FROM events WHERE event_id = ?) 
- ) + (room_id, user_id, event_id, membership) + VALUES (?, ?, ?, (SELECT membership FROM room_memberships WHERE event_id = ?)) """, [ - (room_id, key[1], ev_id, ev_id, ev_id) + (room_id, key[1], ev_id, ev_id) for key, ev_id in to_insert.items() if key[0] == EventTypes.Member and self.is_mine_id(key[1]) ], @@ -1776,7 +1768,6 @@ def _store_room_members_txn( table="room_memberships", keys=( "event_id", - "event_stream_ordering", "user_id", "sender", "room_id", @@ -1787,7 +1778,6 @@ def _store_room_members_txn( values=[ ( event.event_id, - event.internal_metadata.stream_ordering, event.state_key, event.user_id, event.room_id, @@ -1820,7 +1810,6 @@ def _store_room_members_txn( keyvalues={"room_id": event.room_id, "user_id": event.state_key}, values={ "event_id": event.event_id, - "event_stream_ordering": event.internal_metadata.stream_ordering, "membership": event.membership, }, ) diff --git a/synapse/storage/databases/main/purge_events.py b/synapse/storage/databases/main/purge_events.py index efbd3e75d99e..7a7c0d9c753d 100644 --- a/synapse/storage/databases/main/purge_events.py +++ b/synapse/storage/databases/main/purge_events.py @@ -428,16 +428,14 @@ def _purge_room_txn(self, txn: LoggingTransaction, room_id: str) -> List[int]: "partial_state_events", "partial_state_rooms_servers", "partial_state_rooms", - # Note: the _membership(s) tables have foreign keys to the `events` table - # so must be deleted first. - "local_current_membership", - "room_memberships", "events", "federation_inbound_events_staging", + "local_current_membership", "receipts_graph", "receipts_linearized", "room_aliases", "room_depth", + "room_memberships", "room_stats_state", "room_stats_current", "room_stats_earliest_token", diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index a28f2b997cef..d3103a6c7a05 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -SCHEMA_VERSION = 75 # remember to update the list below when updating +SCHEMA_VERSION = 74 # remember to update the list below when updating """Represents the expectations made by the codebase about the database schema This should be incremented whenever the codebase changes its requirements on the @@ -91,19 +91,13 @@ - A query on `event_stream_ordering` column has now been disambiguated (i.e. the codebase can handle the `current_state_events`, `local_current_memberships` and `room_memberships` tables having an `event_stream_ordering` column). - -Changes in SCHEMA_VERSION = 75: - - The `event_stream_ordering` column in membership tables (`current_state_events`, - `local_current_membership` & `room_memberships`) is now being populated for new - rows. When the background job to populate historical rows lands this will - become the compat schema version. """ SCHEMA_COMPAT_VERSION = ( - # Queries against `event_stream_ordering` columns in membership tables must - # be disambiguated. - 74 + # The threads_id column must exist for event_push_actions, event_push_summary, + # receipts_linearized, and receipts_graph. 
+ 73 ) """Limit on how far the synapse codebase can be rolled back without breaking db compat diff --git a/synapse/storage/schema/main/delta/74/01membership_tables_event_stream_ordering.sql b/synapse/storage/schema/main/delta/74/01membership_tables_event_stream_ordering.sql deleted file mode 100644 index e2608f3a2e83..000000000000 --- a/synapse/storage/schema/main/delta/74/01membership_tables_event_stream_ordering.sql +++ /dev/null @@ -1,20 +0,0 @@ -/* Copyright 2022 Beeper - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - --- Each of these are denormalised copies of `stream_ordering` from the corresponding row in` events` which --- we use to improve database performance by reduring JOINs. -ALTER TABLE current_state_events ADD COLUMN event_stream_ordering BIGINT REFERENCES events(stream_ordering); -ALTER TABLE local_current_membership ADD COLUMN event_stream_ordering BIGINT REFERENCES events(stream_ordering); -ALTER TABLE room_memberships ADD COLUMN event_stream_ordering BIGINT REFERENCES events(stream_ordering); diff --git a/synapse/storage/schema/main/delta/74/02membership_tables_event_stream_ordering_triggers.py b/synapse/storage/schema/main/delta/74/02membership_tables_event_stream_ordering_triggers.py deleted file mode 100644 index e32e9083b359..000000000000 --- a/synapse/storage/schema/main/delta/74/02membership_tables_event_stream_ordering_triggers.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright 2022 Beeper -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -""" -This migration adds triggers to the room membership tables to enforce consistency. -Triggers cannot be expressed in .sql files, so we have to use a separate file. -""" -from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine -from synapse.storage.types import Cursor - - -def run_create(cur: Cursor, database_engine: BaseDatabaseEngine, *args, **kwargs): - # Complain if the `event_stream_ordering` in membership tables doesn't match - # the `stream_ordering` row with the same `event_id` in `events`. 
- if isinstance(database_engine, Sqlite3Engine): - for table in ( - "current_state_events", - "local_current_membership", - "room_memberships", - ): - cur.execute( - f""" - CREATE TRIGGER IF NOT EXISTS {table}_bad_event_stream_ordering - BEFORE INSERT ON {table} - FOR EACH ROW - BEGIN - SELECT RAISE(ABORT, 'Incorrect event_stream_ordering in {table}') - WHERE EXISTS ( - SELECT 1 FROM events - WHERE events.event_id = NEW.event_id - AND events.stream_ordering != NEW.event_stream_ordering - ); - END; - """ - ) - elif isinstance(database_engine, PostgresEngine): - cur.execute( - """ - CREATE OR REPLACE FUNCTION check_event_stream_ordering() RETURNS trigger AS $BODY$ - BEGIN - IF EXISTS ( - SELECT 1 FROM events - WHERE events.event_id = NEW.event_id - AND events.stream_ordering != NEW.event_stream_ordering - ) THEN - RAISE EXCEPTION 'Incorrect event_stream_ordering'; - END IF; - RETURN NEW; - END; - $BODY$ LANGUAGE plpgsql; - """ - ) - - for table in ( - "current_state_events", - "local_current_membership", - "room_memberships", - ): - cur.execute( - f""" - CREATE TRIGGER check_event_stream_ordering BEFORE INSERT OR UPDATE ON {table} - FOR EACH ROW - EXECUTE PROCEDURE check_event_stream_ordering() - """ - ) - else: - raise NotImplementedError("Unknown database engine") From f0d8f66eaaacfa75bed65bc5d0c602fbc5339c85 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Mar 2023 14:37:06 +0100 Subject: [PATCH 035/106] Fix registering a device on an account with lots of devices (#15348) Fixes up #15183 --- changelog.d/15348.misc | 1 + synapse/handlers/register.py | 2 + synapse/storage/databases/main/devices.py | 9 +++-- tests/rest/client/test_register.py | 47 +++++++++++++++++++++++ 4 files changed, 56 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15348.misc diff --git a/changelog.d/15348.misc b/changelog.d/15348.misc new file mode 100644 index 000000000000..f9bfc581ad53 --- /dev/null +++ b/changelog.d/15348.misc @@ -0,0 +1 @@ +Prune user's old devices on login if they have too many. diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index bb1df1e60fc1..7e9d065f50e1 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -946,6 +946,8 @@ async def _maybe_prune_too_many_devices(self, user_id: str) -> None: if not device_ids: return + logger.info("Pruning %d stale devices for %s", len(device_ids), user_id) + # Now spawn a background loop that deletes said devices. async def _prune_too_many_devices_loop() -> None: if user_id in self._currently_pruning_devices_for_users: diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index 7647cda2c627..f61b7bc96eec 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -1638,19 +1638,22 @@ async def check_too_many_devices_for_user(self, user_id: str) -> List[str]: """ rows = await self.db_pool.execute( - "check_too_many_devices_for_user_last_seen", None, sql, (user_id,) + "check_too_many_devices_for_user_last_seen", + None, + sql, + user_id, ) if rows: max_last_seen = max(rows[0][0], max_last_seen) # Fetch the devices to delete. sql = """ - SELECT DISTINCT device_id FROM devices + SELECT device_id FROM devices LEFT JOIN e2e_device_keys_json USING (user_id, device_id) WHERE user_id = ? AND NOT hidden - AND last_seen < ? + AND last_seen <= ? 
AND key_json IS NULL ORDER BY last_seen """ diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py index b228dba8613d..7ae84e3139aa 100644 --- a/tests/rest/client/test_register.py +++ b/tests/rest/client/test_register.py @@ -794,6 +794,53 @@ def test_require_approval(self) -> None: ApprovalNoticeMedium.NONE, channel.json_body["approval_notice_medium"] ) + def test_check_stale_devices_get_pruned(self) -> None: + """Check that if a user has some stale devices we log them out when they + log in a new device.""" + + # Register some devices, but not too many that we go over the threshold + # where we prune more aggressively. + user_id = self.register_user("user", "pass") + for _ in range(0, 50): + self.login(user_id, "pass") + + store = self.hs.get_datastores().main + + res = self.get_success(store.get_devices_by_user(user_id)) + self.assertEqual(len(res), 50) + + # Advance time so that the above devices are considered "old". + self.reactor.advance(30 * 24 * 60 * 60 * 1000) + + self.login(user_id, "pass") + + self.reactor.pump([60] * 10) # Ensure background job runs + + # We expect all old devices to have been logged out + res = self.get_success(store.get_devices_by_user(user_id)) + self.assertEqual(len(res), 1) + + def test_check_recent_devices_get_pruned(self) -> None: + """Check that if a user has many devices we log out the last oldest + ones. + + Note: this is similar to above, except if we lots of devices we prune + devices even if they're not old. + """ + + # Register a lot of devices in a short amount of time + user_id = self.register_user("user", "pass") + for _ in range(0, 100): + self.login(user_id, "pass") + self.reactor.advance(100) + + store = self.hs.get_datastores().main + + # We keep up to 50 devices that have been used in the last week, plus + # the device that was last logged in. + res = self.get_success(store.get_devices_by_user(user_id)) + self.assertEqual(len(res), 51) + class AccountValidityTestCase(unittest.HomeserverTestCase): servlets = [ From 9d641d88b785dd4b4e6e7dca3356678a42a3ac23 Mon Sep 17 00:00:00 2001 From: Cyberes <64224601+Cyberes@users.noreply.github.com> Date: Thu, 30 Mar 2023 04:44:53 -0600 Subject: [PATCH 036/106] Fix missing app variable in mail subject for password resets (#15352) * Update mailer.py Fix `KeyError: 'app'` * Create 15352.bugfix Signed-off-by: Cyberes --------- Signed-off-by: Cyberes --- changelog.d/15352.bugfix | 1 + synapse/push/mailer.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15352.bugfix diff --git a/changelog.d/15352.bugfix b/changelog.d/15352.bugfix new file mode 100644 index 000000000000..36d6615cac3c --- /dev/null +++ b/changelog.d/15352.bugfix @@ -0,0 +1 @@ +Fix missing app variable in mail subject for password resets. Contributed by Cyberes. 
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index 93b255ced579..491a09b71d54 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -149,7 +149,7 @@ async def send_password_reset_mail( await self.send_email( email_address, self.email_subjects.password_reset - % {"server_name": self.hs.config.server.server_name}, + % {"server_name": self.hs.config.server.server_name, "app": self.app_name}, template_vars, ) From 9228ae633f209212ed55de7943d1a8aee3645b57 Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Thu, 30 Mar 2023 12:51:35 +0200 Subject: [PATCH 037/106] Add some clarification to the doc/comments regarding TCP replication (#15354) --- changelog.d/15354.misc | 1 + docs/tcp_replication.md | 9 ++++--- synapse/replication/tcp/protocol.py | 31 +----------------------- synapse/replication/tcp/streams/_base.py | 4 +-- 4 files changed, 9 insertions(+), 36 deletions(-) create mode 100644 changelog.d/15354.misc diff --git a/changelog.d/15354.misc b/changelog.d/15354.misc new file mode 100644 index 000000000000..862444edfbaf --- /dev/null +++ b/changelog.d/15354.misc @@ -0,0 +1 @@ +Add some clarification to the doc/comments regarding TCP replication. diff --git a/docs/tcp_replication.md b/docs/tcp_replication.md index 15df949debc8..083cda8413d3 100644 --- a/docs/tcp_replication.md +++ b/docs/tcp_replication.md @@ -25,7 +25,7 @@ position of all streams. The server then periodically sends `RDATA` commands which have the format `RDATA `, where the format of `` is defined by the individual streams. The `` is the name of the Synapse process that generated the data -(usually "master"). +(usually "master"). We expect an RDATA for every row in the DB. Error reporting happens by either the client or server sending an ERROR command, and usually the connection will be closed. @@ -107,7 +107,7 @@ reconnect, following the steps above. If the server sends messages faster than the client can consume them the server will first buffer a (fairly large) number of commands and then disconnect the client. This ensures that we don't queue up an unbounded -number of commands in memory and gives us a potential oppurtunity to +number of commands in memory and gives us a potential opportunity to squawk loudly. When/if the client recovers it can reconnect to the server and ask for missed messages. @@ -122,7 +122,7 @@ since these include tokens which can be used to restart the stream on connection errors. The client should keep track of the token in the last RDATA command -received for each stream so that on reconneciton it can start streaming +received for each stream so that on reconnection it can start streaming from the correct place. Note: not all RDATA have valid tokens due to batching. See `RdataCommand` for more details. @@ -188,7 +188,8 @@ client (C): Two positions are included, the "new" position and the last position sent respectively. This allows servers to tell instances that the positions have advanced but no data has been written, without clients needlessly checking to see if they - have missed any updates. + have missed any updates. Instances will only fetch stuff if there is a gap between + their current position and the given last position. #### ERROR (S, C) diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index 56a5c21910d9..a7248d7b2e49 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -14,36 +14,7 @@ """This module contains the implementation of both the client and server protocols. 
-The basic structure of the protocol is line based, where the initial word of -each line specifies the command. The rest of the line is parsed based on the -command. For example, the `RDATA` command is defined as:: - - RDATA - -(Note that `` may contains spaces, but cannot contain newlines.) - -Blank lines are ignored. - -# Example - -An example iteraction is shown below. Each line is prefixed with '>' or '<' to -indicate which side is sending, these are *not* included on the wire:: - - * connection established * - > SERVER localhost:8823 - > PING 1490197665618 - < NAME synapse.app.appservice - < PING 1490197665618 - < REPLICATE - > POSITION events 1 - > POSITION backfill 1 - > POSITION caches 1 - > RDATA caches 2 ["get_user_by_id",["@01register-user:localhost:8823"],1490197670513] - > RDATA events 14 ["ev", ["$149019767112vOHxz:localhost:8823", - "!AFDCvgApUmpdfVjIXm:localhost:8823","m.room.guest_access","",null]] - < PING 1490197675618 - > ERROR server stopping - * connection closed by server * +An explanation of this protocol is available in docs/tcp_replication.md """ import fcntl import logging diff --git a/synapse/replication/tcp/streams/_base.py b/synapse/replication/tcp/streams/_base.py index a4bdb48c0c99..c6088a0f99f8 100644 --- a/synapse/replication/tcp/streams/_base.py +++ b/synapse/replication/tcp/streams/_base.py @@ -152,8 +152,8 @@ async def get_updates(self) -> StreamUpdateResult: Returns: A triplet `(updates, new_last_token, limited)`, where `updates` is a list of `(token, row)` entries, `new_last_token` is the new - position in stream, and `limited` is whether there are more updates - to fetch. + position in stream (ie the highest token returned in the updates), + and `limited` is whether there are more updates to fetch. """ current_token = self.current_token(self.local_instance_name) updates, current_token, limited = await self.get_updates_since( From a3bad89d57645b2ea304d2900adab71a786b0172 Mon Sep 17 00:00:00 2001 From: Warren Bailey Date: Thu, 30 Mar 2023 12:09:41 +0100 Subject: [PATCH 038/106] Add the ability to enable/disable registrations when in the OIDC flow (#14978) Signed-off-by: Warren Bailey --- changelog.d/14978.feature | 1 + .../usage/configuration/config_documentation.md | 6 ++++++ synapse/config/oidc.py | 5 +++++ synapse/handlers/oidc.py | 1 + synapse/handlers/sso.py | 17 +++++++++++++++-- tests/handlers/test_oidc.py | 17 ++++++++++++++++- 6 files changed, 44 insertions(+), 3 deletions(-) create mode 100644 changelog.d/14978.feature diff --git a/changelog.d/14978.feature b/changelog.d/14978.feature new file mode 100644 index 000000000000..14f6fee65896 --- /dev/null +++ b/changelog.d/14978.feature @@ -0,0 +1 @@ +Add the ability to enable/disable registrations when in the OIDC flow. \ No newline at end of file diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 060d0d5e69b4..c5c2c2b6152e 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -3100,6 +3100,11 @@ Options for each entry include: match a pre-existing account instead of failing. This could be used if switching from password logins to OIDC. Defaults to false. +* `enable_registration`: set to 'false' to disable automatic registration of new + users. This allows the OIDC SSO flow to be limited to sign in only, rather than + automatically registering users that have a valid SSO login but do not have + a pre-registered account. Defaults to true. 
+
 * `user_mapping_provider`: Configuration for how attributes returned from a OIDC
   provider are mapped onto a matrix user. This setting has the following
   sub-properties:
@@ -3216,6 +3221,7 @@ oidc_providers:
     userinfo_endpoint: "https://accounts.example.com/userinfo"
     jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
     skip_verification: true
+    enable_registration: true
     user_mapping_provider:
       config:
         subject_claim: "id"
diff --git a/synapse/config/oidc.py b/synapse/config/oidc.py
index df8c42204392..77c1d1dc8e0f 100644
--- a/synapse/config/oidc.py
+++ b/synapse/config/oidc.py
@@ -136,6 +136,7 @@ def oidc_enabled(self) -> bool:
             "type": "array",
             "items": SsoAttributeRequirement.JSON_SCHEMA,
         },
+        "enable_registration": {"type": "boolean"},
     },
 }

@@ -306,6 +307,7 @@ def _parse_oidc_config_dict(
         user_mapping_provider_class=user_mapping_provider_class,
         user_mapping_provider_config=user_mapping_provider_config,
         attribute_requirements=attribute_requirements,
+        enable_registration=oidc_config.get("enable_registration", True),
     )


@@ -405,3 +407,6 @@ class OidcProviderConfig:

     # required attributes to require in userinfo to allow login/registration
     attribute_requirements: List[SsoAttributeRequirement]
+
+    # Whether automatic registrations are enabled in the OIDC flow. Defaults to True
+    enable_registration: bool
diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py
index 0fc829acf77d..e7e0b5e049b3 100644
--- a/synapse/handlers/oidc.py
+++ b/synapse/handlers/oidc.py
@@ -1239,6 +1239,7 @@ async def grandfather_existing_users() -> Optional[str]:
             grandfather_existing_users,
             extra_attributes,
             auth_provider_session_id=sid,
+            registration_enabled=self._config.enable_registration,
         )

     def _remote_id_from_userinfo(self, userinfo: UserInfo) -> str:
diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py
index 4a27c0f05142..c28325323c44 100644
--- a/synapse/handlers/sso.py
+++ b/synapse/handlers/sso.py
@@ -383,6 +383,7 @@ async def complete_sso_login_request(
         grandfather_existing_users: Callable[[], Awaitable[Optional[str]]],
         extra_login_attributes: Optional[JsonDict] = None,
         auth_provider_session_id: Optional[str] = None,
+        registration_enabled: bool = True,
     ) -> None:
         """
         Given an SSO ID, retrieve the user ID for it and possibly register the user.
@@ -435,6 +436,10 @@ async def complete_sso_login_request(

             auth_provider_session_id: An optional session ID from the IdP.

+            registration_enabled: An optional boolean to enable/disable automatic
+                registrations of new users. If false and the user does not exist then the
+                flow is aborted. Defaults to true.
+
         Raises:
             MappingException if there was a problem mapping the response to a user.
             RedirectException: if the mapping provider needs to redirect the user
@@ -462,8 +467,16 @@ async def complete_sso_login_request(
                 auth_provider_id, remote_user_id, user_id
             )

-        # Otherwise, generate a new user.
-        if not user_id:
+        if not user_id and not registration_enabled:
+            logger.info(
+                "User does not exist and registrations are disabled for IdP '%s' and remote_user_id '%s'",
+                auth_provider_id,
+                remote_user_id,
+            )
+            raise MappingException(
+                "User does not exist and registrations are disabled"
+            )
+        elif not user_id:  # Otherwise, generate a new user.
             attributes = await self._call_attribute_mapper(sso_to_matrix_id_mapper)

             next_step_url = self._get_url_for_next_new_user_step(
diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py
index 951caaa6b3ba..0a8bae54fbea 100644
--- a/tests/handlers/test_oidc.py
+++ b/tests/handlers/test_oidc.py
@@ -922,7 +922,7 @@ def test_extra_attributes(self) -> None:
             auth_provider_session_id=None,
         )

-    @override_config({"oidc_config": DEFAULT_CONFIG})
+    @override_config({"oidc_config": {**DEFAULT_CONFIG, "enable_registration": True}})
     def test_map_userinfo_to_user(self) -> None:
         """Ensure that mapping the userinfo returned from a provider to an MXID works properly."""
         userinfo: dict = {
@@ -975,6 +975,21 @@ def test_map_userinfo_to_user(self) -> None:
             "Mapping provider does not support de-duplicating Matrix IDs",
         )

+    @override_config({"oidc_config": {**DEFAULT_CONFIG, "enable_registration": False}})
+    def test_map_userinfo_to_user_does_not_register_new_user(self) -> None:
+        """Ensures new users are not registered when the `enable_registration` flag is disabled."""
+        userinfo: dict = {
+            "sub": "test_user",
+            "username": "test_user",
+        }
+        request, _ = self.start_authorization(userinfo)
+        self.get_success(self.handler.handle_oidc_callback(request))
+        self.complete_sso_login.assert_not_called()
+        self.assertRenderedError(
+            "mapping_error",
+            "User does not exist and registrations are disabled",
+        )
+
     @override_config({"oidc_config": {**DEFAULT_CONFIG, "allow_existing_users": True}})
     def test_map_userinfo_to_existing_user(self) -> None:
         """Existing users can log in with OpenID Connect when allow_existing_users is True."""

From d9f694932c64d68e791ecb4c860e911e21a0baeb Mon Sep 17 00:00:00 2001
From: Sean Quah <8349537+squahtx@users.noreply.github.com>
Date: Thu, 30 Mar 2023 13:36:41 +0100
Subject: [PATCH 039/106] Fix spinloop during partial state sync when a prev
 event is in backoff (#15351)

Previously, we would spin in a tight loop until
`update_state_for_partial_state_event` stopped raising
`FederationPullAttemptBackoffError`s.

Replace the spinloop with a wait until the backoff period has expired.

Signed-off-by: Sean Quah
---
 changelog.d/15351.bugfix               |  1 +
 synapse/api/errors.py                  | 17 ++++++---
 synapse/handlers/federation.py         | 36 +++++++++----------
 synapse/handlers/federation_event.py   | 24 +++++++++----
 .../databases/main/event_federation.py | 35 ++++++++++--------
 tests/storage/test_event_federation.py | 13 ++++---
 6 files changed, 79 insertions(+), 47 deletions(-)
 create mode 100644 changelog.d/15351.bugfix

diff --git a/changelog.d/15351.bugfix b/changelog.d/15351.bugfix
new file mode 100644
index 000000000000..e68023c6716f
--- /dev/null
+++ b/changelog.d/15351.bugfix
@@ -0,0 +1 @@
+Fix a bug introduced in Synapse 1.70.0 where the background sync from a faster join could spin for hours when one of the events involved had been marked for backoff.
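The shape of that fix, reduced to a minimal asyncio sketch (`BackoffSignal` is a
stand-in for `FederationPullAttemptBackoffError`, which the hunks below extend to
carry the remaining backoff interval):

    import asyncio

    # Stand-in for FederationPullAttemptBackoffError after this patch: it now
    # carries how long is left until the backoff period ends.
    class BackoffSignal(Exception):
        def __init__(self, retry_after_ms: int):
            super().__init__(f"backing off for {retry_after_ms}ms")
            self.retry_after_ms = retry_after_ms

    async def pull_until_done(pull):
        while True:
            try:
                return await pull()
            except BackoffSignal as e:
                # The old code retried immediately, spinning until the backoff
                # expired; instead, sleep out the remaining interval first.
                await asyncio.sleep(e.retry_after_ms / 1000)
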
diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 8c6822f3c6ea..f2d6f9ab2d9e 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -27,7 +27,7 @@ if typing.TYPE_CHECKING: from synapse.config.homeserver import HomeServerConfig - from synapse.types import JsonDict + from synapse.types import JsonDict, StrCollection logger = logging.getLogger(__name__) @@ -682,18 +682,27 @@ class FederationPullAttemptBackoffError(RuntimeError): Attributes: event_id: The event_id which we are refusing to pull message: A custom error message that gives more context + retry_after_ms: The remaining backoff interval, in milliseconds """ - def __init__(self, event_ids: List[str], message: Optional[str]): - self.event_ids = event_ids + def __init__( + self, event_ids: "StrCollection", message: Optional[str], retry_after_ms: int + ): + event_ids = list(event_ids) if message: error_message = message else: - error_message = f"Not attempting to pull event_ids={self.event_ids} because we already tried to pull them recently (backing off)." + error_message = ( + f"Not attempting to pull event_ids={event_ids} because we already " + "tried to pull them recently (backing off)." + ) super().__init__(error_message) + self.event_ids = event_ids + self.retry_after_ms = retry_after_ms + class HttpResponseException(CodeMessageException): """ diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 80156ef343aa..65461a078723 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1949,27 +1949,25 @@ async def _sync_partial_state_room( ) for event in events: for attempt in itertools.count(): + # We try a new destination on every iteration. try: - await self._federation_event_handler.update_state_for_partial_state_event( - destination, event - ) + while True: + try: + await self._federation_event_handler.update_state_for_partial_state_event( + destination, event + ) + break + except FederationPullAttemptBackoffError as e: + # We are in the backoff period for one of the event's + # prev_events. Wait it out and try again after. + logger.warning( + "%s; waiting for %d ms...", e, e.retry_after_ms + ) + await self.clock.sleep(e.retry_after_ms / 1000) + + # Success, no need to try the rest of the destinations. break - except FederationPullAttemptBackoffError as exc: - # Log a warning about why we failed to process the event (the error message - # for `FederationPullAttemptBackoffError` is pretty good) - logger.warning("_sync_partial_state_room: %s", exc) - # We do not record a failed pull attempt when we backoff fetching a missing - # `prev_event` because not being able to fetch the `prev_events` just means - # we won't be able to de-outlier the pulled event. But we can still use an - # `outlier` in the state/auth chain for another event. So we shouldn't stop - # a downstream event from trying to pull it. - # - # This avoids a cascade of backoff for all events in the DAG downstream from - # one event backoff upstream. except FederationError as e: - # TODO: We should `record_event_failed_pull_attempt` here, - # see https://github.com/matrix-org/synapse/issues/13700 - if attempt == len(destinations) - 1: # We have tried every remote server for this event. Give up. # TODO(faster_joins) giving up isn't the right thing to do @@ -1986,6 +1984,8 @@ async def _sync_partial_state_room( destination, e, ) + # TODO: We should `record_event_failed_pull_attempt` here, + # see https://github.com/matrix-org/synapse/issues/13700 raise # Try the next remote server. 
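Later in this patch, `get_event_ids_to_not_pull_from_backoff` is changed to report
*when* each backoff ends. The schedule itself is the exponential one already in the
code, transcribed here into a standalone helper (the one-hour step is implied by the
updated test further down; the cap on doubling steps is illustrative):

    STEP_MS = 60 * 60 * 1000  # 1 hour, matching the "2hr, 4hr, 8hr, ..." comment

    def backoff_end_time(last_attempt_ts: int, num_attempts: int,
                         max_doubling_steps: int = 8) -> int:
        # After N failed pull attempts the next attempt is allowed 2^N steps
        # later, with the exponent capped so the interval cannot grow unboundedly.
        return last_attempt_ts + (2 ** min(num_attempts, max_doubling_steps)) * STEP_MS

    assert backoff_end_time(0, 1) == 2 * 60 * 60 * 1000  # one failure: 2 hours
    assert backoff_end_time(0, 2) == 4 * 60 * 60 * 1000  # two failures: 4 hours
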
diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py index 648843cdbe9b..982c8d3b2ff1 100644 --- a/synapse/handlers/federation_event.py +++ b/synapse/handlers/federation_event.py @@ -140,6 +140,7 @@ class FederationEventHandler: """ def __init__(self, hs: "HomeServer"): + self._clock = hs.get_clock() self._store = hs.get_datastores().main self._storage_controllers = hs.get_storage_controllers() self._state_storage_controller = self._storage_controllers.state @@ -1038,8 +1039,8 @@ async def _compute_event_context_with_maybe_missing_prevs( Raises: FederationPullAttemptBackoffError if we are are deliberately not attempting - to pull the given event over federation because we've already done so - recently and are backing off. + to pull one of the given event's `prev_event`s over federation because + we've already done so recently and are backing off. FederationError if we fail to get the state from the remote server after any missing `prev_event`s. """ @@ -1053,13 +1054,22 @@ async def _compute_event_context_with_maybe_missing_prevs( # If we've already recently attempted to pull this missing event, don't # try it again so soon. Since we have to fetch all of the prev_events, we can # bail early here if we find any to ignore. - prevs_to_ignore = await self._store.get_event_ids_to_not_pull_from_backoff( - room_id, missing_prevs + prevs_with_pull_backoff = ( + await self._store.get_event_ids_to_not_pull_from_backoff( + room_id, missing_prevs + ) ) - if len(prevs_to_ignore) > 0: + if len(prevs_with_pull_backoff) > 0: raise FederationPullAttemptBackoffError( - event_ids=prevs_to_ignore, - message=f"While computing context for event={event_id}, not attempting to pull missing prev_event={prevs_to_ignore[0]} because we already tried to pull recently (backing off).", + event_ids=prevs_with_pull_backoff.keys(), + message=( + f"While computing context for event={event_id}, not attempting to " + f"pull missing prev_events={list(prevs_with_pull_backoff.keys())} " + "because we already tried to pull recently (backing off)." + ), + retry_after_ms=( + max(prevs_with_pull_backoff.values()) - self._clock.time_msec() + ), ) if not missing_prevs: diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index ff3edeb7160b..a19ba88bf8af 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -1544,7 +1544,7 @@ async def get_event_ids_to_not_pull_from_backoff( self, room_id: str, event_ids: Collection[str], - ) -> List[str]: + ) -> Dict[str, int]: """ Filter down the events to ones that we've failed to pull before recently. Uses exponential backoff. @@ -1554,7 +1554,8 @@ async def get_event_ids_to_not_pull_from_backoff( event_ids: A list of events to filter down Returns: - List of event_ids that should not be attempted to be pulled + A dictionary of event_ids that should not be attempted to be pulled and the + next timestamp at which we may try pulling them again. 
""" event_failed_pull_attempts = await self.db_pool.simple_select_many_batch( table="event_failed_pull_attempts", @@ -1570,22 +1571,28 @@ async def get_event_ids_to_not_pull_from_backoff( ) current_time = self._clock.time_msec() - return [ - event_failed_pull_attempt["event_id"] - for event_failed_pull_attempt in event_failed_pull_attempts + + event_ids_with_backoff = {} + for event_failed_pull_attempt in event_failed_pull_attempts: + event_id = event_failed_pull_attempt["event_id"] # Exponential back-off (up to the upper bound) so we don't try to # pull the same event over and over. ex. 2hr, 4hr, 8hr, 16hr, etc. - if current_time - < event_failed_pull_attempt["last_attempt_ts"] - + ( - 2 - ** min( - event_failed_pull_attempt["num_attempts"], - BACKFILL_EVENT_EXPONENTIAL_BACKOFF_MAXIMUM_DOUBLING_STEPS, + backoff_end_time = ( + event_failed_pull_attempt["last_attempt_ts"] + + ( + 2 + ** min( + event_failed_pull_attempt["num_attempts"], + BACKFILL_EVENT_EXPONENTIAL_BACKOFF_MAXIMUM_DOUBLING_STEPS, + ) ) + * BACKFILL_EVENT_EXPONENTIAL_BACKOFF_STEP_MILLISECONDS ) - * BACKFILL_EVENT_EXPONENTIAL_BACKOFF_STEP_MILLISECONDS - ] + + if current_time < backoff_end_time: # `backoff_end_time` is exclusive + event_ids_with_backoff[event_id] = backoff_end_time + + return event_ids_with_backoff async def get_missing_events( self, diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py index 3e1984c15cf0..81e50bdd5523 100644 --- a/tests/storage/test_event_federation.py +++ b/tests/storage/test_event_federation.py @@ -1143,19 +1143,24 @@ def test_get_event_ids_to_not_pull_from_backoff(self) -> None: tok = self.login("alice", "test") room_id = self.helper.create_room_as(room_creator=user_id, tok=tok) + failure_time = self.clock.time_msec() self.get_success( self.store.record_event_failed_pull_attempt( room_id, "$failed_event_id", "fake cause" ) ) - event_ids_to_backoff = self.get_success( + event_ids_with_backoff = self.get_success( self.store.get_event_ids_to_not_pull_from_backoff( room_id=room_id, event_ids=["$failed_event_id", "$normal_event_id"] ) ) - self.assertEqual(event_ids_to_backoff, ["$failed_event_id"]) + self.assertEqual( + event_ids_with_backoff, + # We expect a 2^1 hour backoff after a single failed attempt. + {"$failed_event_id": failure_time + 2 * 60 * 60 * 1000}, + ) def test_get_event_ids_to_not_pull_from_backoff_retry_after_backoff_duration( self, @@ -1179,14 +1184,14 @@ def test_get_event_ids_to_not_pull_from_backoff_retry_after_backoff_duration( # attempt (2^1 hours). self.reactor.advance(datetime.timedelta(hours=2).total_seconds()) - event_ids_to_backoff = self.get_success( + event_ids_with_backoff = self.get_success( self.store.get_event_ids_to_not_pull_from_backoff( room_id=room_id, event_ids=["$failed_event_id", "$normal_event_id"] ) ) # Since this function only returns events we should backoff from, time has # elapsed past the backoff range so there is no events to backoff from. - self.assertEqual(event_ids_to_backoff, []) + self.assertEqual(event_ids_with_backoff, {}) @attr.s(auto_attribs=True) From ae4acda1bb903f504e946442bfc66dd0e5757dad Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Thu, 30 Mar 2023 08:39:38 -0400 Subject: [PATCH 040/106] Implement MSC3984 to proxy /keys/query requests to appservices. (#15321) If enabled, for users which are exclusively owned by an application service then the appservice will be queried for devices in addition to any information stored in the Synapse database. 
--- changelog.d/15314.feature | 2 +- changelog.d/15321.feature | 1 + synapse/appservice/api.py | 54 ++++++++++- synapse/config/experimental.py | 5 + synapse/federation/federation_client.py | 48 ++-------- synapse/handlers/appservice.py | 61 ++++++++++++ synapse/handlers/e2e_keys.py | 16 ++++ synapse/http/client.py | 38 ++++++++ tests/handlers/test_e2e_keys.py | 121 +++++++++++++++++++++++- 9 files changed, 298 insertions(+), 48 deletions(-) create mode 100644 changelog.d/15321.feature diff --git a/changelog.d/15314.feature b/changelog.d/15314.feature index 68b289b0cc7c..5ce0c029cee2 100644 --- a/changelog.d/15314.feature +++ b/changelog.d/15314.feature @@ -1 +1 @@ -Experimental support for passing One Time Key requests to application services ([MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983)). +Experimental support for passing One Time Key and device key requests to application services ([MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983) and [MSC3984](https://github.com/matrix-org/matrix-spec-proposals/pull/3984)). diff --git a/changelog.d/15321.feature b/changelog.d/15321.feature new file mode 100644 index 000000000000..5ce0c029cee2 --- /dev/null +++ b/changelog.d/15321.feature @@ -0,0 +1 @@ +Experimental support for passing One Time Key and device key requests to application services ([MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983) and [MSC3984](https://github.com/matrix-org/matrix-spec-proposals/pull/3984)). diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 51ee0e79df10..b27eedef99f2 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -30,7 +30,7 @@ from typing_extensions import TypeGuard from synapse.api.constants import EventTypes, Membership, ThirdPartyEntityKind -from synapse.api.errors import CodeMessageException +from synapse.api.errors import CodeMessageException, HttpResponseException from synapse.appservice import ( ApplicationService, TransactionOneTimeKeysCount, @@ -38,7 +38,7 @@ ) from synapse.events import EventBase from synapse.events.utils import SerializeEventConfig, serialize_event -from synapse.http.client import SimpleHttpClient +from synapse.http.client import SimpleHttpClient, is_unknown_endpoint from synapse.types import DeviceListUpdates, JsonDict, ThirdPartyInstanceID from synapse.util.caches.response_cache import ResponseCache @@ -393,7 +393,11 @@ async def claim_client_keys( ) -> Tuple[Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str]]]: """Claim one time keys from an application service. + Note that any error (including a timeout) is treated as the application + service having no information. + Args: + service: The application service to query. query: An iterable of tuples of (user ID, device ID, algorithm). Returns: @@ -422,9 +426,9 @@ async def claim_client_keys( body, headers={"Authorization": [f"Bearer {service.hs_token}"]}, ) - except CodeMessageException as e: + except HttpResponseException as e: # The appservice doesn't support this endpoint. - if e.code == 404 or e.code == 405: + if is_unknown_endpoint(e): return {}, query logger.warning("claim_keys to %s received %s", uri, e.code) return {}, query @@ -444,6 +448,48 @@ async def claim_client_keys( return response, missing + async def query_keys( + self, service: "ApplicationService", query: Dict[str, List[str]] + ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]: + """Query the application service for keys. 
+
+        Note that any error (including a timeout) is treated as the application
+        service having no information.
+
+        Args:
+            service: The application service to query.
+            query: A map from user ID to the device IDs to query (an empty list
+                means all devices for that user).
+
+        Returns:
+            A map of device_keys/master_keys/self_signing_keys/user_signing_keys:
+
+            device_keys is a map of user ID -> a map of device ID -> device info.
+        """
+        if service.url is None:
+            return {}
+
+        # This is required by the configuration.
+        assert service.hs_token is not None
+
+        uri = f"{service.url}/_matrix/app/unstable/org.matrix.msc3984/keys/query"
+        try:
+            response = await self.post_json_get_json(
+                uri,
+                query,
+                headers={"Authorization": [f"Bearer {service.hs_token}"]},
+            )
+        except HttpResponseException as e:
+            # The appservice doesn't support this endpoint.
+            if is_unknown_endpoint(e):
+                return {}
+            logger.warning("query_keys to %s received %s", uri, e.code)
+            return {}
+        except Exception as ex:
+            logger.warning("query_keys to %s threw exception %s", uri, ex)
+            return {}
+
+        return response
+
     def _serialize(
         self, service: "ApplicationService", events: Iterable[EventBase]
     ) -> List[JsonDict]:
diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py
index 53e6fc2b54d6..7687c80ea080 100644
--- a/synapse/config/experimental.py
+++ b/synapse/config/experimental.py
@@ -79,6 +79,11 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None:
             "msc3983_appservice_otk_claims", False
         )

+        # MSC3984: Proxying key queries to exclusive ASes.
+        self.msc3984_appservice_key_query: bool = experimental.get(
+            "msc3984_appservice_key_query", False
+        )
+
         # MSC3706 (server-side support for partial state in /send_join responses)
         # Synapse will always serve partial state responses to requests using the stable
         # query parameter `omit_members`. If this flag is set, Synapse will also serve
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index 7d04560dca78..4cf4957a42ca 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -61,6 +61,7 @@
     event_from_pdu_json,
 )
 from synapse.federation.transport.client import SendJoinResponse
+from synapse.http.client import is_unknown_endpoint
 from synapse.http.types import QueryParams
 from synapse.logging.opentracing import SynapseTags, log_kv, set_tag, tag_args, trace
 from synapse.types import JsonDict, UserID, get_domain_from_id
@@ -759,43 +760,6 @@ async def get_event_auth(

         return signed_auth

-    def _is_unknown_endpoint(
-        self, e: HttpResponseException, synapse_error: Optional[SynapseError] = None
-    ) -> bool:
-        """
-        Returns true if the response was due to an endpoint being unimplemented.
-
-        Args:
-            e: The error response received from the remote server.
-            synapse_error: The above error converted to a SynapseError. This is
-                automatically generated if not provided.
-
-        """
-        if synapse_error is None:
-            synapse_error = e.to_synapse_error()
-        # MSC3743 specifies that servers should return a 404 or 405 with an errcode
-        # of M_UNRECOGNIZED when they receive a request to an unknown endpoint or
-        # to an unknown method, respectively.
-        #
-        # Older versions of servers don't properly handle this. This needs to be
-        # rather specific as some endpoints truly do return 404 errors.
-        return (
-            # 404 is an unknown endpoint, 405 is a known endpoint, but unknown method.
-            (e.code == 404 or e.code == 405)
-            and (
-                # Older Dendrites returned a text or empty body.
-                # Older Conduit returned an empty body.
- not e.response - or e.response == b"404 page not found" - # The proper response JSON with M_UNRECOGNIZED errcode. - or synapse_error.errcode == Codes.UNRECOGNIZED - ) - ) or ( - # Older Synapses returned a 400 error. - e.code == 400 - and synapse_error.errcode == Codes.UNRECOGNIZED - ) - async def _try_destination_list( self, description: str, @@ -887,7 +851,7 @@ async def _try_destination_list( elif 400 <= e.code < 500 and synapse_error.errcode in failover_errcodes: failover = True - elif failover_on_unknown_endpoint and self._is_unknown_endpoint( + elif failover_on_unknown_endpoint and is_unknown_endpoint( e, synapse_error ): failover = True @@ -1223,7 +1187,7 @@ async def _do_send_join( # If an error is received that is due to an unrecognised endpoint, # fallback to the v1 endpoint. Otherwise, consider it a legitimate error # and raise. - if not self._is_unknown_endpoint(e): + if not is_unknown_endpoint(e): raise logger.debug("Couldn't send_join with the v2 API, falling back to the v1 API") @@ -1297,7 +1261,7 @@ async def _do_send_invite( # fallback to the v1 endpoint if the room uses old-style event IDs. # Otherwise, consider it a legitimate error and raise. err = e.to_synapse_error() - if self._is_unknown_endpoint(e, err): + if is_unknown_endpoint(e, err): if room_version.event_format != EventFormatVersions.ROOM_V1_V2: raise SynapseError( 400, @@ -1358,7 +1322,7 @@ async def _do_send_leave(self, destination: str, pdu: EventBase) -> JsonDict: # If an error is received that is due to an unrecognised endpoint, # fallback to the v1 endpoint. Otherwise, consider it a legitimate error # and raise. - if not self._is_unknown_endpoint(e): + if not is_unknown_endpoint(e): raise logger.debug("Couldn't send_leave with the v2 API, falling back to the v1 API") @@ -1629,7 +1593,7 @@ async def send_request( # If an error is received that is due to an unrecognised endpoint, # fallback to the unstable endpoint. Otherwise, consider it a # legitimate error and raise. - if not self._is_unknown_endpoint(e): + if not is_unknown_endpoint(e): raise logger.debug( diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 953df4d9cd26..da887647d4d8 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -18,6 +18,7 @@ Dict, Iterable, List, + Mapping, Optional, Tuple, Union, @@ -846,6 +847,10 @@ async def claim_e2e_one_time_keys( ]: """Claim one time keys from application services. + Users which are exclusively owned by an application service are sent a + key claim request to check if the application service provides keys + directly. + Args: query: An iterable of tuples of (user ID, device ID, algorithm). @@ -901,3 +906,59 @@ async def claim_e2e_one_time_keys( missing.extend(result[1]) return claimed_keys, missing + + async def query_keys( + self, query: Mapping[str, Optional[List[str]]] + ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]: + """Query application services for device keys. + + Users which are exclusively owned by an application service are queried + for keys to check if the application service provides keys directly. + + Args: + query: map from user_id to a list of devices to query + + Returns: + A map from user_id -> device_id -> device details + """ + services = self.store.get_app_services() + + # Partition the users by appservice. 
+ query_by_appservice: Dict[str, Dict[str, List[str]]] = {} + for user_id, device_ids in query.items(): + if not self.store.get_if_app_services_interested_in_user(user_id): + continue + + # Find the associated appservice. + for service in services: + if service.is_exclusive_user(user_id): + query_by_appservice.setdefault(service.id, {})[user_id] = ( + device_ids or [] + ) + continue + + # Query each service in parallel. + results = await make_deferred_yieldable( + defer.DeferredList( + [ + run_in_background( + self.appservice_api.query_keys, + # We know this must be an app service. + self.store.get_app_service_by_id(service_id), # type: ignore[arg-type] + service_query, + ) + for service_id, service_query in query_by_appservice.items() + ], + consumeErrors=True, + ) + ) + + # Patch together the results -- they are all independent (since they + # require exclusive control over the users). They get returned as a single + # dictionary. + key_queries: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + for success, result in results: + if success: + key_queries.update(result) + + return key_queries diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 9e7c2c45b557..007366747014 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -91,6 +91,9 @@ def __init__(self, hs: "HomeServer"): self._query_appservices_for_otks = ( hs.config.experimental.msc3983_appservice_otk_claims ) + self._query_appservices_for_keys = ( + hs.config.experimental.msc3984_appservice_key_query + ) @trace @cancellable @@ -497,6 +500,19 @@ async def query_local_devices( local_query, include_displaynames ) + # Check if the application services have any additional results. + if self._query_appservices_for_keys: + # Query the appservices for any keys. + appservice_results = await self._appservice_handler.query_keys(query) + + # Merge results, overriding with what the appservice returned. + for user_id, devices in appservice_results.get("device_keys", {}).items(): + # Copy the appservice device info over the homeserver device info, but + # don't completely overwrite it. + results.setdefault(user_id, {}).update(devices) + + # TODO Handle cross-signing keys. + # Build the result structure for user_id, device_keys in results.items(): for device_id, device_info in device_keys.items(): diff --git a/synapse/http/client.py b/synapse/http/client.py index d777d59ccf5d..5ee55981d9f8 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -966,3 +966,41 @@ def getContext(self) -> SSL.Context: def creatorForNetloc(self, hostname: bytes, port: int) -> IOpenSSLContextFactory: return self + + +def is_unknown_endpoint( + e: HttpResponseException, synapse_error: Optional[SynapseError] = None +) -> bool: + """ + Returns true if the response was due to an endpoint being unimplemented. + + Args: + e: The error response received from the remote server. + synapse_error: The above error converted to a SynapseError. This is + automatically generated if not provided. + + """ + if synapse_error is None: + synapse_error = e.to_synapse_error() + # MSC3743 specifies that servers should return a 404 or 405 with an errcode + # of M_UNRECOGNIZED when they receive a request to an unknown endpoint or + # to an unknown method, respectively. + # + # Older versions of servers don't properly handle this. This needs to be + # rather specific as some endpoints truly do return 404 errors. + return ( + # 404 is an unknown endpoint, 405 is a known endpoint, but unknown method. 
+ (e.code == 404 or e.code == 405) + and ( + # Older Dendrites returned a text body or empty body. + # Older Conduit returned an empty body. + not e.response + or e.response == b"404 page not found" + # The proper response JSON with M_UNRECOGNIZED errcode. + or synapse_error.errcode == Codes.UNRECOGNIZED + ) + ) or ( + # Older Synapses returned a 400 error. + e.code == 400 + and synapse_error.errcode == Codes.UNRECOGNIZED + ) diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py index 4ff04fc66bb7..013b9ee5504f 100644 --- a/tests/handlers/test_e2e_keys.py +++ b/tests/handlers/test_e2e_keys.py @@ -960,7 +960,7 @@ def test_query_appservice(self) -> None: appservice = ApplicationService( token="i_am_an_app_service", id="1234", - namespaces={"users": [{"regex": r"@boris:*", "exclusive": True}]}, + namespaces={"users": [{"regex": r"@boris:.+", "exclusive": True}]}, # Note: this user does not have to match the regex above sender="@as_main:test", ) @@ -1015,3 +1015,122 @@ def test_query_appservice(self) -> None: }, }, ) + + @override_config({"experimental_features": {"msc3984_appservice_key_query": True}}) + def test_query_local_devices_appservice(self) -> None: + """Test that querying of appservices for keys overrides responses from the database.""" + local_user = "@boris:" + self.hs.hostname + device_1 = "abc" + device_2 = "def" + device_3 = "ghi" + + # There are 3 devices: + # + # 1. One which is uploaded to the homeserver. + # 2. One which is uploaded to the homeserver, but a newer copy is returned + # by the appservice. + # 3. One which is only returned by the appservice. + device_key_1: JsonDict = { + "user_id": local_user, + "device_id": device_1, + "algorithms": [ + "m.olm.curve25519-aes-sha2", + RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2, + ], + "keys": { + "ed25519:abc": "base64+ed25519+key", + "curve25519:abc": "base64+curve25519+key", + }, + "signatures": {local_user: {"ed25519:abc": "base64+signature"}}, + } + device_key_2a: JsonDict = { + "user_id": local_user, + "device_id": device_2, + "algorithms": [ + "m.olm.curve25519-aes-sha2", + RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2, + ], + "keys": { + "ed25519:def": "base64+ed25519+key", + "curve25519:def": "base64+curve25519+key", + }, + "signatures": {local_user: {"ed25519:def": "base64+signature"}}, + } + + device_key_2b: JsonDict = { + "user_id": local_user, + "device_id": device_2, + "algorithms": [ + "m.olm.curve25519-aes-sha2", + RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2, + ], + # The device ID is the same (above), but the keys are different. + "keys": { + "ed25519:xyz": "base64+ed25519+key", + "curve25519:xyz": "base64+curve25519+key", + }, + "signatures": {local_user: {"ed25519:xyz": "base64+signature"}}, + } + device_key_3: JsonDict = { + "user_id": local_user, + "device_id": device_3, + "algorithms": [ + "m.olm.curve25519-aes-sha2", + RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2, + ], + "keys": { + "ed25519:jkl": "base64+ed25519+key", + "curve25519:jkl": "base64+curve25519+key", + }, + "signatures": {local_user: {"ed25519:jkl": "base64+signature"}}, + } + + # Upload keys for devices 1 & 2a. + self.get_success( + self.handler.upload_keys_for_user( + local_user, device_1, {"device_keys": device_key_1} + ) + ) + self.get_success( + self.handler.upload_keys_for_user( + local_user, device_2, {"device_keys": device_key_2a} + ) + ) + + # Inject an appservice interested in this user. 
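+        # An "exclusive" namespace means only this appservice may register
+        # users matching the regex, which is why its key responses are
+        # allowed to override what the homeserver has stored.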
+ appservice = ApplicationService( + token="i_am_an_app_service", + id="1234", + namespaces={"users": [{"regex": r"@boris:.+", "exclusive": True}]}, + # Note: this user does not have to match the regex above + sender="@as_main:test", + ) + self.hs.get_datastores().main.services_cache = [appservice] + self.hs.get_datastores().main.exclusive_user_regex = _make_exclusive_regex( + [appservice] + ) + + # Setup a response. + self.appservice_api.query_keys.return_value = make_awaitable( + { + "device_keys": { + local_user: {device_2: device_key_2b, device_3: device_key_3} + } + } + ) + + # Request all devices. + res = self.get_success(self.handler.query_local_devices({local_user: None})) + self.assertIn(local_user, res) + for res_key in res[local_user].values(): + res_key.pop("unsigned", None) + self.assertDictEqual( + res, + { + local_user: { + device_1: device_key_1, + device_2: device_key_2b, + device_3: device_key_3, + } + }, + ) From 91c3f32673e0c62ea603dcc18f7f21f73c011f33 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 30 Mar 2023 16:21:12 +0100 Subject: [PATCH 041/106] Speed up SQLite unit test CI (#15334) Tests now take 40% of the time. --- changelog.d/15334.misc | 1 + synapse/storage/engines/sqlite.py | 17 ++++++++++++++++- tests/server.py | 23 +++++++++++++++++++++++ tests/unittest.py | 16 +++++++++++++--- 4 files changed, 53 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15334.misc diff --git a/changelog.d/15334.misc b/changelog.d/15334.misc new file mode 100644 index 000000000000..0c30818ed015 --- /dev/null +++ b/changelog.d/15334.misc @@ -0,0 +1 @@ +Speed up unit tests when using SQLite3. diff --git a/synapse/storage/engines/sqlite.py b/synapse/storage/engines/sqlite.py index 28751e89a5a5..ca8c59297c42 100644 --- a/synapse/storage/engines/sqlite.py +++ b/synapse/storage/engines/sqlite.py @@ -34,6 +34,13 @@ def __init__(self, database_config: Mapping[str, Any]): ":memory:", ) + # A connection to a database that has already been prepared, to use as a + # base for an in-memory connection. This is used during unit tests to + # speed up setting up the DB. + self._prepped_conn: Optional[sqlite3.Connection] = database_config.get( + "_TEST_PREPPED_CONN" + ) + if platform.python_implementation() == "PyPy": # pypy's sqlite3 module doesn't handle bytearrays, convert them # back to bytes. @@ -84,7 +91,15 @@ def on_new_connection(self, db_conn: "LoggingDatabaseConnection") -> None: # In memory databases need to be rebuilt each time. Ideally we'd # reuse the same connection as we do when starting up, but that # would involve using adbapi before we have started the reactor. - prepare_database(db_conn, self, config=None) + # + # If we have a `prepped_conn` we can use that to initialise the DB, + # otherwise we need to call `prepare_database`. + if self._prepped_conn is not None: + # Initialise the new DB from the pre-prepared DB. 
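+                # sqlite3.Connection.backup() copies the source DB page by
+                # page into the target, so the fresh in-memory DB starts with
+                # the fully migrated schema instead of re-running every delta.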
+ assert isinstance(db_conn.conn, sqlite3.Connection) + self._prepped_conn.backup(db_conn.conn) + else: + prepare_database(db_conn, self, config=None) db_conn.create_function("rank", 1, _rank) db_conn.execute("PRAGMA foreign_keys = ON;") diff --git a/tests/server.py b/tests/server.py index bb059630fa15..b52ff1c4632c 100644 --- a/tests/server.py +++ b/tests/server.py @@ -16,6 +16,7 @@ import logging import os import os.path +import sqlite3 import time import uuid import warnings @@ -79,7 +80,9 @@ from synapse.logging.context import ContextResourceUsage from synapse.server import HomeServer from synapse.storage import DataStore +from synapse.storage.database import LoggingDatabaseConnection from synapse.storage.engines import PostgresEngine, create_engine +from synapse.storage.prepare_database import prepare_database from synapse.types import ISynapseReactor, JsonDict from synapse.util import Clock @@ -104,6 +107,10 @@ # the type of thing that can be passed into `make_request` in the headers list CustomHeaderType = Tuple[Union[str, bytes], Union[str, bytes]] +# A pre-prepared SQLite DB that is used as a template when creating new SQLite +# DB each test run. This dramatically speeds up test set up when using SQLite. +PREPPED_SQLITE_DB_CONN: Optional[LoggingDatabaseConnection] = None + class TimedOutException(Exception): """ @@ -899,6 +906,22 @@ def setup_test_homeserver( "args": {"database": test_db_location, "cp_min": 1, "cp_max": 1}, } + # Check if we have set up a DB that we can use as a template. + global PREPPED_SQLITE_DB_CONN + if PREPPED_SQLITE_DB_CONN is None: + temp_engine = create_engine(database_config) + PREPPED_SQLITE_DB_CONN = LoggingDatabaseConnection( + sqlite3.connect(":memory:"), temp_engine, "PREPPED_CONN" + ) + + database = DatabaseConnectionConfig("master", database_config) + config.database.databases = [database] + prepare_database( + PREPPED_SQLITE_DB_CONN, create_engine(database_config), config + ) + + database_config["_TEST_PREPPED_CONN"] = PREPPED_SQLITE_DB_CONN + if "db_txn_limit" in kwargs: database_config["txn_limit"] = kwargs["db_txn_limit"] diff --git a/tests/unittest.py b/tests/unittest.py index f9160faa1d0c..8a16fd366592 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -146,6 +146,9 @@ def setUp(orig: Callable[[], R]) -> R: % (current_context(),) ) + # Disable GC for duration of test. See below for why. + gc.disable() + old_level = logging.getLogger().level if level is not None and old_level != level: @@ -163,12 +166,19 @@ def tearDown(orig: Callable[[], R]) -> R: return orig() + # We want to force a GC to workaround problems with deferreds leaking + # logcontexts when they are GCed (see the logcontext docs). + # + # The easiest way to do this would be to do a full GC after each test + # run, but that is very expensive. Instead, we disable GC (above) for + # the duration of the test so that we only need to run a gen-0 GC, which + # is a lot quicker. 
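+        # (Generation 0 holds only objects allocated since the last
+        # collection, so gc.collect(0) stays cheap while still reclaiming
+        # deferreds dropped during the test.)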
+ @around(self) def tearDown(orig: Callable[[], R]) -> R: ret = orig() - # force a GC to workaround problems with deferreds leaking logcontexts when - # they are GCed (see the logcontext docs) - gc.collect() + gc.collect(0) + gc.enable() set_current_context(SENTINEL_CONTEXT) return ret From 6f68e32bfbe439435410e81ac70fdca10f28fbf7 Mon Sep 17 00:00:00 2001 From: Mathieu Velten Date: Thu, 30 Mar 2023 19:41:14 +0200 Subject: [PATCH 042/106] to_device updates could be dropped when consuming the replication stream (#15349) Co-authored-by: reivilibre --- changelog.d/15349.bugfix | 1 + synapse/storage/databases/main/deviceinbox.py | 14 +-- tests/replication/_base.py | 4 + .../tcp/streams/test_account_data.py | 5 -- .../replication/tcp/streams/test_to_device.py | 89 +++++++++++++++++++ 5 files changed, 98 insertions(+), 15 deletions(-) create mode 100644 changelog.d/15349.bugfix create mode 100644 tests/replication/tcp/streams/test_to_device.py diff --git a/changelog.d/15349.bugfix b/changelog.d/15349.bugfix new file mode 100644 index 000000000000..65ea7ae7eb1e --- /dev/null +++ b/changelog.d/15349.bugfix @@ -0,0 +1 @@ +Fix a long-standing bug where some to_device messages could be dropped when using workers. diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py index 0d75d9739a70..b471fcb064a2 100644 --- a/synapse/storage/databases/main/deviceinbox.py +++ b/synapse/storage/databases/main/deviceinbox.py @@ -617,14 +617,14 @@ def get_all_new_device_messages_txn( # We limit like this as we might have multiple rows per stream_id, and # we want to make sure we always get all entries for any stream_id # we return. - upper_pos = min(current_id, last_id + limit) + upto_token = min(current_id, last_id + limit) sql = ( "SELECT max(stream_id), user_id" " FROM device_inbox" " WHERE ? < stream_id AND stream_id <= ?" " GROUP BY user_id" ) - txn.execute(sql, (last_id, upper_pos)) + txn.execute(sql, (last_id, upto_token)) updates = [(row[0], row[1:]) for row in txn] sql = ( @@ -633,19 +633,13 @@ def get_all_new_device_messages_txn( " WHERE ? < stream_id AND stream_id <= ?" 
" GROUP BY destination" ) - txn.execute(sql, (last_id, upper_pos)) + txn.execute(sql, (last_id, upto_token)) updates.extend((row[0], row[1:]) for row in txn) # Order by ascending stream ordering updates.sort() - limited = False - upto_token = current_id - if len(updates) >= limit: - upto_token = updates[-1][0] - limited = True - - return updates, upto_token, limited + return updates, upto_token, upto_token < current_id return await self.db_pool.runInteraction( "get_all_new_device_messages", get_all_new_device_messages_txn diff --git a/tests/replication/_base.py b/tests/replication/_base.py index 46a8e2013e41..0f1a8a145f6f 100644 --- a/tests/replication/_base.py +++ b/tests/replication/_base.py @@ -54,6 +54,10 @@ class BaseStreamTestCase(unittest.HomeserverTestCase): if not hiredis: skip = "Requires hiredis" + if not USE_POSTGRES_FOR_TESTS: + # Redis replication only takes place on Postgres + skip = "Requires Postgres" + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: # build a replication server server_factory = ReplicationStreamProtocolFactory(hs) diff --git a/tests/replication/tcp/streams/test_account_data.py b/tests/replication/tcp/streams/test_account_data.py index 01df1be0473c..b9075e3f207d 100644 --- a/tests/replication/tcp/streams/test_account_data.py +++ b/tests/replication/tcp/streams/test_account_data.py @@ -37,11 +37,6 @@ def test_update_function_room_account_data_limit(self) -> None: # also one global update self.get_success(store.add_account_data_for_user("test_user", "m.global", {})) - # tell the notifier to catch up to avoid duplicate rows. - # workaround for https://github.com/matrix-org/synapse/issues/7360 - # FIXME remove this when the above is fixed - self.replicate() - # check we're testing what we think we are: no rows should yet have been # received self.assertEqual([], self.test_handler.received_rdata_rows) diff --git a/tests/replication/tcp/streams/test_to_device.py b/tests/replication/tcp/streams/test_to_device.py new file mode 100644 index 000000000000..fb9eac668f19 --- /dev/null +++ b/tests/replication/tcp/streams/test_to_device.py @@ -0,0 +1,89 @@ +# Copyright 2023 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import logging + +import synapse +from synapse.replication.tcp.streams._base import _STREAM_UPDATE_TARGET_ROW_COUNT +from synapse.types import JsonDict + +from tests.replication._base import BaseStreamTestCase + +logger = logging.getLogger(__name__) + + +class ToDeviceStreamTestCase(BaseStreamTestCase): + servlets = [ + synapse.rest.admin.register_servlets, + synapse.rest.client.login.register_servlets, + ] + + def test_to_device_stream(self) -> None: + store = self.hs.get_datastores().main + + user1 = self.register_user("user1", "pass") + self.login("user1", "pass", "device") + user2 = self.register_user("user2", "pass") + self.login("user2", "pass", "device") + + # connect to pull the updates related to users creation/login + self.reconnect() + self.replicate() + self.test_handler.received_rdata_rows.clear() + # disconnect so we can accumulate the updates without pulling them + self.disconnect() + + msg: JsonDict = {} + msg["sender"] = "@sender:example.org" + msg["type"] = "m.new_device" + + # add messages to the device inbox for user1 up until the + # limit defined for a stream update batch + for i in range(0, _STREAM_UPDATE_TARGET_ROW_COUNT): + msg["content"] = {"device": {}} + messages = {user1: {"device": msg}} + + self.get_success( + store.add_messages_from_remote_to_device_inbox( + "example.org", + f"{i}", + messages, + ) + ) + + # add one more message, for user2 this time + # this message would be dropped before fixing #15335 + msg["content"] = {"device": {}} + messages = {user2: {"device": msg}} + + self.get_success( + store.add_messages_from_remote_to_device_inbox( + "example.org", + f"{_STREAM_UPDATE_TARGET_ROW_COUNT}", + messages, + ) + ) + + # replication is disconnected so we shouldn't get any updates yet + self.assertEqual([], self.test_handler.received_rdata_rows) + + # now reconnect to pull the updates + self.reconnect() + self.replicate() + + # we should receive the fact that we have to_device updates + # for user1 and user2 + received_rows = self.test_handler.received_rdata_rows + self.assertEqual(len(received_rows), 2) + self.assertEqual(received_rows[0][2].entity, user1) + self.assertEqual(received_rows[1][2].entity, user2) From 2a234b788e2b5706ee83cf8eb86dfd004bc7c166 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Thu, 30 Mar 2023 15:11:31 -0400 Subject: [PATCH 043/106] Set thread_id column to non-null for event_push_{actions,actions_staging,summary} (#15350) Clean-up from adding the thread_id column, which was initially null but backfilled with values. It is desirable to require it to now be non-null. In addition to altering this column to be non-null, we clean up obsolete background jobs, indexes, and just-in-time updating code. 
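
Note that SQLite cannot add a NOT NULL constraint to an existing column, so
the SQLite delta recreates the affected tables and copies the data across,
while the Postgres delta can use `ALTER TABLE ... ALTER COLUMN ... SET NOT NULL`
directly.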
--- changelog.d/15350.misc | 1 + .../databases/main/event_push_actions.py | 240 ------------------ synapse/storage/schema/__init__.py | 6 +- .../74/02thread_notifications_backfill.sql | 28 ++ ...thread_notifications_not_null.sql.postgres | 23 ++ ...03thread_notifications_not_null.sql.sqlite | 99 ++++++++ 6 files changed, 154 insertions(+), 243 deletions(-) create mode 100644 changelog.d/15350.misc create mode 100644 synapse/storage/schema/main/delta/74/02thread_notifications_backfill.sql create mode 100644 synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.postgres create mode 100644 synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.sqlite diff --git a/changelog.d/15350.misc b/changelog.d/15350.misc new file mode 100644 index 000000000000..2dea23784f22 --- /dev/null +++ b/changelog.d/15350.misc @@ -0,0 +1 @@ +Make the `thread_id` column on `event_push_actions`, `event_push_actions_staging`, and `event_push_summary` non-null. diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py index eeccf5db2433..6afc51320a68 100644 --- a/synapse/storage/databases/main/event_push_actions.py +++ b/synapse/storage/databases/main/event_push_actions.py @@ -100,7 +100,6 @@ ) from synapse.storage.databases.main.receipts import ReceiptsWorkerStore from synapse.storage.databases.main.stream import StreamWorkerStore -from synapse.types import JsonDict from synapse.util import json_encoder from synapse.util.caches.descriptors import cached @@ -289,180 +288,6 @@ def __init__( unique=True, ) - self.db_pool.updates.register_background_update_handler( - "event_push_backfill_thread_id", - self._background_backfill_thread_id, - ) - - # Indexes which will be used to quickly make the thread_id column non-null. - self.db_pool.updates.register_background_index_update( - "event_push_actions_thread_id_null", - index_name="event_push_actions_thread_id_null", - table="event_push_actions", - columns=["thread_id"], - where_clause="thread_id IS NULL", - ) - self.db_pool.updates.register_background_index_update( - "event_push_summary_thread_id_null", - index_name="event_push_summary_thread_id_null", - table="event_push_summary", - columns=["thread_id"], - where_clause="thread_id IS NULL", - ) - - # Check ASAP (and then later, every 1s) to see if we have finished - # background updates the event_push_actions and event_push_summary tables. - self._clock.call_later(0.0, self._check_event_push_backfill_thread_id) - self._event_push_backfill_thread_id_done = False - - @wrap_as_background_process("check_event_push_backfill_thread_id") - async def _check_event_push_backfill_thread_id(self) -> None: - """ - Has thread_id finished backfilling? - - If not, we need to just-in-time update it so the queries work. - """ - done = await self.db_pool.updates.has_completed_background_update( - "event_push_backfill_thread_id" - ) - - if done: - self._event_push_backfill_thread_id_done = True - else: - # Reschedule to run. - self._clock.call_later(15.0, self._check_event_push_backfill_thread_id) - - async def _background_backfill_thread_id( - self, progress: JsonDict, batch_size: int - ) -> int: - """ - Fill in the thread_id field for event_push_actions and event_push_summary. - - This is preparatory so that it can be made non-nullable in the future. - - Because all current (null) data is done in an unthreaded manner this - simply assumes it is on the "main" timeline. 
Since event_push_actions - are periodically cleared it is not possible to correctly re-calculate - the thread_id. - """ - event_push_actions_done = progress.get("event_push_actions_done", False) - - def add_thread_id_txn( - txn: LoggingTransaction, start_stream_ordering: int - ) -> int: - sql = """ - SELECT stream_ordering - FROM event_push_actions - WHERE - thread_id IS NULL - AND stream_ordering > ? - ORDER BY stream_ordering - LIMIT ? - """ - txn.execute(sql, (start_stream_ordering, batch_size)) - - # No more rows to process. - rows = txn.fetchall() - if not rows: - progress["event_push_actions_done"] = True - self.db_pool.updates._background_update_progress_txn( - txn, "event_push_backfill_thread_id", progress - ) - return 0 - - # Update the thread ID for any of those rows. - max_stream_ordering = rows[-1][0] - - sql = """ - UPDATE event_push_actions - SET thread_id = 'main' - WHERE ? < stream_ordering AND stream_ordering <= ? AND thread_id IS NULL - """ - txn.execute( - sql, - ( - start_stream_ordering, - max_stream_ordering, - ), - ) - - # Update progress. - processed_rows = txn.rowcount - progress["max_event_push_actions_stream_ordering"] = max_stream_ordering - self.db_pool.updates._background_update_progress_txn( - txn, "event_push_backfill_thread_id", progress - ) - - return processed_rows - - def add_thread_id_summary_txn(txn: LoggingTransaction) -> int: - min_user_id = progress.get("max_summary_user_id", "") - min_room_id = progress.get("max_summary_room_id", "") - - # Slightly overcomplicated query for getting the Nth user ID / room - # ID tuple, or the last if there are less than N remaining. - sql = """ - SELECT user_id, room_id FROM ( - SELECT user_id, room_id FROM event_push_summary - WHERE (user_id, room_id) > (?, ?) - AND thread_id IS NULL - ORDER BY user_id, room_id - LIMIT ? - ) AS e - ORDER BY user_id DESC, room_id DESC - LIMIT 1 - """ - - txn.execute(sql, (min_user_id, min_room_id, batch_size)) - row = txn.fetchone() - if not row: - return 0 - - max_user_id, max_room_id = row - - sql = """ - UPDATE event_push_summary - SET thread_id = 'main' - WHERE - (?, ?) < (user_id, room_id) AND (user_id, room_id) <= (?, ?) - AND thread_id IS NULL - """ - txn.execute(sql, (min_user_id, min_room_id, max_user_id, max_room_id)) - processed_rows = txn.rowcount - - progress["max_summary_user_id"] = max_user_id - progress["max_summary_room_id"] = max_room_id - self.db_pool.updates._background_update_progress_txn( - txn, "event_push_backfill_thread_id", progress - ) - - return processed_rows - - # First update the event_push_actions table, then the event_push_summary table. - # - # Note that the event_push_actions_staging table is ignored since it is - # assumed that items in that table will only exist for a short period of - # time. - if not event_push_actions_done: - result = await self.db_pool.runInteraction( - "event_push_backfill_thread_id", - add_thread_id_txn, - progress.get("max_event_push_actions_stream_ordering", 0), - ) - else: - result = await self.db_pool.runInteraction( - "event_push_backfill_thread_id", - add_thread_id_summary_txn, - ) - - # Only done after the event_push_summary table is done. - if not result: - await self.db_pool.updates._end_background_update( - "event_push_backfill_thread_id" - ) - - return result - async def get_unread_counts_by_room_for_user(self, user_id: str) -> Dict[str, int]: """Get the notification count by room for a user. Only considers notifications, not highlight or unread counts, and threads are currently aggregated under their room. 
@@ -711,25 +536,6 @@ def _get_thread(thread_id: str) -> NotifCounts: (ReceiptTypes.READ, ReceiptTypes.READ_PRIVATE), ) - # First ensure that the existing rows have an updated thread_id field. - if not self._event_push_backfill_thread_id_done: - txn.execute( - """ - UPDATE event_push_summary - SET thread_id = ? - WHERE room_id = ? AND user_id = ? AND thread_id is NULL - """, - (MAIN_TIMELINE, room_id, user_id), - ) - txn.execute( - """ - UPDATE event_push_actions - SET thread_id = ? - WHERE room_id = ? AND user_id = ? AND thread_id is NULL - """, - (MAIN_TIMELINE, room_id, user_id), - ) - # First we pull the counts from the summary table. # # We check that `last_receipt_stream_ordering` matches the stream ordering of the @@ -1545,25 +1351,6 @@ def _handle_new_receipts_for_notifs_txn(self, txn: LoggingTransaction) -> bool: (room_id, user_id, stream_ordering, *thread_args), ) - # First ensure that the existing rows have an updated thread_id field. - if not self._event_push_backfill_thread_id_done: - txn.execute( - """ - UPDATE event_push_summary - SET thread_id = ? - WHERE room_id = ? AND user_id = ? AND thread_id is NULL - """, - (MAIN_TIMELINE, room_id, user_id), - ) - txn.execute( - """ - UPDATE event_push_actions - SET thread_id = ? - WHERE room_id = ? AND user_id = ? AND thread_id is NULL - """, - (MAIN_TIMELINE, room_id, user_id), - ) - # Fetch the notification counts between the stream ordering of the # latest receipt and what was previously summarised. unread_counts = self._get_notif_unread_count_for_user_room( @@ -1698,19 +1485,6 @@ def _rotate_notifs_before_txn( rotate_to_stream_ordering: The new maximum event stream ordering to summarise. """ - # Ensure that any new actions have an updated thread_id. - if not self._event_push_backfill_thread_id_done: - txn.execute( - """ - UPDATE event_push_actions - SET thread_id = ? - WHERE ? < stream_ordering AND stream_ordering <= ? AND thread_id IS NULL - """, - (MAIN_TIMELINE, old_rotate_stream_ordering, rotate_to_stream_ordering), - ) - - # XXX Do we need to update summaries here too? - # Calculate the new counts that should be upserted into event_push_summary sql = """ SELECT user_id, room_id, thread_id, @@ -1773,20 +1547,6 @@ def _rotate_notifs_before_txn( logger.info("Rotating notifications, handling %d rows", len(summaries)) - # Ensure that any updated threads have the proper thread_id. - if not self._event_push_backfill_thread_id_done: - txn.execute_batch( - """ - UPDATE event_push_summary - SET thread_id = ? - WHERE room_id = ? AND user_id = ? AND thread_id is NULL - """, - [ - (MAIN_TIMELINE, room_id, user_id) - for user_id, room_id, _ in summaries - ], - ) - self.db_pool.simple_upsert_many_txn( txn, table="event_push_summary", diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index d3103a6c7a05..72bbb3a7c2c6 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -95,9 +95,9 @@ SCHEMA_COMPAT_VERSION = ( - # The threads_id column must exist for event_push_actions, event_push_summary, - # receipts_linearized, and receipts_graph. - 73 + # The threads_id column must written to with non-null values event_push_actions, + # event_push_actions_staging, and event_push_summary. 
+ 74 ) """Limit on how far the synapse codebase can be rolled back without breaking db compat diff --git a/synapse/storage/schema/main/delta/74/02thread_notifications_backfill.sql b/synapse/storage/schema/main/delta/74/02thread_notifications_backfill.sql new file mode 100644 index 000000000000..ce6f9ff93748 --- /dev/null +++ b/synapse/storage/schema/main/delta/74/02thread_notifications_backfill.sql @@ -0,0 +1,28 @@ +/* Copyright 2023 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- Force the background updates from 06thread_notifications.sql to run in the +-- foreground as code will now require those to be "done". + +DELETE FROM background_updates WHERE update_name = 'event_push_backfill_thread_id'; + +-- Overwrite any null thread_id values. +UPDATE event_push_actions_staging SET thread_id = 'main' WHERE thread_id IS NULL; +UPDATE event_push_actions SET thread_id = 'main' WHERE thread_id IS NULL; +UPDATE event_push_summary SET thread_id = 'main' WHERE thread_id IS NULL; + +-- Drop the background updates to calculate the indexes used to find null thread_ids. +DELETE FROM background_updates WHERE update_name = 'event_push_actions_thread_id_null'; +DELETE FROM background_updates WHERE update_name = 'event_push_summary_thread_id_null'; diff --git a/synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.postgres b/synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.postgres new file mode 100644 index 000000000000..5f6866742543 --- /dev/null +++ b/synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.postgres @@ -0,0 +1,23 @@ +/* Copyright 2022 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- Drop the indexes used to find null thread_ids. +DROP INDEX IF EXISTS event_push_actions_thread_id_null; +DROP INDEX IF EXISTS event_push_summary_thread_id_null; + +-- The thread_id columns can now be made non-nullable. 
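+-- (SET NOT NULL makes Postgres scan each table to verify no NULLs remain;
+-- that is safe here because 02thread_notifications_backfill.sql has already
+-- overwritten any null thread_id values.)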
+ALTER TABLE event_push_actions_staging ALTER COLUMN thread_id SET NOT NULL; +ALTER TABLE event_push_actions ALTER COLUMN thread_id SET NOT NULL; +ALTER TABLE event_push_summary ALTER COLUMN thread_id SET NOT NULL; diff --git a/synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.sqlite b/synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.sqlite new file mode 100644 index 000000000000..f46b2335605e --- /dev/null +++ b/synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.sqlite @@ -0,0 +1,99 @@ +/* Copyright 2022 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + -- The thread_id columns can now be made non-nullable. +-- +-- SQLite doesn't support modifying columns to an existing table, so it must +-- be recreated. + +-- Create the new tables. +CREATE TABLE event_push_actions_staging_new ( + event_id TEXT NOT NULL, + user_id TEXT NOT NULL, + actions TEXT NOT NULL, + notif SMALLINT NOT NULL, + highlight SMALLINT NOT NULL, + unread SMALLINT, + thread_id TEXT NOT NULL, + inserted_ts BIGINT +); + +CREATE TABLE event_push_actions_new ( + room_id TEXT NOT NULL, + event_id TEXT NOT NULL, + user_id TEXT NOT NULL, + profile_tag VARCHAR(32), + actions TEXT NOT NULL, + topological_ordering BIGINT, + stream_ordering BIGINT, + notif SMALLINT, + highlight SMALLINT, + unread SMALLINT, + thread_id TEXT NOT NULL, + CONSTRAINT event_id_user_id_profile_tag_uniqueness UNIQUE (room_id, event_id, user_id, profile_tag) +); + +CREATE TABLE event_push_summary_new ( + user_id TEXT NOT NULL, + room_id TEXT NOT NULL, + notif_count BIGINT NOT NULL, + stream_ordering BIGINT NOT NULL, + unread_count BIGINT, + last_receipt_stream_ordering BIGINT, + thread_id TEXT NOT NULL +); + +-- Copy the data. +INSERT INTO event_push_actions_staging_new (event_id, user_id, actions, notif, highlight, unread, thread_id, inserted_ts) + SELECT event_id, user_id, actions, notif, highlight, unread, thread_id, inserted_ts + FROM event_push_actions_staging; + +INSERT INTO event_push_actions_new (room_id, event_id, user_id, profile_tag, actions, topological_ordering, stream_ordering, notif, highlight, unread, thread_id) + SELECT room_id, event_id, user_id, profile_tag, actions, topological_ordering, stream_ordering, notif, highlight, unread, thread_id + FROM event_push_actions; + +INSERT INTO event_push_summary_new (user_id, room_id, notif_count, stream_ordering, unread_count, last_receipt_stream_ordering, thread_id) + SELECT user_id, room_id, notif_count, stream_ordering, unread_count, last_receipt_stream_ordering, thread_id + FROM event_push_summary; + +-- Drop the old tables. +DROP TABLE event_push_actions_staging; +DROP TABLE event_push_actions; +DROP TABLE event_push_summary; + +-- Rename the tables. 
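+-- (The old tables must be dropped first, as RENAME TO fails if a table with
+-- the target name still exists.)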
+ALTER TABLE event_push_actions_staging_new RENAME TO event_push_actions_staging; +ALTER TABLE event_push_actions_new RENAME TO event_push_actions; +ALTER TABLE event_push_summary_new RENAME TO event_push_summary; + +-- Recreate the indexes. +CREATE INDEX event_push_actions_staging_id ON event_push_actions_staging(event_id); + +CREATE INDEX event_push_actions_highlights_index ON event_push_actions (user_id, room_id, topological_ordering, stream_ordering); +CREATE INDEX event_push_actions_rm_tokens on event_push_actions( user_id, room_id, topological_ordering, stream_ordering ); +CREATE INDEX event_push_actions_room_id_user_id on event_push_actions(room_id, user_id); +CREATE INDEX event_push_actions_stream_ordering on event_push_actions( stream_ordering, user_id ); +CREATE INDEX event_push_actions_u_highlight ON event_push_actions (user_id, stream_ordering); + +CREATE UNIQUE INDEX event_push_summary_unique_index2 ON event_push_summary (user_id, room_id, thread_id) ; + +-- Recreate some indexes in the background, by re-running the background updates +-- from 72/02event_push_actions_index.sql and 72/06thread_notifications.sql. +INSERT INTO background_updates (ordering, update_name, progress_json) VALUES + (7403, 'event_push_summary_unique_index2', '{}') + ON CONFLICT (update_name) DO UPDATE SET progress_json = '{}'; +INSERT INTO background_updates (ordering, update_name, progress_json) VALUES + (7403, 'event_push_actions_stream_highlight_index', '{}') + ON CONFLICT (update_name) DO UPDATE SET progress_json = '{}'; From 72d2ceaa9adfbc0f19a0c540e5a63263af47755f Mon Sep 17 00:00:00 2001 From: "Olivier Wilkinson (reivilibre)" Date: Fri, 31 Mar 2023 12:10:09 +0100 Subject: [PATCH 044/106] Revert "Set thread_id column to non-null for event_push_{actions,actions_staging,summary} (#15350)" This reverts commit 2a234b788e2b5706ee83cf8eb86dfd004bc7c166. See #15359 for context. --- changelog.d/15350.misc | 1 - .../databases/main/event_push_actions.py | 240 ++++++++++++++++++ synapse/storage/schema/__init__.py | 6 +- .../74/02thread_notifications_backfill.sql | 28 -- ...thread_notifications_not_null.sql.postgres | 23 -- ...03thread_notifications_not_null.sql.sqlite | 99 -------- 6 files changed, 243 insertions(+), 154 deletions(-) delete mode 100644 changelog.d/15350.misc delete mode 100644 synapse/storage/schema/main/delta/74/02thread_notifications_backfill.sql delete mode 100644 synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.postgres delete mode 100644 synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.sqlite diff --git a/changelog.d/15350.misc b/changelog.d/15350.misc deleted file mode 100644 index 2dea23784f22..000000000000 --- a/changelog.d/15350.misc +++ /dev/null @@ -1 +0,0 @@ -Make the `thread_id` column on `event_push_actions`, `event_push_actions_staging`, and `event_push_summary` non-null. 
diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py index 6afc51320a68..eeccf5db2433 100644 --- a/synapse/storage/databases/main/event_push_actions.py +++ b/synapse/storage/databases/main/event_push_actions.py @@ -100,6 +100,7 @@ ) from synapse.storage.databases.main.receipts import ReceiptsWorkerStore from synapse.storage.databases.main.stream import StreamWorkerStore +from synapse.types import JsonDict from synapse.util import json_encoder from synapse.util.caches.descriptors import cached @@ -288,6 +289,180 @@ def __init__( unique=True, ) + self.db_pool.updates.register_background_update_handler( + "event_push_backfill_thread_id", + self._background_backfill_thread_id, + ) + + # Indexes which will be used to quickly make the thread_id column non-null. + self.db_pool.updates.register_background_index_update( + "event_push_actions_thread_id_null", + index_name="event_push_actions_thread_id_null", + table="event_push_actions", + columns=["thread_id"], + where_clause="thread_id IS NULL", + ) + self.db_pool.updates.register_background_index_update( + "event_push_summary_thread_id_null", + index_name="event_push_summary_thread_id_null", + table="event_push_summary", + columns=["thread_id"], + where_clause="thread_id IS NULL", + ) + + # Check ASAP (and then later, every 1s) to see if we have finished + # background updates the event_push_actions and event_push_summary tables. + self._clock.call_later(0.0, self._check_event_push_backfill_thread_id) + self._event_push_backfill_thread_id_done = False + + @wrap_as_background_process("check_event_push_backfill_thread_id") + async def _check_event_push_backfill_thread_id(self) -> None: + """ + Has thread_id finished backfilling? + + If not, we need to just-in-time update it so the queries work. + """ + done = await self.db_pool.updates.has_completed_background_update( + "event_push_backfill_thread_id" + ) + + if done: + self._event_push_backfill_thread_id_done = True + else: + # Reschedule to run. + self._clock.call_later(15.0, self._check_event_push_backfill_thread_id) + + async def _background_backfill_thread_id( + self, progress: JsonDict, batch_size: int + ) -> int: + """ + Fill in the thread_id field for event_push_actions and event_push_summary. + + This is preparatory so that it can be made non-nullable in the future. + + Because all current (null) data is done in an unthreaded manner this + simply assumes it is on the "main" timeline. Since event_push_actions + are periodically cleared it is not possible to correctly re-calculate + the thread_id. + """ + event_push_actions_done = progress.get("event_push_actions_done", False) + + def add_thread_id_txn( + txn: LoggingTransaction, start_stream_ordering: int + ) -> int: + sql = """ + SELECT stream_ordering + FROM event_push_actions + WHERE + thread_id IS NULL + AND stream_ordering > ? + ORDER BY stream_ordering + LIMIT ? + """ + txn.execute(sql, (start_stream_ordering, batch_size)) + + # No more rows to process. + rows = txn.fetchall() + if not rows: + progress["event_push_actions_done"] = True + self.db_pool.updates._background_update_progress_txn( + txn, "event_push_backfill_thread_id", progress + ) + return 0 + + # Update the thread ID for any of those rows. + max_stream_ordering = rows[-1][0] + + sql = """ + UPDATE event_push_actions + SET thread_id = 'main' + WHERE ? < stream_ordering AND stream_ordering <= ? 
AND thread_id IS NULL + """ + txn.execute( + sql, + ( + start_stream_ordering, + max_stream_ordering, + ), + ) + + # Update progress. + processed_rows = txn.rowcount + progress["max_event_push_actions_stream_ordering"] = max_stream_ordering + self.db_pool.updates._background_update_progress_txn( + txn, "event_push_backfill_thread_id", progress + ) + + return processed_rows + + def add_thread_id_summary_txn(txn: LoggingTransaction) -> int: + min_user_id = progress.get("max_summary_user_id", "") + min_room_id = progress.get("max_summary_room_id", "") + + # Slightly overcomplicated query for getting the Nth user ID / room + # ID tuple, or the last if there are less than N remaining. + sql = """ + SELECT user_id, room_id FROM ( + SELECT user_id, room_id FROM event_push_summary + WHERE (user_id, room_id) > (?, ?) + AND thread_id IS NULL + ORDER BY user_id, room_id + LIMIT ? + ) AS e + ORDER BY user_id DESC, room_id DESC + LIMIT 1 + """ + + txn.execute(sql, (min_user_id, min_room_id, batch_size)) + row = txn.fetchone() + if not row: + return 0 + + max_user_id, max_room_id = row + + sql = """ + UPDATE event_push_summary + SET thread_id = 'main' + WHERE + (?, ?) < (user_id, room_id) AND (user_id, room_id) <= (?, ?) + AND thread_id IS NULL + """ + txn.execute(sql, (min_user_id, min_room_id, max_user_id, max_room_id)) + processed_rows = txn.rowcount + + progress["max_summary_user_id"] = max_user_id + progress["max_summary_room_id"] = max_room_id + self.db_pool.updates._background_update_progress_txn( + txn, "event_push_backfill_thread_id", progress + ) + + return processed_rows + + # First update the event_push_actions table, then the event_push_summary table. + # + # Note that the event_push_actions_staging table is ignored since it is + # assumed that items in that table will only exist for a short period of + # time. + if not event_push_actions_done: + result = await self.db_pool.runInteraction( + "event_push_backfill_thread_id", + add_thread_id_txn, + progress.get("max_event_push_actions_stream_ordering", 0), + ) + else: + result = await self.db_pool.runInteraction( + "event_push_backfill_thread_id", + add_thread_id_summary_txn, + ) + + # Only done after the event_push_summary table is done. + if not result: + await self.db_pool.updates._end_background_update( + "event_push_backfill_thread_id" + ) + + return result + async def get_unread_counts_by_room_for_user(self, user_id: str) -> Dict[str, int]: """Get the notification count by room for a user. Only considers notifications, not highlight or unread counts, and threads are currently aggregated under their room. @@ -536,6 +711,25 @@ def _get_thread(thread_id: str) -> NotifCounts: (ReceiptTypes.READ, ReceiptTypes.READ_PRIVATE), ) + # First ensure that the existing rows have an updated thread_id field. + if not self._event_push_backfill_thread_id_done: + txn.execute( + """ + UPDATE event_push_summary + SET thread_id = ? + WHERE room_id = ? AND user_id = ? AND thread_id is NULL + """, + (MAIN_TIMELINE, room_id, user_id), + ) + txn.execute( + """ + UPDATE event_push_actions + SET thread_id = ? + WHERE room_id = ? AND user_id = ? AND thread_id is NULL + """, + (MAIN_TIMELINE, room_id, user_id), + ) + # First we pull the counts from the summary table. 
# # We check that `last_receipt_stream_ordering` matches the stream ordering of the @@ -1351,6 +1545,25 @@ def _handle_new_receipts_for_notifs_txn(self, txn: LoggingTransaction) -> bool: (room_id, user_id, stream_ordering, *thread_args), ) + # First ensure that the existing rows have an updated thread_id field. + if not self._event_push_backfill_thread_id_done: + txn.execute( + """ + UPDATE event_push_summary + SET thread_id = ? + WHERE room_id = ? AND user_id = ? AND thread_id is NULL + """, + (MAIN_TIMELINE, room_id, user_id), + ) + txn.execute( + """ + UPDATE event_push_actions + SET thread_id = ? + WHERE room_id = ? AND user_id = ? AND thread_id is NULL + """, + (MAIN_TIMELINE, room_id, user_id), + ) + # Fetch the notification counts between the stream ordering of the # latest receipt and what was previously summarised. unread_counts = self._get_notif_unread_count_for_user_room( @@ -1485,6 +1698,19 @@ def _rotate_notifs_before_txn( rotate_to_stream_ordering: The new maximum event stream ordering to summarise. """ + # Ensure that any new actions have an updated thread_id. + if not self._event_push_backfill_thread_id_done: + txn.execute( + """ + UPDATE event_push_actions + SET thread_id = ? + WHERE ? < stream_ordering AND stream_ordering <= ? AND thread_id IS NULL + """, + (MAIN_TIMELINE, old_rotate_stream_ordering, rotate_to_stream_ordering), + ) + + # XXX Do we need to update summaries here too? + # Calculate the new counts that should be upserted into event_push_summary sql = """ SELECT user_id, room_id, thread_id, @@ -1547,6 +1773,20 @@ def _rotate_notifs_before_txn( logger.info("Rotating notifications, handling %d rows", len(summaries)) + # Ensure that any updated threads have the proper thread_id. + if not self._event_push_backfill_thread_id_done: + txn.execute_batch( + """ + UPDATE event_push_summary + SET thread_id = ? + WHERE room_id = ? AND user_id = ? AND thread_id is NULL + """, + [ + (MAIN_TIMELINE, room_id, user_id) + for user_id, room_id, _ in summaries + ], + ) + self.db_pool.simple_upsert_many_txn( txn, table="event_push_summary", diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index 72bbb3a7c2c6..d3103a6c7a05 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -95,9 +95,9 @@ SCHEMA_COMPAT_VERSION = ( - # The threads_id column must written to with non-null values event_push_actions, - # event_push_actions_staging, and event_push_summary. - 74 + # The threads_id column must exist for event_push_actions, event_push_summary, + # receipts_linearized, and receipts_graph. + 73 ) """Limit on how far the synapse codebase can be rolled back without breaking db compat diff --git a/synapse/storage/schema/main/delta/74/02thread_notifications_backfill.sql b/synapse/storage/schema/main/delta/74/02thread_notifications_backfill.sql deleted file mode 100644 index ce6f9ff93748..000000000000 --- a/synapse/storage/schema/main/delta/74/02thread_notifications_backfill.sql +++ /dev/null @@ -1,28 +0,0 @@ -/* Copyright 2023 The Matrix.org Foundation C.I.C - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - --- Force the background updates from 06thread_notifications.sql to run in the --- foreground as code will now require those to be "done". - -DELETE FROM background_updates WHERE update_name = 'event_push_backfill_thread_id'; - --- Overwrite any null thread_id values. -UPDATE event_push_actions_staging SET thread_id = 'main' WHERE thread_id IS NULL; -UPDATE event_push_actions SET thread_id = 'main' WHERE thread_id IS NULL; -UPDATE event_push_summary SET thread_id = 'main' WHERE thread_id IS NULL; - --- Drop the background updates to calculate the indexes used to find null thread_ids. -DELETE FROM background_updates WHERE update_name = 'event_push_actions_thread_id_null'; -DELETE FROM background_updates WHERE update_name = 'event_push_summary_thread_id_null'; diff --git a/synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.postgres b/synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.postgres deleted file mode 100644 index 5f6866742543..000000000000 --- a/synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.postgres +++ /dev/null @@ -1,23 +0,0 @@ -/* Copyright 2022 The Matrix.org Foundation C.I.C - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - --- Drop the indexes used to find null thread_ids. -DROP INDEX IF EXISTS event_push_actions_thread_id_null; -DROP INDEX IF EXISTS event_push_summary_thread_id_null; - --- The thread_id columns can now be made non-nullable. -ALTER TABLE event_push_actions_staging ALTER COLUMN thread_id SET NOT NULL; -ALTER TABLE event_push_actions ALTER COLUMN thread_id SET NOT NULL; -ALTER TABLE event_push_summary ALTER COLUMN thread_id SET NOT NULL; diff --git a/synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.sqlite b/synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.sqlite deleted file mode 100644 index f46b2335605e..000000000000 --- a/synapse/storage/schema/main/delta/74/03thread_notifications_not_null.sql.sqlite +++ /dev/null @@ -1,99 +0,0 @@ -/* Copyright 2022 The Matrix.org Foundation C.I.C - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - - -- The thread_id columns can now be made non-nullable. --- --- SQLite doesn't support modifying columns to an existing table, so it must --- be recreated. - --- Create the new tables. 
-CREATE TABLE event_push_actions_staging_new ( - event_id TEXT NOT NULL, - user_id TEXT NOT NULL, - actions TEXT NOT NULL, - notif SMALLINT NOT NULL, - highlight SMALLINT NOT NULL, - unread SMALLINT, - thread_id TEXT NOT NULL, - inserted_ts BIGINT -); - -CREATE TABLE event_push_actions_new ( - room_id TEXT NOT NULL, - event_id TEXT NOT NULL, - user_id TEXT NOT NULL, - profile_tag VARCHAR(32), - actions TEXT NOT NULL, - topological_ordering BIGINT, - stream_ordering BIGINT, - notif SMALLINT, - highlight SMALLINT, - unread SMALLINT, - thread_id TEXT NOT NULL, - CONSTRAINT event_id_user_id_profile_tag_uniqueness UNIQUE (room_id, event_id, user_id, profile_tag) -); - -CREATE TABLE event_push_summary_new ( - user_id TEXT NOT NULL, - room_id TEXT NOT NULL, - notif_count BIGINT NOT NULL, - stream_ordering BIGINT NOT NULL, - unread_count BIGINT, - last_receipt_stream_ordering BIGINT, - thread_id TEXT NOT NULL -); - --- Copy the data. -INSERT INTO event_push_actions_staging_new (event_id, user_id, actions, notif, highlight, unread, thread_id, inserted_ts) - SELECT event_id, user_id, actions, notif, highlight, unread, thread_id, inserted_ts - FROM event_push_actions_staging; - -INSERT INTO event_push_actions_new (room_id, event_id, user_id, profile_tag, actions, topological_ordering, stream_ordering, notif, highlight, unread, thread_id) - SELECT room_id, event_id, user_id, profile_tag, actions, topological_ordering, stream_ordering, notif, highlight, unread, thread_id - FROM event_push_actions; - -INSERT INTO event_push_summary_new (user_id, room_id, notif_count, stream_ordering, unread_count, last_receipt_stream_ordering, thread_id) - SELECT user_id, room_id, notif_count, stream_ordering, unread_count, last_receipt_stream_ordering, thread_id - FROM event_push_summary; - --- Drop the old tables. -DROP TABLE event_push_actions_staging; -DROP TABLE event_push_actions; -DROP TABLE event_push_summary; - --- Rename the tables. -ALTER TABLE event_push_actions_staging_new RENAME TO event_push_actions_staging; -ALTER TABLE event_push_actions_new RENAME TO event_push_actions; -ALTER TABLE event_push_summary_new RENAME TO event_push_summary; - --- Recreate the indexes. -CREATE INDEX event_push_actions_staging_id ON event_push_actions_staging(event_id); - -CREATE INDEX event_push_actions_highlights_index ON event_push_actions (user_id, room_id, topological_ordering, stream_ordering); -CREATE INDEX event_push_actions_rm_tokens on event_push_actions( user_id, room_id, topological_ordering, stream_ordering ); -CREATE INDEX event_push_actions_room_id_user_id on event_push_actions(room_id, user_id); -CREATE INDEX event_push_actions_stream_ordering on event_push_actions( stream_ordering, user_id ); -CREATE INDEX event_push_actions_u_highlight ON event_push_actions (user_id, stream_ordering); - -CREATE UNIQUE INDEX event_push_summary_unique_index2 ON event_push_summary (user_id, room_id, thread_id) ; - --- Recreate some indexes in the background, by re-running the background updates --- from 72/02event_push_actions_index.sql and 72/06thread_notifications.sql. 
-INSERT INTO background_updates (ordering, update_name, progress_json) VALUES - (7403, 'event_push_summary_unique_index2', '{}') - ON CONFLICT (update_name) DO UPDATE SET progress_json = '{}'; -INSERT INTO background_updates (ordering, update_name, progress_json) VALUES - (7403, 'event_push_actions_stream_highlight_index', '{}') - ON CONFLICT (update_name) DO UPDATE SET progress_json = '{}'; From 6204c3663eabec57e897e7e75180b959a936e1fe Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 31 Mar 2023 13:51:51 +0100 Subject: [PATCH 045/106] Revert pruning of old devices (#15360) * Revert "Fix registering a device on an account with lots of devices (#15348)" This reverts commit f0d8f66eaaacfa75bed65bc5d0c602fbc5339c85. * Revert "Delete stale non-e2e devices for users, take 3 (#15183)" This reverts commit 78cdb72cd6b0e007c314d9fed9f629dfc5b937a6. --- changelog.d/15183.misc | 1 - changelog.d/15348.misc | 1 - synapse/handlers/device.py | 2 +- synapse/handlers/register.py | 52 +------------- synapse/storage/databases/main/devices.py | 83 +---------------------- tests/handlers/test_admin.py | 2 +- tests/handlers/test_device.py | 2 +- tests/rest/client/test_register.py | 47 ------------- tests/storage/test_client_ips.py | 4 +- 9 files changed, 7 insertions(+), 187 deletions(-) delete mode 100644 changelog.d/15183.misc delete mode 100644 changelog.d/15348.misc diff --git a/changelog.d/15183.misc b/changelog.d/15183.misc deleted file mode 100644 index f9bfc581ad53..000000000000 --- a/changelog.d/15183.misc +++ /dev/null @@ -1 +0,0 @@ -Prune user's old devices on login if they have too many. diff --git a/changelog.d/15348.misc b/changelog.d/15348.misc deleted file mode 100644 index f9bfc581ad53..000000000000 --- a/changelog.d/15348.misc +++ /dev/null @@ -1 +0,0 @@ -Prune user's old devices on login if they have too many. 
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 0fc165a8d68c..9ded6389acdb 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -485,7 +485,7 @@ async def delete_all_devices_for_user( device_ids = [d for d in device_ids if d != except_device_id] await self.delete_devices(user_id, device_ids) - async def delete_devices(self, user_id: str, device_ids: StrCollection) -> None: + async def delete_devices(self, user_id: str, device_ids: List[str]) -> None: """Delete several devices Args: diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 7e9d065f50e1..c8bf2439afb5 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -16,7 +16,7 @@ """Contains functions for registering clients.""" import logging -from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Iterable, List, Optional, Tuple from prometheus_client import Counter from typing_extensions import TypedDict @@ -40,7 +40,6 @@ from synapse.config.server import is_threepid_reserved from synapse.handlers.device import DeviceHandler from synapse.http.servlet import assert_params_in_dict -from synapse.metrics.background_process_metrics import run_as_background_process from synapse.replication.http.login import RegisterDeviceReplicationServlet from synapse.replication.http.register import ( ReplicationPostRegisterActionsServlet, @@ -49,7 +48,6 @@ from synapse.spam_checker_api import RegistrationBehaviour from synapse.types import RoomAlias, UserID, create_requester from synapse.types.state import StateFilter -from synapse.util.iterutils import batch_iter if TYPE_CHECKING: from synapse.server import HomeServer @@ -112,10 +110,6 @@ def __init__(self, hs: "HomeServer"): self._server_notices_mxid = hs.config.servernotices.server_notices_mxid self._server_name = hs.hostname - # The set of users that we're currently pruning devices for. Ensures - # that we don't have two such jobs for the same user running at once. - self._currently_pruning_devices_for_users: Set[str] = set() - self.spam_checker = hs.get_spam_checker() if hs.config.worker.worker_app: @@ -127,10 +121,7 @@ def __init__(self, hs: "HomeServer"): ReplicationPostRegisterActionsServlet.make_client(hs) ) else: - device_handler = hs.get_device_handler() - assert isinstance(device_handler, DeviceHandler) - self.device_handler = device_handler - + self.device_handler = hs.get_device_handler() self._register_device_client = self.register_device_inner self.pusher_pool = hs.get_pusherpool() @@ -860,9 +851,6 @@ class and RegisterDeviceReplicationServlet. # This can only run on the main process. assert isinstance(self.device_handler, DeviceHandler) - # Prune the user's device list if they already have a lot of devices. - await self._maybe_prune_too_many_devices(user_id) - registered_device_id = await self.device_handler.check_device_registered( user_id, device_id, @@ -931,42 +919,6 @@ class and RegisterDeviceReplicationServlet. "refresh_token": refresh_token, } - async def _maybe_prune_too_many_devices(self, user_id: str) -> None: - """Delete any excess old devices this user may have.""" - - if user_id in self._currently_pruning_devices_for_users: - return - - # We also cap the number of users whose devices we prune at the same - # time, to avoid performance problems. 
- if len(self._currently_pruning_devices_for_users) > 5: - return - - device_ids = await self.store.check_too_many_devices_for_user(user_id) - if not device_ids: - return - - logger.info("Pruning %d stale devices for %s", len(device_ids), user_id) - - # Now spawn a background loop that deletes said devices. - async def _prune_too_many_devices_loop() -> None: - if user_id in self._currently_pruning_devices_for_users: - return - - self._currently_pruning_devices_for_users.add(user_id) - - try: - for batch in batch_iter(device_ids, 10): - await self.device_handler.delete_devices(user_id, batch) - - await self.clock.sleep(60) - finally: - self._currently_pruning_devices_for_users.discard(user_id) - - run_as_background_process( - "_prune_too_many_devices_loop", _prune_too_many_devices_loop - ) - async def post_registration_actions( self, user_id: str, auth_result: dict, access_token: Optional[str] ) -> None: diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index f61b7bc96eec..5503621ad613 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -1599,76 +1599,6 @@ def _txn(txn: LoggingTransaction) -> int: return rows - async def check_too_many_devices_for_user(self, user_id: str) -> List[str]: - """Check if the user has a lot of devices, and if so return the set of - devices we can prune. - - This does *not* return hidden devices or devices with E2E keys. - """ - - num_devices = await self.db_pool.simple_select_one_onecol( - table="devices", - keyvalues={"user_id": user_id, "hidden": False}, - retcol="COALESCE(COUNT(*), 0)", - desc="count_devices", - ) - - # We let users have up to ten devices without pruning. - if num_devices <= 10: - return [] - - # We always prune devices not seen in the last 14 days... - max_last_seen = self._clock.time_msec() - 14 * 24 * 60 * 60 * 1000 - - # ... but we also cap the maximum number of devices the user can have to - # 50. - if num_devices > 50: - # Choose a last seen that ensures we keep at most 50 devices. - sql = """ - SELECT last_seen FROM devices - LEFT JOIN e2e_device_keys_json USING (user_id, device_id) - WHERE - user_id = ? - AND NOT hidden - AND last_seen IS NOT NULL - AND key_json IS NULL - ORDER BY last_seen DESC - LIMIT 1 - OFFSET 50 - """ - - rows = await self.db_pool.execute( - "check_too_many_devices_for_user_last_seen", - None, - sql, - user_id, - ) - if rows: - max_last_seen = max(rows[0][0], max_last_seen) - - # Fetch the devices to delete. - sql = """ - SELECT device_id FROM devices - LEFT JOIN e2e_device_keys_json USING (user_id, device_id) - WHERE - user_id = ? - AND NOT hidden - AND last_seen <= ? 
- AND key_json IS NULL - ORDER BY last_seen - """ - - def check_too_many_devices_for_user_txn( - txn: LoggingTransaction, - ) -> List[str]: - txn.execute(sql, (user_id, max_last_seen)) - return [device_id for device_id, in txn] - - return await self.db_pool.runInteraction( - "check_too_many_devices_for_user", - check_too_many_devices_for_user_txn, - ) - class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore): # Because we have write access, this will be a StreamIdGenerator @@ -1727,7 +1657,6 @@ async def store_device( values={}, insertion_values={ "display_name": initial_device_display_name, - "last_seen": self._clock.time_msec(), "hidden": False, }, desc="store_device", @@ -1773,15 +1702,7 @@ async def store_device( ) raise StoreError(500, "Problem storing device.") - @cached(max_entries=0) - async def delete_device(self, user_id: str, device_id: str) -> None: - raise NotImplementedError() - - # Note: sometimes deleting rows out of `device_inbox` can take a long time, - # so we use a cache so that we deduplicate in flight requests to delete - # devices. - @cachedList(cached_method_name="delete_device", list_name="device_ids") - async def delete_devices(self, user_id: str, device_ids: Collection[str]) -> dict: + async def delete_devices(self, user_id: str, device_ids: List[str]) -> None: """Deletes several devices. Args: @@ -1818,8 +1739,6 @@ def _delete_devices_txn(txn: LoggingTransaction) -> None: for device_id in device_ids: self.device_id_exists_cache.invalidate((user_id, device_id)) - return {} - async def update_device( self, user_id: str, device_id: str, new_display_name: Optional[str] = None ) -> None: diff --git a/tests/handlers/test_admin.py b/tests/handlers/test_admin.py index f0ba3775c825..5569ccef8aef 100644 --- a/tests/handlers/test_admin.py +++ b/tests/handlers/test_admin.py @@ -272,7 +272,7 @@ def test_devices(self) -> None: self.assertIn("device_id", args[0][0]) self.assertIsNone(args[0][0]["display_name"]) self.assertIsNone(args[0][0]["last_seen_user_agent"]) - self.assertEqual(args[0][0]["last_seen_ts"], 600) + self.assertIsNone(args[0][0]["last_seen_ts"]) self.assertIsNone(args[0][0]["last_seen_ip"]) def test_connections(self) -> None: diff --git a/tests/handlers/test_device.py b/tests/handlers/test_device.py index a456bffd632f..ce7525e29c0a 100644 --- a/tests/handlers/test_device.py +++ b/tests/handlers/test_device.py @@ -115,7 +115,7 @@ def test_get_devices_by_user(self) -> None: "device_id": "xyz", "display_name": "display 0", "last_seen_ip": None, - "last_seen_ts": 1000000, + "last_seen_ts": None, }, device_map["xyz"], ) diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py index 7ae84e3139aa..b228dba8613d 100644 --- a/tests/rest/client/test_register.py +++ b/tests/rest/client/test_register.py @@ -794,53 +794,6 @@ def test_require_approval(self) -> None: ApprovalNoticeMedium.NONE, channel.json_body["approval_notice_medium"] ) - def test_check_stale_devices_get_pruned(self) -> None: - """Check that if a user has some stale devices we log them out when they - log in a new device.""" - - # Register some devices, but not too many that we go over the threshold - # where we prune more aggressively. - user_id = self.register_user("user", "pass") - for _ in range(0, 50): - self.login(user_id, "pass") - - store = self.hs.get_datastores().main - - res = self.get_success(store.get_devices_by_user(user_id)) - self.assertEqual(len(res), 50) - - # Advance time so that the above devices are considered "old". 
- self.reactor.advance(30 * 24 * 60 * 60 * 1000) - - self.login(user_id, "pass") - - self.reactor.pump([60] * 10) # Ensure background job runs - - # We expect all old devices to have been logged out - res = self.get_success(store.get_devices_by_user(user_id)) - self.assertEqual(len(res), 1) - - def test_check_recent_devices_get_pruned(self) -> None: - """Check that if a user has many devices we log out the last oldest - ones. - - Note: this is similar to above, except if we lots of devices we prune - devices even if they're not old. - """ - - # Register a lot of devices in a short amount of time - user_id = self.register_user("user", "pass") - for _ in range(0, 100): - self.login(user_id, "pass") - self.reactor.advance(100) - - store = self.hs.get_datastores().main - - # We keep up to 50 devices that have been used in the last week, plus - # the device that was last logged in. - res = self.get_success(store.get_devices_by_user(user_id)) - self.assertEqual(len(res), 51) - class AccountValidityTestCase(unittest.HomeserverTestCase): servlets = [ diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index f98998653897..cd0079871ca2 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -170,8 +170,6 @@ def test_get_last_client_ip_by_device(self, after_persisting: bool) -> None: ) ) - last_seen = self.clock.time_msec() - if after_persisting: # Trigger the storage loop self.reactor.advance(10) @@ -192,7 +190,7 @@ def test_get_last_client_ip_by_device(self, after_persisting: bool) -> None: "device_id": device_id, "ip": None, "user_agent": None, - "last_seen": last_seen, + "last_seen": None, }, ], ) From 157092d97ad14f79878ae90009b9c60489679a8f Mon Sep 17 00:00:00 2001 From: Jason Robinson Date: Fri, 31 Mar 2023 20:20:40 +0300 Subject: [PATCH 046/106] Fix copyright year in SSO footer template (#15358) --- changelog.d/15358.misc | 1 + synapse/res/templates/sso_footer.html | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/15358.misc diff --git a/changelog.d/15358.misc b/changelog.d/15358.misc new file mode 100644 index 000000000000..daf261a9f16d --- /dev/null +++ b/changelog.d/15358.misc @@ -0,0 +1 @@ +Fix copyright year in SSO footer template. diff --git a/synapse/res/templates/sso_footer.html b/synapse/res/templates/sso_footer.html index b46e0d83fe13..fdcb206c320d 100644 --- a/synapse/res/templates/sso_footer.html +++ b/synapse/res/templates/sso_footer.html @@ -15,5 +15,5 @@ -

-    <p>An open network for secure, decentralized communication.<br>© 2022 The Matrix.org Foundation C.I.C.</p>
-</div>
\ No newline at end of file
+    <p>An open network for secure, decentralized communication.<br>© 2023 The Matrix.org Foundation C.I.C.</p>
+</div>

+ From 675ff0d5d0481051358dbccf81e630b97ef67e47 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Apr 2023 08:08:28 +0100 Subject: [PATCH 047/106] Bump peaceiris/actions-gh-pages from 3.9.2 to 3.9.3 (#15369) * Bump peaceiris/actions-gh-pages from 3.9.2 to 3.9.3 Bumps [peaceiris/actions-gh-pages](https://github.com/peaceiris/actions-gh-pages) from 3.9.2 to 3.9.3. - [Release notes](https://github.com/peaceiris/actions-gh-pages/releases) - [Changelog](https://github.com/peaceiris/actions-gh-pages/blob/main/CHANGELOG.md) - [Commits](https://github.com/peaceiris/actions-gh-pages/compare/bd8c6b06eba6b3d25d72b7a1767993c0aeee42e7...373f7f263a76c20808c831209c920827a82a2847) --- updated-dependencies: - dependency-name: peaceiris/actions-gh-pages dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- .github/workflows/docs.yaml | 4 ++-- changelog.d/15369.misc | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/15369.misc diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 7443fd70d45f..2bd0f3256602 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -67,7 +67,7 @@ jobs: # Deploy to the target directory. - name: Deploy to gh pages - uses: peaceiris/actions-gh-pages@bd8c6b06eba6b3d25d72b7a1767993c0aeee42e7 # v3.9.2 + uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./book @@ -97,7 +97,7 @@ jobs: # Deploy to the target directory. - name: Deploy to gh pages - uses: peaceiris/actions-gh-pages@bd8c6b06eba6b3d25d72b7a1767993c0aeee42e7 # v3.9.2 + uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./dev-docs/_build/html diff --git a/changelog.d/15369.misc b/changelog.d/15369.misc new file mode 100644 index 000000000000..da990285cbab --- /dev/null +++ b/changelog.d/15369.misc @@ -0,0 +1 @@ +Bump peaceiris/actions-gh-pages from 3.9.2 to 3.9.3. From 84b06fc89346dcff7e412b05f2e9bed272082cc9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Apr 2023 08:08:38 +0100 Subject: [PATCH 048/106] Bump serde from 1.0.158 to 1.0.159 (#15370) * Bump serde from 1.0.158 to 1.0.159 Bumps [serde](https://github.com/serde-rs/serde) from 1.0.158 to 1.0.159. - [Release notes](https://github.com/serde-rs/serde/releases) - [Commits](https://github.com/serde-rs/serde/compare/v1.0.158...v1.0.159) --- updated-dependencies: - dependency-name: serde dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- Cargo.lock | 8 ++++---- changelog.d/15370.misc | 1 + 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15370.misc diff --git a/Cargo.lock b/Cargo.lock index 68246b9b28a2..2126cb008730 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -323,18 +323,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "serde" -version = "1.0.158" +version = "1.0.159" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "771d4d9c4163ee138805e12c710dd365e4f44be8be0503cb1bb9eb989425d9c9" +checksum = "3c04e8343c3daeec41f58990b9d77068df31209f2af111e059e9fe9646693065" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.158" +version = "1.0.159" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e801c1712f48475582b7696ac71e0ca34ebb30e09338425384269d9717c62cad" +checksum = "4c614d17805b093df4b147b51339e7e44bf05ef59fba1e45d83500bcfb4d8585" dependencies = [ "proc-macro2", "quote", diff --git a/changelog.d/15370.misc b/changelog.d/15370.misc new file mode 100644 index 000000000000..23a18eaf07b8 --- /dev/null +++ b/changelog.d/15370.misc @@ -0,0 +1 @@ +Bump serde from 1.0.158 to 1.0.159. From 9b2ab506c5b9b07eb5f5b1df55d52874e3a7be17 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Apr 2023 08:08:46 +0100 Subject: [PATCH 049/106] Bump serde_json from 1.0.94 to 1.0.95 (#15371) * Bump serde_json from 1.0.94 to 1.0.95 Bumps [serde_json](https://github.com/serde-rs/json) from 1.0.94 to 1.0.95. - [Release notes](https://github.com/serde-rs/json/releases) - [Commits](https://github.com/serde-rs/json/compare/v1.0.94...v1.0.95) --- updated-dependencies: - dependency-name: serde_json dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- Cargo.lock | 4 ++-- changelog.d/15371.misc | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/15371.misc diff --git a/Cargo.lock b/Cargo.lock index 2126cb008730..4a2c6af8f317 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -343,9 +343,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.94" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea" +checksum = "d721eca97ac802aa7777b701877c8004d950fc142651367300d21c1cc0194744" dependencies = [ "itoa", "ryu", diff --git a/changelog.d/15371.misc b/changelog.d/15371.misc new file mode 100644 index 000000000000..c21deede05a6 --- /dev/null +++ b/changelog.d/15371.misc @@ -0,0 +1 @@ +Bump serde_json from 1.0.94 to 1.0.95. From 56efa9b1672abcdf1f42bebcc3c7dd0b4fa40067 Mon Sep 17 00:00:00 2001 From: Jason Little Date: Mon, 3 Apr 2023 04:27:51 -0500 Subject: [PATCH 050/106] Experimental Unix socket support (#15353) * Add IReactorUNIX to ISynapseReactor type hint. * Create listen_unix(). 
Two options, 'path' to the file and 'mode' of permissions(not umask, recommend 666 as default as nginx/other reverse proxies write to it and it's setup as user www-data) For the moment, leave the option to always create a PID lockfile turned on by default * Create UnixListenerConfig and wire it up. Rename ListenerConfig to TCPListenerConfig, then Union them together into ListenerConfig. This spidered around a bit, but I think I got it all. Metrics and manhole have been placed behind a conditional in case of accidental putting them onto a unix socket. Use new helpers to get if a listener is configured for TLS, and to help create a site tag for logging. There are 2 TODO things in parse_listener_def() to finish up at a later point. * Refactor SynapseRequest to handle logging correctly when using a unix socket. This prevents an exception when an IP address can not be retrieved for a request. * Make the 'Synapse now listening on Unix socket' log line a little prettier. * No silent failures on generic workers when trying to use a unix socket with metrics or manhole. * Inline variables in app/_base.py * Update docstring for listen_unix() to remove reference to a hardcoded permission of 0o666 and add a few comments saying where the default IS declared. * Disallow both a unix socket and a ip/port combo on the same listener resource * Linting * Changelog * review: simplify how listen_unix returns(and get rid of a type: ignore) * review: fix typo from ConfigError in app/homeserver.py * review: roll conditional for http_options.tag into get_site_tag() helper(and add docstring) * review: enhance the conditionals for checking if a port or path is valid, remove a TODO line * review: Try updating comment in get_client_ip_if_available to clarify what is being retrieved and why * Pretty up how 'Synapse now listening on Unix Socket' looks by decoding the byte string. * review: In parse_listener_def(), raise ConfigError if neither socket_path nor port is declared(and fix a typo) --- changelog.d/15353.misc | 1 + synapse/app/_base.py | 92 ++++++++++++++++++-------- synapse/app/generic_worker.py | 34 ++++++---- synapse/app/homeserver.py | 42 +++++++----- synapse/config/server.py | 118 ++++++++++++++++++++++++++-------- synapse/config/workers.py | 13 ++-- synapse/http/site.py | 27 +++++++- synapse/types/__init__.py | 2 + 8 files changed, 239 insertions(+), 90 deletions(-) create mode 100644 changelog.d/15353.misc diff --git a/changelog.d/15353.misc b/changelog.d/15353.misc new file mode 100644 index 000000000000..23927fea8ff2 --- /dev/null +++ b/changelog.d/15353.misc @@ -0,0 +1 @@ +Add experimental support for Unix sockets. Contributed by Jason Little. 
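As a rough illustration of the `path`/`mode` options described in the commit message above, a unix-socket listener definition might be parsed as follows. This is a hedged sketch: the socket path and resource names are invented, and it assumes the post-patch `synapse.config.server` module is importable:

```python
from synapse.config.server import parse_listener_def

listener = parse_listener_def(
    0,
    {
        "path": "/run/synapse/http.sock",  # example path, not a default from the patch
        "mode": 0o660,  # optional; parse_listener_def() falls back to 0o666
        "type": "http",
        "resources": [{"names": ["client", "federation"]}],
    },
)

# A "path" (with no "port") yields a UnixListenerConfig, and x_forwarded
# defaults to True because a TLS-terminating reverse proxy has to sit in
# front of the socket.
assert type(listener).__name__ == "UnixListenerConfig"
assert listener.http_options is not None and listener.http_options.x_forwarded
```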
diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 28062dd69d3a..f7b866978cca 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -41,7 +41,12 @@ import twisted from twisted.internet import defer, error, reactor as _reactor -from twisted.internet.interfaces import IOpenSSLContextFactory, IReactorSSL, IReactorTCP +from twisted.internet.interfaces import ( + IOpenSSLContextFactory, + IReactorSSL, + IReactorTCP, + IReactorUNIX, +) from twisted.internet.protocol import ServerFactory from twisted.internet.tcp import Port from twisted.logger import LoggingFile, LogLevel @@ -56,7 +61,7 @@ from synapse.config import ConfigError from synapse.config._base import format_config_error from synapse.config.homeserver import HomeServerConfig -from synapse.config.server import ListenerConfig, ManholeConfig +from synapse.config.server import ListenerConfig, ManholeConfig, TCPListenerConfig from synapse.crypto import context_factory from synapse.events.presence_router import load_legacy_presence_router from synapse.events.spamcheck import load_legacy_spam_checkers @@ -351,6 +356,28 @@ def listen_tcp( return r # type: ignore[return-value] +def listen_unix( + path: str, + mode: int, + factory: ServerFactory, + reactor: IReactorUNIX = reactor, + backlog: int = 50, +) -> List[Port]: + """ + Create a UNIX socket for a given path and 'mode' permission + + Returns: + list of twisted.internet.tcp.Port listening for TCP connections + """ + wantPID = True + + return [ + # IReactorUNIX returns an object implementing IListeningPort from listenUNIX, + # but we know it will be a Port instance. + cast(Port, reactor.listenUNIX(path, factory, backlog, mode, wantPID)) + ] + + def listen_http( listener_config: ListenerConfig, root_resource: Resource, @@ -359,18 +386,13 @@ def listen_http( context_factory: Optional[IOpenSSLContextFactory], reactor: ISynapseReactor = reactor, ) -> List[Port]: - port = listener_config.port - bind_addresses = listener_config.bind_addresses - tls = listener_config.tls - assert listener_config.http_options is not None - site_tag = listener_config.http_options.tag - if site_tag is None: - site_tag = str(port) + site_tag = listener_config.get_site_tag() site = SynapseSite( - "synapse.access.%s.%s" % ("https" if tls else "http", site_tag), + "synapse.access.%s.%s" + % ("https" if listener_config.is_tls() else "http", site_tag), site_tag, listener_config, root_resource, @@ -378,25 +400,41 @@ def listen_http( max_request_body_size=max_request_body_size, reactor=reactor, ) - if tls: - # refresh_certificate should have been called before this. - assert context_factory is not None - ports = listen_ssl( - bind_addresses, - port, - site, - context_factory, - reactor=reactor, - ) - logger.info("Synapse now listening on TCP port %d (TLS)", port) + + if isinstance(listener_config, TCPListenerConfig): + if listener_config.is_tls(): + # refresh_certificate should have been called before this. 
+            assert context_factory is not None
+            ports = listen_ssl(
+                listener_config.bind_addresses,
+                listener_config.port,
+                site,
+                context_factory,
+                reactor=reactor,
+            )
+            logger.info(
+                "Synapse now listening on TCP port %d (TLS)", listener_config.port
+            )
+        else:
+            ports = listen_tcp(
+                listener_config.bind_addresses,
+                listener_config.port,
+                site,
+                reactor=reactor,
+            )
+            logger.info("Synapse now listening on TCP port %d", listener_config.port)
+
     else:
-        ports = listen_tcp(
-            bind_addresses,
-            port,
-            site,
-            reactor=reactor,
+        ports = listen_unix(
+            listener_config.path, listener_config.mode, site, reactor=reactor
         )
-        logger.info("Synapse now listening on TCP port %d", port)
+        # getHost() returns a UNIXAddress which contains an instance variable of 'name'
+        # encoded as a byte string. Decode as utf-8 so pretty.
+        logger.info(
+            "Synapse now listening on Unix Socket at: "
+            f"{ports[0].getHost().name.decode('utf-8')}"
+        )
+
     return ports

diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py
index 0dec24369a49..e17ce35b8e29 100644
--- a/synapse/app/generic_worker.py
+++ b/synapse/app/generic_worker.py
@@ -38,7 +38,7 @@
 from synapse.config._base import ConfigError
 from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
-from synapse.config.server import ListenerConfig
+from synapse.config.server import ListenerConfig, TCPListenerConfig
 from synapse.federation.transport.server import TransportLayerServer
 from synapse.http.server import JsonResource, OptionsResource
 from synapse.logging.context import LoggingContext
@@ -236,12 +236,18 @@ def start_listening(self) -> None:
             if listener.type == "http":
                 self._listen_http(listener)
             elif listener.type == "manhole":
-                _base.listen_manhole(
-                    listener.bind_addresses,
-                    listener.port,
-                    manhole_settings=self.config.server.manhole_settings,
-                    manhole_globals={"hs": self},
-                )
+                if isinstance(listener, TCPListenerConfig):
+                    _base.listen_manhole(
+                        listener.bind_addresses,
+                        listener.port,
+                        manhole_settings=self.config.server.manhole_settings,
+                        manhole_globals={"hs": self},
+                    )
+                else:
+                    raise ConfigError(
+                        "Can not use a unix socket for manhole at this time."
+                    )
+
             elif listener.type == "metrics":
                 if not self.config.metrics.enable_metrics:
                     logger.warning(
                         "enable_metrics is not True!"
                     )
                 else:
-                    _base.listen_metrics(
-                        listener.bind_addresses,
-                        listener.port,
-                    )
+                    if isinstance(listener, TCPListenerConfig):
+                        _base.listen_metrics(
+                            listener.bind_addresses,
+                            listener.port,
+                        )
+                    else:
+                        raise ConfigError(
+                            "Can not use a unix socket for metrics at this time."
+ ) + else: logger.warning("Unsupported listener type: %s", listener.type) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index b8830b1a9c16..84236ac299e0 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -44,7 +44,7 @@ ) from synapse.config._base import ConfigError, format_config_error from synapse.config.homeserver import HomeServerConfig -from synapse.config.server import ListenerConfig +from synapse.config.server import ListenerConfig, TCPListenerConfig from synapse.federation.transport.server import TransportLayerServer from synapse.http.additional_resource import AdditionalResource from synapse.http.server import ( @@ -78,14 +78,13 @@ class SynapseHomeServer(HomeServer): DATASTORE_CLASS = DataStore # type: ignore def _listener_http( - self, config: HomeServerConfig, listener_config: ListenerConfig + self, + config: HomeServerConfig, + listener_config: ListenerConfig, ) -> Iterable[Port]: - port = listener_config.port # Must exist since this is an HTTP listener. assert listener_config.http_options is not None - site_tag = listener_config.http_options.tag - if site_tag is None: - site_tag = str(port) + site_tag = listener_config.get_site_tag() # We always include a health resource. resources: Dict[str, Resource] = {"/health": HealthResource()} @@ -252,12 +251,17 @@ def start_listening(self) -> None: self._listener_http(self.config, listener) ) elif listener.type == "manhole": - _base.listen_manhole( - listener.bind_addresses, - listener.port, - manhole_settings=self.config.server.manhole_settings, - manhole_globals={"hs": self}, - ) + if isinstance(listener, TCPListenerConfig): + _base.listen_manhole( + listener.bind_addresses, + listener.port, + manhole_settings=self.config.server.manhole_settings, + manhole_globals={"hs": self}, + ) + else: + raise ConfigError( + "Can not use a unix socket for manhole at this time." + ) elif listener.type == "metrics": if not self.config.metrics.enable_metrics: logger.warning( @@ -265,10 +269,16 @@ def start_listening(self) -> None: "enable_metrics is not True!" ) else: - _base.listen_metrics( - listener.bind_addresses, - listener.port, - ) + if isinstance(listener, TCPListenerConfig): + _base.listen_metrics( + listener.bind_addresses, + listener.port, + ) + else: + raise ConfigError( + "Can not use a unix socket for metrics at this time." 
+ ) + else: # this shouldn't happen, as the listener type should have been checked # during parsing diff --git a/synapse/config/server.py b/synapse/config/server.py index 0e46b849cf06..386c3194b85a 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -214,17 +214,52 @@ class HttpListenerConfig: @attr.s(slots=True, frozen=True, auto_attribs=True) -class ListenerConfig: - """Object describing the configuration of a single listener.""" +class TCPListenerConfig: + """Object describing the configuration of a single TCP listener.""" port: int = attr.ib(validator=attr.validators.instance_of(int)) - bind_addresses: List[str] + bind_addresses: List[str] = attr.ib(validator=attr.validators.instance_of(List)) type: str = attr.ib(validator=attr.validators.in_(KNOWN_LISTENER_TYPES)) tls: bool = False # http_options is only populated if type=http http_options: Optional[HttpListenerConfig] = None + def get_site_tag(self) -> str: + """Retrieves http_options.tag if it exists, otherwise the port number.""" + if self.http_options and self.http_options.tag is not None: + return self.http_options.tag + else: + return str(self.port) + + def is_tls(self) -> bool: + return self.tls + + +@attr.s(slots=True, frozen=True, auto_attribs=True) +class UnixListenerConfig: + """Object describing the configuration of a single Unix socket listener.""" + + # Note: unix sockets can not be tls encrypted, so HAVE to be behind a tls-handling + # reverse proxy + path: str = attr.ib() + # A default(0o666) for this is set in parse_listener_def() below + mode: int + type: str = attr.ib(validator=attr.validators.in_(KNOWN_LISTENER_TYPES)) + + # http_options is only populated if type=http + http_options: Optional[HttpListenerConfig] = None + + def get_site_tag(self) -> str: + return "unix" + + def is_tls(self) -> bool: + """Unix sockets can't have TLS""" + return False + + +ListenerConfig = Union[TCPListenerConfig, UnixListenerConfig] + @attr.s(slots=True, frozen=True, auto_attribs=True) class ManholeConfig: @@ -531,12 +566,12 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: self.listeners = [parse_listener_def(i, x) for i, x in enumerate(listeners)] - # no_tls is not really supported any more, but let's grandfather it in - # here. + # no_tls is not really supported anymore, but let's grandfather it in here. 
if config.get("no_tls", False): l2 = [] for listener in self.listeners: - if listener.tls: + if isinstance(listener, TCPListenerConfig) and listener.tls: + # Use isinstance() as the assertion this *has* a listener.port logger.info( "Ignoring TLS-enabled listener on port %i due to no_tls", listener.port, @@ -577,7 +612,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: ) self.listeners.append( - ListenerConfig( + TCPListenerConfig( port=bind_port, bind_addresses=[bind_host], tls=True, @@ -589,7 +624,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: unsecure_port = config.get("unsecure_port", bind_port - 400) if unsecure_port: self.listeners.append( - ListenerConfig( + TCPListenerConfig( port=unsecure_port, bind_addresses=[bind_host], tls=False, @@ -601,7 +636,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: manhole = config.get("manhole") if manhole: self.listeners.append( - ListenerConfig( + TCPListenerConfig( port=manhole, bind_addresses=["127.0.0.1"], type="manhole", @@ -648,7 +683,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: logger.warning(METRICS_PORT_WARNING) self.listeners.append( - ListenerConfig( + TCPListenerConfig( port=metrics_port, bind_addresses=[config.get("metrics_bind_host", "127.0.0.1")], type="http", @@ -724,7 +759,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: self.delete_stale_devices_after = None def has_tls_listener(self) -> bool: - return any(listener.tls for listener in self.listeners) + return any(listener.is_tls() for listener in self.listeners) def generate_config_section( self, @@ -904,25 +939,25 @@ def parse_listener_def(num: int, listener: Any) -> ListenerConfig: raise ConfigError(DIRECT_TCP_ERROR, ("listeners", str(num), "type")) port = listener.get("port") - if type(port) is not int: + socket_path = listener.get("path") + # Either a port or a path should be declared at a minimum. Using both would be bad. + if port is not None and not isinstance(port, int): raise ConfigError("Listener configuration is lacking a valid 'port' option") + if socket_path is not None and not isinstance(socket_path, str): + raise ConfigError("Listener configuration is lacking a valid 'path' option") + if port and socket_path: + raise ConfigError( + "Can not have both a UNIX socket and an IP/port declared for the same " + "resource!" + ) + if port is None and socket_path is None: + raise ConfigError( + "Must have either a UNIX socket or an IP/port declared for a given " + "resource!" + ) tls = listener.get("tls", False) - bind_addresses = listener.get("bind_addresses", []) - bind_address = listener.get("bind_address") - # if bind_address was specified, add it to the list of addresses - if bind_address: - bind_addresses.append(bind_address) - - # if we still have an empty list of addresses, use the default list - if not bind_addresses: - if listener_type == "metrics": - # the metrics listener doesn't support IPv6 - bind_addresses.append("0.0.0.0") - else: - bind_addresses.extend(DEFAULT_BIND_ADDRESSES) - http_config = None if listener_type == "http": try: @@ -932,8 +967,12 @@ def parse_listener_def(num: int, listener: Any) -> ListenerConfig: except ValueError as e: raise ConfigError("Unknown listener resource") from e + # For a unix socket, default x_forwarded to True, as this is the only way of + # getting a client IP. + # Note: a reverse proxy is required anyway, as there is no way of exposing a + # unix socket to the internet. 
http_config = HttpListenerConfig( - x_forwarded=listener.get("x_forwarded", False), + x_forwarded=listener.get("x_forwarded", (True if socket_path else False)), resources=resources, additional_resources=listener.get("additional_resources", {}), tag=listener.get("tag"), @@ -941,7 +980,30 @@ def parse_listener_def(num: int, listener: Any) -> ListenerConfig: experimental_cors_msc3886=listener.get("experimental_cors_msc3886", False), ) - return ListenerConfig(port, bind_addresses, listener_type, tls, http_config) + if socket_path: + # TODO: Add in path validation, like if the directory exists and is writable? + # Set a default for the permission, in case it's left out + socket_mode = listener.get("mode", 0o666) + + return UnixListenerConfig(socket_path, socket_mode, listener_type, http_config) + + else: + assert port is not None + bind_addresses = listener.get("bind_addresses", []) + bind_address = listener.get("bind_address") + # if bind_address was specified, add it to the list of addresses + if bind_address: + bind_addresses.append(bind_address) + + # if we still have an empty list of addresses, use the default list + if not bind_addresses: + if listener_type == "metrics": + # the metrics listener doesn't support IPv6 + bind_addresses.append("0.0.0.0") + else: + bind_addresses.extend(DEFAULT_BIND_ADDRESSES) + + return TCPListenerConfig(port, bind_addresses, listener_type, tls, http_config) _MANHOLE_SETTINGS_SCHEMA = { diff --git a/synapse/config/workers.py b/synapse/config/workers.py index 2580660b6c27..1dfbe27e89a2 100644 --- a/synapse/config/workers.py +++ b/synapse/config/workers.py @@ -19,15 +19,18 @@ import attr -from synapse.types import JsonDict - -from ._base import ( +from synapse.config._base import ( Config, ConfigError, RoutableShardedWorkerHandlingConfig, ShardedWorkerHandlingConfig, ) -from .server import DIRECT_TCP_ERROR, ListenerConfig, parse_listener_def +from synapse.config.server import ( + DIRECT_TCP_ERROR, + TCPListenerConfig, + parse_listener_def, +) +from synapse.types import JsonDict _DEPRECATED_WORKER_DUTY_OPTION_USED = """ The '%s' configuration option is deprecated and will be removed in a future @@ -161,7 +164,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: manhole = config.get("worker_manhole") if manhole: self.worker_listeners.append( - ListenerConfig( + TCPListenerConfig( port=manhole, bind_addresses=["127.0.0.1"], type="manhole", diff --git a/synapse/http/site.py b/synapse/http/site.py index 6a1dbf7f33b6..c530966ef336 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -19,6 +19,7 @@ import attr from zope.interface import implementer +from twisted.internet.address import UNIXAddress from twisted.internet.defer import Deferred from twisted.internet.interfaces import IAddress, IReactorTime from twisted.python.failure import Failure @@ -257,7 +258,7 @@ def render(self, resrc: Resource) -> None: request_id, request=ContextRequest( request_id=request_id, - ip_address=self.getClientAddress().host, + ip_address=self.get_client_ip_if_available(), site_tag=self.synapse_site.site_tag, # The requester is going to be unknown at this point. 
                requester=None,
@@ -414,7 +415,7 @@ def _started_processing(self, servlet_name: str) -> None:
 
         self.synapse_site.access_logger.debug(
             "%s - %s - Received request: %s %s",
-            self.getClientAddress().host,
+            self.get_client_ip_if_available(),
             self.synapse_site.site_tag,
             self.get_method(),
             self.get_redacted_uri(),
@@ -462,7 +463,7 @@ def _finished_processing(self) -> None:
             "%s - %s - {%s}"
             " Processed request: %.3fsec/%.3fsec (%.3fsec, %.3fsec) (%.3fsec/%.3fsec/%d)"
             ' %sB %s "%s %s %s" "%s" [%d dbevts]',
-            self.getClientAddress().host,
+            self.get_client_ip_if_available(),
             self.synapse_site.site_tag,
             requester,
             processing_time,
@@ -500,6 +501,26 @@ def _should_log_request(self) -> bool:
 
         return True
 
+    def get_client_ip_if_available(self) -> str:
+        """Logging helper. Return something useful when a client IP is not retrievable
+        from a unix socket.
+
+        In practice, this returns the socket file path on a SynapseRequest if using a
+        unix socket and the normal IP address for TCP sockets.
+
+        """
+        # getClientAddress().host returns a proper IP address for a TCP socket. But
+        # unix sockets have no concept of IP addresses or ports and return a
+        # UNIXAddress containing a 'None' value. In order to get something usable for
+        # logs (where this is used) get the unix socket file. getHost() returns a
+        # UNIXAddress containing a value of the socket file and has an instance
+        # variable of 'name' encoded as a byte string containing the path we want.
+        # Decode to utf-8 so it looks nice.
+        if isinstance(self.getClientAddress(), UNIXAddress):
+            return self.getHost().name.decode("utf-8")
+        else:
+            return self.getClientAddress().host
+
 
 class XForwardedForRequest(SynapseRequest):
     """Request object which honours proxy headers
diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py
index c09b9cf87ddc..5cee9c3194f0 100644
--- a/synapse/types/__init__.py
+++ b/synapse/types/__init__.py
@@ -50,6 +50,7 @@
     IReactorTCP,
     IReactorThreads,
     IReactorTime,
+    IReactorUNIX,
 )
 
 from synapse.api.errors import Codes, SynapseError
@@ -91,6 +92,7 @@ class ISynapseReactor(
     IReactorTCP,
     IReactorSSL,
+    IReactorUNIX,
     IReactorPluggableNameResolver,
     IReactorTime,
     IReactorCore,

From cf2f2934ad6c94a269e750684d1d8170b1173b7a Mon Sep 17 00:00:00 2001
From: Patrick Cloke
Date: Mon, 3 Apr 2023 13:20:32 -0400
Subject: [PATCH 051/106] Call appservices on modern paths, falling back to
 legacy paths. (#15317)

This uses the specced /_matrix/app/v1/... paths instead of the "legacy"
paths. If the homeserver receives an error it will retry using the legacy
path.
---
 changelog.d/15317.bugfix     |   1 +
 docs/upgrade.md              |  16 +++++
 synapse/appservice/api.py    | 133 ++++++++++++++++++++++++-----------
 synapse/http/client.py       |  13 ++--
 tests/appservice/test_api.py |  57 ++++++++++++++-
 5 files changed, 172 insertions(+), 48 deletions(-)
 create mode 100644 changelog.d/15317.bugfix

diff --git a/changelog.d/15317.bugfix b/changelog.d/15317.bugfix
new file mode 100644
index 000000000000..194e4c46c64c
--- /dev/null
+++ b/changelog.d/15317.bugfix
@@ -0,0 +1 @@
+Fix a long-standing bug that Synapse only used the [legacy appservice routes](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes).
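As an illustration of the fallback order this commit introduces, the sketch below prints the URLs that would be attempted for a user query. The appservice base URL and user ID are invented; only the prefix list and path shape come from the patch:

```python
import urllib.parse

service_url = "https://as.example.com"  # example value
user_id = "@alice:example.com"  # example value

# Stable path first, then the legacy root on an unknown-endpoint response.
for prefix in ["/_matrix/app/v1", ""]:
    print(f"{service_url}{prefix}/users/{urllib.parse.quote(user_id)}")
# https://as.example.com/_matrix/app/v1/users/%40alice%3Aexample.com
# https://as.example.com/users/%40alice%3Aexample.com
```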
diff --git a/docs/upgrade.md b/docs/upgrade.md
index f14444a400ea..1ddfc31ff603 100644
--- a/docs/upgrade.md
+++ b/docs/upgrade.md
@@ -88,6 +88,22 @@ process, for example:
     dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
     ```
 
+# Upgrading to v1.81.0
+
+## Application service path & authentication deprecations
+
+Synapse now attempts the versioned appservice paths before falling back to the
+[legacy paths](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes).
+Usage of the legacy routes should be considered deprecated.
+
+Additionally, Synapse has supported sending the application service access token
+via [the `Authorization` header](https://spec.matrix.org/v1.6/application-service-api/#authorization)
+since v1.70.0. For backwards compatibility it is *also* sent as the `access_token`
+query parameter. This is insecure and should be considered deprecated.
+
+A future version of Synapse (v1.88.0 or later) will remove support for legacy
+application service routes and query parameter authorization.
+
 # Upgrading to v1.80.0
 
 ## Reporting events error code change
diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py
index b27eedef99f2..86ddb1bb289e 100644
--- a/synapse/appservice/api.py
+++ b/synapse/appservice/api.py
@@ -17,6 +17,8 @@
 from typing import (
     TYPE_CHECKING,
     Any,
+    Awaitable,
+    Callable,
     Dict,
     Iterable,
     List,
@@ -24,10 +26,11 @@
     Optional,
     Sequence,
     Tuple,
+    TypeVar,
 )
 
 from prometheus_client import Counter
-from typing_extensions import TypeGuard
+from typing_extensions import Concatenate, ParamSpec, TypeGuard
 
 from synapse.api.constants import EventTypes, Membership, ThirdPartyEntityKind
 from synapse.api.errors import CodeMessageException, HttpResponseException
@@ -78,7 +81,11 @@
 HOUR_IN_MS = 60 * 60 * 1000
 
 
-APP_SERVICE_PREFIX = "/_matrix/app/unstable"
+APP_SERVICE_PREFIX = "/_matrix/app/v1"
+APP_SERVICE_UNSTABLE_PREFIX = "/_matrix/app/unstable"
+
+P = ParamSpec("P")
+R = TypeVar("R")
 
 
 def _is_valid_3pe_metadata(info: JsonDict) -> bool:
@@ -121,6 +128,47 @@ def __init__(self, hs: "HomeServer"):
             hs.get_clock(), "as_protocol_meta", timeout_ms=HOUR_IN_MS
         )
 
+    async def _send_with_fallbacks(
+        self,
+        service: "ApplicationService",
+        prefixes: List[str],
+        path: str,
+        func: Callable[Concatenate[str, P], Awaitable[R]],
+        *args: P.args,
+        **kwargs: P.kwargs,
+    ) -> R:
+        """
+        Attempt to call an application service with multiple paths, falling back
+        until one succeeds.
+
+        Args:
+            service: The application service, this provides the base URL.
+            prefixes: A list of paths to try in order for the requests.
+            path: A suffix to append to each prefix.
+            func: The function to call, the first argument will be the full
+                endpoint to fetch. Other arguments are provided by args/kwargs.
+
+        Returns:
+            The return value of func.
+        """
+        for i, prefix in enumerate(prefixes, start=1):
+            uri = f"{service.url}{prefix}{path}"
+            try:
+                return await func(uri, *args, **kwargs)
+            except HttpResponseException as e:
+                # If an error is received that is due to an unrecognised path,
+                # fallback to next path (if one exists). Otherwise, consider it
+                # a legitimate error and raise.
+                if i < len(prefixes) and is_unknown_endpoint(e):
+                    continue
+                raise
+            except Exception:
+                # Unexpected exceptions get sent to the caller.
+                raise
+
+        # The function should always exit via the return or raise above this.
+        raise RuntimeError("Unexpected fallback behaviour.
This should never be seen.") + async def query_user(self, service: "ApplicationService", user_id: str) -> bool: if service.url is None: return False @@ -128,10 +176,12 @@ async def query_user(self, service: "ApplicationService", user_id: str) -> bool: # This is required by the configuration. assert service.hs_token is not None - uri = service.url + ("/users/%s" % urllib.parse.quote(user_id)) try: - response = await self.get_json( - uri, + response = await self._send_with_fallbacks( + service, + [APP_SERVICE_PREFIX, ""], + f"/users/{urllib.parse.quote(user_id)}", + self.get_json, {"access_token": service.hs_token}, headers={"Authorization": [f"Bearer {service.hs_token}"]}, ) @@ -140,9 +190,9 @@ async def query_user(self, service: "ApplicationService", user_id: str) -> bool: except CodeMessageException as e: if e.code == 404: return False - logger.warning("query_user to %s received %s", uri, e.code) + logger.warning("query_user to %s received %s", service.url, e.code) except Exception as ex: - logger.warning("query_user to %s threw exception %s", uri, ex) + logger.warning("query_user to %s threw exception %s", service.url, ex) return False async def query_alias(self, service: "ApplicationService", alias: str) -> bool: @@ -152,21 +202,23 @@ async def query_alias(self, service: "ApplicationService", alias: str) -> bool: # This is required by the configuration. assert service.hs_token is not None - uri = service.url + ("/rooms/%s" % urllib.parse.quote(alias)) try: - response = await self.get_json( - uri, + response = await self._send_with_fallbacks( + service, + [APP_SERVICE_PREFIX, ""], + f"/rooms/{urllib.parse.quote(alias)}", + self.get_json, {"access_token": service.hs_token}, headers={"Authorization": [f"Bearer {service.hs_token}"]}, ) if response is not None: # just an empty json object return True except CodeMessageException as e: - logger.warning("query_alias to %s received %s", uri, e.code) + logger.warning("query_alias to %s received %s", service.url, e.code) if e.code == 404: return False except Exception as ex: - logger.warning("query_alias to %s threw exception %s", uri, ex) + logger.warning("query_alias to %s threw exception %s", service.url, ex) return False async def query_3pe( @@ -188,25 +240,24 @@ async def query_3pe( # This is required by the configuration. 
assert service.hs_token is not None - uri = "%s%s/thirdparty/%s/%s" % ( - service.url, - APP_SERVICE_PREFIX, - kind, - urllib.parse.quote(protocol), - ) try: args: Mapping[Any, Any] = { **fields, b"access_token": service.hs_token, } - response = await self.get_json( - uri, + response = await self._send_with_fallbacks( + service, + [APP_SERVICE_PREFIX, APP_SERVICE_UNSTABLE_PREFIX], + f"/thirdparty/{kind}/{urllib.parse.quote(protocol)}", + self.get_json, args=args, headers={"Authorization": [f"Bearer {service.hs_token}"]}, ) if not isinstance(response, list): logger.warning( - "query_3pe to %s returned an invalid response %r", uri, response + "query_3pe to %s returned an invalid response %r", + service.url, + response, ) return [] @@ -216,12 +267,12 @@ async def query_3pe( ret.append(r) else: logger.warning( - "query_3pe to %s returned an invalid result %r", uri, r + "query_3pe to %s returned an invalid result %r", service.url, r ) return ret except Exception as ex: - logger.warning("query_3pe to %s threw exception %s", uri, ex) + logger.warning("query_3pe to %s threw exception %s", service.url, ex) return [] async def get_3pe_protocol( @@ -233,21 +284,20 @@ async def get_3pe_protocol( async def _get() -> Optional[JsonDict]: # This is required by the configuration. assert service.hs_token is not None - uri = "%s%s/thirdparty/protocol/%s" % ( - service.url, - APP_SERVICE_PREFIX, - urllib.parse.quote(protocol), - ) try: - info = await self.get_json( - uri, + info = await self._send_with_fallbacks( + service, + [APP_SERVICE_PREFIX, APP_SERVICE_UNSTABLE_PREFIX], + f"/thirdparty/protocol/{urllib.parse.quote(protocol)}", + self.get_json, {"access_token": service.hs_token}, headers={"Authorization": [f"Bearer {service.hs_token}"]}, ) if not _is_valid_3pe_metadata(info): logger.warning( - "query_3pe_protocol to %s did not return a valid result", uri + "query_3pe_protocol to %s did not return a valid result", + service.url, ) return None @@ -260,7 +310,9 @@ async def _get() -> Optional[JsonDict]: return info except Exception as ex: - logger.warning("query_3pe_protocol to %s threw exception %s", uri, ex) + logger.warning( + "query_3pe_protocol to %s threw exception %s", service.url, ex + ) return None key = (service.id, protocol) @@ -274,7 +326,7 @@ async def ping(self, service: "ApplicationService", txn_id: Optional[str]) -> No assert service.hs_token is not None await self.post_json_get_json( - uri=service.url + "/_matrix/app/unstable/fi.mau.msc2659/ping", + uri=f"{service.url}{APP_SERVICE_UNSTABLE_PREFIX}/fi.mau.msc2659/ping", post_json={"transaction_id": txn_id}, headers={"Authorization": [f"Bearer {service.hs_token}"]}, ) @@ -318,8 +370,6 @@ async def push_bulk( ) txn_id = 0 - uri = service.url + ("/transactions/%s" % urllib.parse.quote(str(txn_id))) - # Never send ephemeral events to appservices that do not support it body: JsonDict = {"events": serialized_events} if service.supports_ephemeral: @@ -351,8 +401,11 @@ async def push_bulk( } try: - await self.put_json( - uri=uri, + await self._send_with_fallbacks( + service, + [APP_SERVICE_PREFIX, ""], + f"/transactions/{urllib.parse.quote(str(txn_id))}", + self.put_json, json_body=body, args={"access_token": service.hs_token}, headers={"Authorization": [f"Bearer {service.hs_token}"]}, @@ -360,7 +413,7 @@ async def push_bulk( if logger.isEnabledFor(logging.DEBUG): logger.debug( "push_bulk to %s succeeded! 
events=%s", - uri, + service.url, [event.get("event_id") for event in events], ) sent_transactions_counter.labels(service.id).inc() @@ -371,7 +424,7 @@ async def push_bulk( except CodeMessageException as e: logger.warning( "push_bulk to %s received code=%s msg=%s", - uri, + service.url, e.code, e.msg, exc_info=logger.isEnabledFor(logging.DEBUG), @@ -379,7 +432,7 @@ async def push_bulk( except Exception as ex: logger.warning( "push_bulk to %s threw exception(%s) %s args=%s", - uri, + service.url, type(ex).__name__, ex, ex.args, diff --git a/synapse/http/client.py b/synapse/http/client.py index 5ee55981d9f8..b5cf8123ce84 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -982,20 +982,21 @@ def is_unknown_endpoint( """ if synapse_error is None: synapse_error = e.to_synapse_error() - # MSC3743 specifies that servers should return a 404 or 405 with an errcode + + # Matrix v1.6 specifies that servers should return a 404 or 405 with an errcode # of M_UNRECOGNIZED when they receive a request to an unknown endpoint or # to an unknown method, respectively. # - # Older versions of servers don't properly handle this. This needs to be - # rather specific as some endpoints truly do return 404 errors. + # Older versions of servers don't return proper errors, so be graceful. But, + # also handle that some endpoints truly do return 404 errors. return ( # 404 is an unknown endpoint, 405 is a known endpoint, but unknown method. (e.code == 404 or e.code == 405) and ( - # Older Dendrites returned a text body or empty body. - # Older Conduit returned an empty body. + # Consider empty body or non-JSON bodies to be unrecognised (matches + # older Dendrites & Conduits). not e.response - or e.response == b"404 page not found" + or not e.response.startswith(b"{") # The proper response JSON with M_UNRECOGNIZED errcode. or synapse_error.errcode == Codes.UNRECOGNIZED ) diff --git a/tests/appservice/test_api.py b/tests/appservice/test_api.py index 0dd02b7d5839..7deb923a280d 100644 --- a/tests/appservice/test_api.py +++ b/tests/appservice/test_api.py @@ -16,6 +16,7 @@ from twisted.test.proto_helpers import MemoryReactor +from synapse.api.errors import HttpResponseException from synapse.appservice import ApplicationService from synapse.server import HomeServer from synapse.types import JsonDict @@ -64,8 +65,8 @@ def test_query_3pe_authenticates_token(self) -> None: } ] - URL_USER = f"{URL}/_matrix/app/unstable/thirdparty/user/{PROTOCOL}" - URL_LOCATION = f"{URL}/_matrix/app/unstable/thirdparty/location/{PROTOCOL}" + URL_USER = f"{URL}/_matrix/app/v1/thirdparty/user/{PROTOCOL}" + URL_LOCATION = f"{URL}/_matrix/app/v1/thirdparty/location/{PROTOCOL}" self.request_url = None @@ -106,6 +107,58 @@ async def get_json( self.assertEqual(self.request_url, URL_LOCATION) self.assertEqual(result, SUCCESS_RESULT_LOCATION) + def test_fallback(self) -> None: + """ + Tests that the fallback to legacy URLs works. + """ + SUCCESS_RESULT_USER = [ + { + "protocol": PROTOCOL, + "userid": "@a:user", + "fields": { + "more": "fields", + }, + } + ] + + URL_USER = f"{URL}/_matrix/app/v1/thirdparty/user/{PROTOCOL}" + FALLBACK_URL_USER = f"{URL}/_matrix/app/unstable/thirdparty/user/{PROTOCOL}" + + self.request_url = None + self.v1_seen = False + + async def get_json( + url: str, + args: Mapping[Any, Any], + headers: Mapping[Union[str, bytes], Sequence[Union[str, bytes]]], + ) -> List[JsonDict]: + # Ensure the access token is passed as both a header and query arg. 
+ if not headers.get("Authorization") or not args.get(b"access_token"): + raise RuntimeError("Access token not provided") + + self.assertEqual(headers.get("Authorization"), [f"Bearer {TOKEN}"]) + self.assertEqual(args.get(b"access_token"), TOKEN) + self.request_url = url + if url == URL_USER: + self.v1_seen = True + raise HttpResponseException(404, "NOT_FOUND", b"NOT_FOUND") + elif url == FALLBACK_URL_USER: + return SUCCESS_RESULT_USER + else: + raise RuntimeError( + "URL provided was invalid. This should never be seen." + ) + + # We assign to a method, which mypy doesn't like. + self.api.get_json = Mock(side_effect=get_json) # type: ignore[assignment] + + result = self.get_success( + self.api.query_3pe(self.service, "user", PROTOCOL, {b"some": [b"field"]}) + ) + self.assertTrue(self.v1_seen) + self.assertEqual(self.request_url, FALLBACK_URL_USER) + self.assertEqual(result, SUCCESS_RESULT_USER) + def test_claim_keys(self) -> None: """ Tests that the /keys/claim response is properly parsed for missing From 8aa121c2be699af439f48af785c23cda189c9198 Mon Sep 17 00:00:00 2001 From: Sean Quah <8349537+squahtx@users.noreply.github.com> Date: Mon, 3 Apr 2023 19:37:26 +0100 Subject: [PATCH 052/106] Build Debian packages for Ubuntu 23.04 Lunar Lobster (#15381) Signed-off-by: Sean Quah --- changelog.d/15381.feature | 1 + scripts-dev/build_debian_packages.py | 1 + 2 files changed, 2 insertions(+) create mode 100644 changelog.d/15381.feature diff --git a/changelog.d/15381.feature b/changelog.d/15381.feature new file mode 100644 index 000000000000..ba5b578564e9 --- /dev/null +++ b/changelog.d/15381.feature @@ -0,0 +1 @@ +Build Debian packages for Ubuntu 23.04 (Lunar Lobster). diff --git a/scripts-dev/build_debian_packages.py b/scripts-dev/build_debian_packages.py index 744230019610..ede766501100 100755 --- a/scripts-dev/build_debian_packages.py +++ b/scripts-dev/build_debian_packages.py @@ -28,6 +28,7 @@ "ubuntu:focal", # 20.04 LTS (our EOL forced by Py38 on 2024-10-14) "ubuntu:jammy", # 22.04 LTS (EOL 2027-04) "ubuntu:kinetic", # 22.10 (EOL 2023-07-20) + "ubuntu:lunar", # 23.04 (EOL 2024-01) ) DESC = """\ From c0772b4461ff396b6a09b4a17368d61a2810c452 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Tue, 4 Apr 2023 13:08:17 +0100 Subject: [PATCH 053/106] Note that Synapse 1.74 queued a user dir rebuild (#15386) * Note that Synapse 1.74 queued a user dir rebuild * Changelog --- changelog.d/15386.doc | 1 + docs/upgrade.md | 11 +++++++++++ 2 files changed, 12 insertions(+) create mode 100644 changelog.d/15386.doc diff --git a/changelog.d/15386.doc b/changelog.d/15386.doc new file mode 100644 index 000000000000..5c032193aeef --- /dev/null +++ b/changelog.d/15386.doc @@ -0,0 +1 @@ +Note that Synapse 1.74 queued a rebuild of the user directory tables. diff --git a/docs/upgrade.md b/docs/upgrade.md index 1ddfc31ff603..0886b0311571 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -199,6 +199,17 @@ Docker images and Debian packages need nothing specific as they already include or specify ICU as an explicit dependency. +## User directory rebuild + +Synapse 1.74 queues a background update +[to rebuild the user directory](https://github.com/matrix-org/synapse/pull/14643), +in order to fix missing or erroneous entries. + +When this update begins, the user directory will be cleared out and rebuilt from +scratch. User directory lookups will be incomplete until the rebuild completes. 
+Admins can monitor the rebuild's progress by using the +[Background update Admin API](usage/administration/admin_api/background_updates.md#status). + # Upgrading to v1.73.0 ## Legacy Prometheus metric names have now been removed From 89a71e73905ffa1c97ae8be27d521cd2ef3f3a0c Mon Sep 17 00:00:00 2001 From: Sean Quah <8349537+squahtx@users.noreply.github.com> Date: Tue, 4 Apr 2023 13:10:25 +0100 Subject: [PATCH 054/106] Fix a rare bug where initial /syncs would fail (#15383) This change fixes a rare bug where initial /syncs would fail with a `KeyError` under the following circumstances: 1. A user fast joins a remote room. 2. The user is kicked from the room before the room's full state has been synced. 3. A second local user fast joins the room. 4. Events are backfilled into the room with a higher topological ordering than the original user's leave. They are assigned a negative stream ordering. It's not clear how backfill happened here, since it is expected to be equivalent to syncing the full state. 5. The second local user leaves the room before the room's full state has been synced. The homeserver does not complete the sync. 6. The original user performs an initial /sync with lazy_load_members enabled. * Because they were kicked from the room, the room is included in the /sync response even though the include_leave option is not specified. * To populate the room's timeline, `_load_filtered_recents` / `get_recent_events_for_room` fetches events with a lower stream ordering than the leave event and picks the ones with the highest topological orderings (which are most recent). This captures the backfilled events after the leave, since they have a negative stream ordering. These events are filtered out of the timeline, since the user was not in the room at the time and cannot view them. The sync code ends up with an empty timeline for the room that notably does not include the user's leave event. This seems buggy, but at least we don't disclose events the user isn't allowed to see. * Normally, `compute_state_delta` would fetch the state at the start and end of the room's timeline to generate the sync response. Since the timeline is empty, it fetches the state at `min(now, last event in the room)`, which corresponds with the second user's leave. The state during the entirety of the second user's membership does not include the membership for the first user because of partial state. This part is also questionable, since we are fetching state from outside the bounds of the user's membership. * `compute_state_delta` then tries and fails to find the user's membership in the auth events of timeline events. Because there is no timeline event whose auth events are expected to contain the user's membership, a `KeyError` is raised. Also contains a drive-by fix for a separate unlikely race condition. Signed-off-by: Sean Quah --- changelog.d/15383.bugfix | 1 + synapse/handlers/sync.py | 24 +++++++++++++++++++----- 2 files changed, 20 insertions(+), 5 deletions(-) create mode 100644 changelog.d/15383.bugfix diff --git a/changelog.d/15383.bugfix b/changelog.d/15383.bugfix new file mode 100644 index 000000000000..28c66ef45461 --- /dev/null +++ b/changelog.d/15383.bugfix @@ -0,0 +1 @@ +Fix a rare bug introduced in Synapse 1.66.0 where initial syncs would fail when the user had been kicked from a faster joined room that had not finished syncing. 
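The crux of the fix in the diff below is which memberships the lazy-loading state filter asks for. A minimal sketch of the fixed behaviour, assuming synapse's `StateFilter` API as the patch uses it (the user IDs are invented):

```python
from synapse.api.constants import EventTypes
from synapse.types.state import StateFilter

members_to_fetch = {"@sender1:example.com", "@sender2:example.com"}  # timeline senders
syncing_user = "@me:example.com"

# On a full-state, lazy-loading sync, request our own membership explicitly:
# in a partial-state room neither the room state nor the timeline's auth
# events are guaranteed to contain it.
state_filter = StateFilter.from_lazy_load_member_list(
    members_to_fetch | {syncing_user}
)

# The filter selects all non-member state, plus m.room.member events for the
# listed users only, now including ourselves.
assert state_filter.include_others
assert syncing_user in state_filter.types.get(EventTypes.Member, ())
```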
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 9f5b83ed5492..64d298408d21 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -943,6 +943,8 @@ async def compute_state_delta( timeline_state = {} + # Membership events to fetch that can be found in the room state, or in + # the case of partial state rooms, the auth events of timeline events. members_to_fetch = set() first_event_by_sender_map = {} for event in batch.events: @@ -964,9 +966,19 @@ async def compute_state_delta( # (if we are) to fix https://github.com/vector-im/riot-web/issues/7209 # We only need apply this on full state syncs given we disabled # LL for incr syncs in #3840. - members_to_fetch.add(sync_config.user.to_string()) - - state_filter = StateFilter.from_lazy_load_member_list(members_to_fetch) + # We don't insert ourselves into `members_to_fetch`, because in some + # rare cases (an empty event batch with a now_token after the user's + # leave in a partial state room which another local user has + # joined), the room state will be missing our membership and there + # is no guarantee that our membership will be in the auth events of + # timeline events when the room is partial stated. + state_filter = StateFilter.from_lazy_load_member_list( + members_to_fetch.union((sync_config.user.to_string(),)) + ) + else: + state_filter = StateFilter.from_lazy_load_member_list( + members_to_fetch + ) # We are happy to use partial state to compute the `/sync` response. # Since partial state may not include the lazy-loaded memberships we @@ -988,7 +1000,9 @@ async def compute_state_delta( # sync's timeline and the start of the current sync's timeline. # See the docstring above for details. state_ids: StateMap[str] - + # We need to know whether the state we fetch may be partial, so check + # whether the room is partial stated *before* fetching it. + is_partial_state_room = await self.store.is_partial_state_room(room_id) if full_state: if batch: state_at_timeline_end = ( @@ -1119,7 +1133,7 @@ async def compute_state_delta( # If we only have partial state for the room, `state_ids` may be missing the # memberships we wanted. We attempt to find some by digging through the auth # events of timeline events. - if lazy_load_members and await self.store.is_partial_state_room(room_id): + if lazy_load_members and is_partial_state_room: assert members_to_fetch is not None assert first_event_by_sender_map is not None From 79d2e2e79c97b21a4b3b786594d0d9ebebd33964 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 4 Apr 2023 14:11:34 +0100 Subject: [PATCH 055/106] Speed up membership queries for users with forgotten rooms (#15385) --- changelog.d/15385.misc | 1 + synapse/storage/databases/main/roommember.py | 12 +++++++++++- .../delta/74/03_room_membership_index.sql | 19 +++++++++++++++++++ 3 files changed, 31 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15385.misc create mode 100644 synapse/storage/schema/main/delta/74/03_room_membership_index.sql diff --git a/changelog.d/15385.misc b/changelog.d/15385.misc new file mode 100644 index 000000000000..76350c398d6f --- /dev/null +++ b/changelog.d/15385.misc @@ -0,0 +1 @@ +Speed up membership queries for users with forgotten rooms. 
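Before the diff itself, a hedged sketch of the short-circuit this patch adds: the forgotten-rooms lookup can be skipped entirely when the caller only asks about join/invite memberships, because users cannot forget rooms they are currently joined to or invited to. The membership constants and the `get_forgotten_rooms` callable below are stand-ins for Synapse's real API, not part of it.

from typing import AbstractSet, Awaitable, Callable, Iterable

# Stand-ins for the synapse.api.constants.Membership values used in the hunk.
JOIN = "join"
INVITE = "invite"


async def rooms_to_exclude_for(
    user_id: str,
    membership_list: Iterable[str],
    get_forgotten_rooms: Callable[[str], Awaitable[AbstractSet[str]]],
) -> AbstractSet[str]:
    # Only query forgotten rooms when some membership other than join or
    # invite (leave, ban, knock, ...) is requested; this mirrors the
    # `any(m not in ...)` guard in the roommember.py hunk below.
    if all(m in (JOIN, INVITE) for m in membership_list):
        return set()
    return await get_forgotten_rooms(user_id)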
diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index 694a5b802c7c..daad58291a8b 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -419,7 +419,11 @@ async def get_rooms_for_local_user_where_membership_is( ) # Now we filter out forgotten and excluded rooms - rooms_to_exclude = await self.get_forgotten_rooms_for_user(user_id) + rooms_to_exclude: AbstractSet[str] = set() + + # Users can't forget joined/invited rooms, so we skip the check for such lookups. + if any(m not in (Membership.JOIN, Membership.INVITE) for m in membership_list): + rooms_to_exclude = await self.get_forgotten_rooms_for_user(user_id) if excluded_rooms is not None: # Take a copy to avoid mutating the in-cache set @@ -1391,6 +1395,12 @@ def __init__( columns=["user_id", "room_id"], where_clause="forgotten = 1", ) + self.db_pool.updates.register_background_index_update( + "room_membership_user_room_index", + index_name="room_membership_user_room_idx", + table="room_memberships", + columns=["user_id", "room_id"], + ) async def _background_add_membership_profile( self, progress: JsonDict, batch_size: int diff --git a/synapse/storage/schema/main/delta/74/03_room_membership_index.sql b/synapse/storage/schema/main/delta/74/03_room_membership_index.sql new file mode 100644 index 000000000000..81a7d9ff9cad --- /dev/null +++ b/synapse/storage/schema/main/delta/74/03_room_membership_index.sql @@ -0,0 +1,19 @@ +/* Copyright 2023 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- Add an index to `room_memberships(user_id, room_id)` to make querying for +-- forgotten rooms faster.
+INSERT INTO background_updates (ordering, update_name, progress_json) VALUES + (7403, 'room_membership_user_room_index', '{}'); From 735e4d1f9d064ab69aa642dd989ada0cda9985f9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 4 Apr 2023 14:29:08 +0100 Subject: [PATCH 056/106] 1.81.0rc1 --- CHANGES.md | 69 +++++++++++++++++++++++++++++++++++++++ changelog.d/14978.feature | 1 - changelog.d/15113.misc | 1 - changelog.d/15243.feature | 1 - changelog.d/15265.misc | 1 - changelog.d/15280.misc | 1 - changelog.d/15285.misc | 1 - changelog.d/15295.bugfix | 1 - changelog.d/15297.bugfix | 1 - changelog.d/15302.misc | 1 - changelog.d/15303.misc | 1 - changelog.d/15304.misc | 1 - changelog.d/15306.bugfix | 2 -- changelog.d/15309.bugfix | 1 - changelog.d/15311.misc | 1 - changelog.d/15314.feature | 1 - changelog.d/15316.misc | 1 - changelog.d/15317.bugfix | 1 - changelog.d/15319.misc | 1 - changelog.d/15321.feature | 1 - changelog.d/15323.bugfix | 1 - changelog.d/15324.misc | 1 - changelog.d/15325.misc | 1 - changelog.d/15326.misc | 1 - changelog.d/15327.misc | 1 - changelog.d/15328.misc | 1 - changelog.d/15329.misc | 1 - changelog.d/15330.misc | 1 - changelog.d/15331.feature | 1 - changelog.d/15332.bugfix | 1 - changelog.d/15334.misc | 1 - changelog.d/15336.misc | 1 - changelog.d/15339.misc | 1 - changelog.d/15340.misc | 1 - changelog.d/15341.doc | 1 - changelog.d/15349.bugfix | 1 - changelog.d/15351.bugfix | 1 - changelog.d/15352.bugfix | 1 - changelog.d/15353.misc | 1 - changelog.d/15354.misc | 1 - changelog.d/15358.misc | 1 - changelog.d/15369.misc | 1 - changelog.d/15370.misc | 1 - changelog.d/15371.misc | 1 - changelog.d/15381.feature | 1 - changelog.d/15383.bugfix | 1 - changelog.d/15385.misc | 1 - changelog.d/15386.doc | 1 - debian/changelog | 6 ++++ pyproject.toml | 2 +- 50 files changed, 76 insertions(+), 49 deletions(-) delete mode 100644 changelog.d/14978.feature delete mode 100644 changelog.d/15113.misc delete mode 100644 changelog.d/15243.feature delete mode 100644 changelog.d/15265.misc delete mode 100644 changelog.d/15280.misc delete mode 100644 changelog.d/15285.misc delete mode 100644 changelog.d/15295.bugfix delete mode 100644 changelog.d/15297.bugfix delete mode 100644 changelog.d/15302.misc delete mode 100644 changelog.d/15303.misc delete mode 100644 changelog.d/15304.misc delete mode 100644 changelog.d/15306.bugfix delete mode 100644 changelog.d/15309.bugfix delete mode 100644 changelog.d/15311.misc delete mode 100644 changelog.d/15314.feature delete mode 100644 changelog.d/15316.misc delete mode 100644 changelog.d/15317.bugfix delete mode 100644 changelog.d/15319.misc delete mode 100644 changelog.d/15321.feature delete mode 100644 changelog.d/15323.bugfix delete mode 100644 changelog.d/15324.misc delete mode 100644 changelog.d/15325.misc delete mode 100644 changelog.d/15326.misc delete mode 100644 changelog.d/15327.misc delete mode 100644 changelog.d/15328.misc delete mode 100644 changelog.d/15329.misc delete mode 100644 changelog.d/15330.misc delete mode 100644 changelog.d/15331.feature delete mode 100644 changelog.d/15332.bugfix delete mode 100644 changelog.d/15334.misc delete mode 100644 changelog.d/15336.misc delete mode 100644 changelog.d/15339.misc delete mode 100644 changelog.d/15340.misc delete mode 100644 changelog.d/15341.doc delete mode 100644 changelog.d/15349.bugfix delete mode 100644 changelog.d/15351.bugfix delete mode 100644 changelog.d/15352.bugfix delete mode 100644 changelog.d/15353.misc delete mode 100644 changelog.d/15354.misc delete mode 100644 
changelog.d/15358.misc delete mode 100644 changelog.d/15369.misc delete mode 100644 changelog.d/15370.misc delete mode 100644 changelog.d/15371.misc delete mode 100644 changelog.d/15381.feature delete mode 100644 changelog.d/15383.bugfix delete mode 100644 changelog.d/15385.misc delete mode 100644 changelog.d/15386.doc diff --git a/CHANGES.md b/CHANGES.md index 5f2a4a41eb42..1f77fab96078 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,72 @@ +Synapse 1.81.0rc1 (2023-04-04) +============================== + +Features +-------- + +- Add the ability to enable/disable registrations when in the OIDC flow. ([\#14978](https://github.com/matrix-org/synapse/issues/14978)) +- Add a primitive helper script for listing worker endpoints. ([\#15243](https://github.com/matrix-org/synapse/issues/15243)) +- Experimental support for passing One Time Key and device key requests to application services ([MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983) and [MSC3984](https://github.com/matrix-org/matrix-spec-proposals/pull/3984)). ([\#15314](https://github.com/matrix-org/synapse/issues/15314), [\#15321](https://github.com/matrix-org/synapse/issues/15321)) +- Allow loading `/password_policy` endpoint on workers. ([\#15331](https://github.com/matrix-org/synapse/issues/15331)) +- Build Debian packages for Ubuntu 23.04 (Lunar Lobster). ([\#15381](https://github.com/matrix-org/synapse/issues/15381)) + + +Bugfixes +-------- + +- Fix a long-standing bug where edits of non-`m.room.message` events would not be correctly bundled. ([\#15295](https://github.com/matrix-org/synapse/issues/15295)) +- Fix a bug introduced in Synapse v1.55.0 which could delay remote homeservers being able to decrypt encrypted messages sent by local users. ([\#15297](https://github.com/matrix-org/synapse/issues/15297)) +- Add a check to [SQLite port_db script](https://matrix-org.github.io/synapse/latest/postgres.html#porting-from-sqlite) + to ensure that the sqlite database passed to the script exists before trying to port from it. ([\#15306](https://github.com/matrix-org/synapse/issues/15306)) +- Fix a bug introduced in Synapse 1.76.0 where responses from worker deployments could include an internal `_INT_STREAM_POS` key. ([\#15309](https://github.com/matrix-org/synapse/issues/15309)) +- Fix a long-standing bug that Synapse only used the [legacy appservice routes](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes). ([\#15317](https://github.com/matrix-org/synapse/issues/15317)) +- Fix a long-standing bug preventing users from joining rooms, that they had been unbanned from, over federation. Contributed by Nico. ([\#15323](https://github.com/matrix-org/synapse/issues/15323)) +- Fix bug in worker mode where on a rolling restart of workers the "typing" worker would consume 100% CPU until it got restarted. ([\#15332](https://github.com/matrix-org/synapse/issues/15332)) +- Fix a long-standing bug where some to_device messages could be dropped when using workers. ([\#15349](https://github.com/matrix-org/synapse/issues/15349)) +- Fix a bug introduced in Synapse 1.70.0 where the background sync from a faster join could spin for hours when one of the events involved had been marked for backoff. ([\#15351](https://github.com/matrix-org/synapse/issues/15351)) +- Fix missing app variable in mail subject for password resets. Contributed by Cyberes.
([\#15352](https://github.com/matrix-org/synapse/issues/15352)) +- Fix a rare bug introduced in Synapse 1.66.0 where initial syncs would fail when the user had been kicked from a faster joined room that had not finished syncing. ([\#15383](https://github.com/matrix-org/synapse/issues/15383)) + + +Improved Documentation +---------------------- + +- Fix a typo in login requests ratelimit defaults. ([\#15341](https://github.com/matrix-org/synapse/issues/15341)) +- Note that Synapse 1.74 queued a rebuild of the user directory tables. ([\#15386](https://github.com/matrix-org/synapse/issues/15386)) + + +Internal Changes +---------------- + +- Use `immutabledict` instead of `frozendict`. ([\#15113](https://github.com/matrix-org/synapse/issues/15113)) +- Add developer documentation for the Federation Sender and add a documentation mechanism using Sphinx. ([\#15265](https://github.com/matrix-org/synapse/issues/15265), [\#15336](https://github.com/matrix-org/synapse/issues/15336)) +- Make the pushers rely on the `device_id` instead of the `access_token_id` for various operations. ([\#15280](https://github.com/matrix-org/synapse/issues/15280)) +- Bump sentry-sdk from 1.15.0 to 1.17.0. ([\#15285](https://github.com/matrix-org/synapse/issues/15285)) +- Allow running the Twisted trunk job against other branches. ([\#15302](https://github.com/matrix-org/synapse/issues/15302)) +- Remind the releaser to ask for changelog feedback in [#synapse-dev](https://matrix.to/#/#synapse-dev:matrix.org). ([\#15303](https://github.com/matrix-org/synapse/issues/15303)) +- Bump dtolnay/rust-toolchain from e12eda571dc9a5ee5d58eecf4738ec291c66f295 to fc3253060d0c959bea12a59f10f8391454a0b02d. ([\#15304](https://github.com/matrix-org/synapse/issues/15304)) +- Reject events with an invalid "mentions" property pert [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952). ([\#15311](https://github.com/matrix-org/synapse/issues/15311)) +- As an optimisation, use `TRUNCATE` on Postgres when clearing the user directory tables. ([\#15316](https://github.com/matrix-org/synapse/issues/15316)) +- Fix `.gitignore` rule for the Complement source tarball downloaded automatically by `complement.sh`. ([\#15319](https://github.com/matrix-org/synapse/issues/15319)) +- Bump serde from 1.0.157 to 1.0.158. ([\#15324](https://github.com/matrix-org/synapse/issues/15324)) +- Bump regex from 1.7.1 to 1.7.3. ([\#15325](https://github.com/matrix-org/synapse/issues/15325)) +- Bump types-pyopenssl from 23.0.0.4 to 23.1.0.0. ([\#15326](https://github.com/matrix-org/synapse/issues/15326)) +- Bump furo from 2022.12.7 to 2023.3.23. ([\#15327](https://github.com/matrix-org/synapse/issues/15327)) +- Bump ruff from 0.0.252 to 0.0.259. ([\#15328](https://github.com/matrix-org/synapse/issues/15328)) +- Bump cryptography from 40.0.0 to 40.0.1. ([\#15329](https://github.com/matrix-org/synapse/issues/15329)) +- Bump mypy-zope from 0.9.0 to 0.9.1. ([\#15330](https://github.com/matrix-org/synapse/issues/15330)) +- Speed up unit tests when using SQLite3. ([\#15334](https://github.com/matrix-org/synapse/issues/15334)) +- Speed up pydantic CI job. ([\#15339](https://github.com/matrix-org/synapse/issues/15339)) +- Speed up sample config CI job. ([\#15340](https://github.com/matrix-org/synapse/issues/15340)) +- Add experimental support for Unix sockets. Contributed by Jason Little. ([\#15353](https://github.com/matrix-org/synapse/issues/15353)) +- Add some clarification to the doc/comments regarding TCP replication. 
([\#15354](https://github.com/matrix-org/synapse/issues/15354)) +- Fix copyright year in SSO footer template. ([\#15358](https://github.com/matrix-org/synapse/issues/15358)) +- Bump peaceiris/actions-gh-pages from 3.9.2 to 3.9.3. ([\#15369](https://github.com/matrix-org/synapse/issues/15369)) +- Bump serde from 1.0.158 to 1.0.159. ([\#15370](https://github.com/matrix-org/synapse/issues/15370)) +- Bump serde_json from 1.0.94 to 1.0.95. ([\#15371](https://github.com/matrix-org/synapse/issues/15371)) +- Speed up membership queries for users with forgotten rooms. ([\#15385](https://github.com/matrix-org/synapse/issues/15385)) + + Synapse 1.80.0 (2023-03-28) =========================== diff --git a/changelog.d/14978.feature b/changelog.d/14978.feature deleted file mode 100644 index 14f6fee65896..000000000000 --- a/changelog.d/14978.feature +++ /dev/null @@ -1 +0,0 @@ -Add the ability to enable/disable registrations when in the OIDC flow. \ No newline at end of file diff --git a/changelog.d/15113.misc b/changelog.d/15113.misc deleted file mode 100644 index 6917dd56528b..000000000000 --- a/changelog.d/15113.misc +++ /dev/null @@ -1 +0,0 @@ -Use `immutabledict` instead of `frozendict`. diff --git a/changelog.d/15243.feature b/changelog.d/15243.feature deleted file mode 100644 index c45e974c4c7b..000000000000 --- a/changelog.d/15243.feature +++ /dev/null @@ -1 +0,0 @@ -Add a primitive helper script for listing worker endpoints. \ No newline at end of file diff --git a/changelog.d/15265.misc b/changelog.d/15265.misc deleted file mode 100644 index 355c3cae2b7f..000000000000 --- a/changelog.d/15265.misc +++ /dev/null @@ -1 +0,0 @@ -Add developer documentation for the Federation Sender and add a documentation mechanism using Sphinx. \ No newline at end of file diff --git a/changelog.d/15280.misc b/changelog.d/15280.misc deleted file mode 100644 index 41d56b0cf0b4..000000000000 --- a/changelog.d/15280.misc +++ /dev/null @@ -1 +0,0 @@ -Make the pushers rely on the `device_id` instead of the `access_token_id` for various operations. diff --git a/changelog.d/15285.misc b/changelog.d/15285.misc deleted file mode 100644 index ad635ef3f29d..000000000000 --- a/changelog.d/15285.misc +++ /dev/null @@ -1 +0,0 @@ -Bump sentry-sdk from 1.15.0 to 1.17.0. diff --git a/changelog.d/15295.bugfix b/changelog.d/15295.bugfix deleted file mode 100644 index af43035383bf..000000000000 --- a/changelog.d/15295.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a long-standing bug where edits of non-`m.room.message` events would not be correctly bundled. diff --git a/changelog.d/15297.bugfix b/changelog.d/15297.bugfix deleted file mode 100644 index b5735fe4da85..000000000000 --- a/changelog.d/15297.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug introduced in Synapse v1.55.0 which could delay remote homeservers being able to decrypt encrypted messages sent by local users. diff --git a/changelog.d/15302.misc b/changelog.d/15302.misc deleted file mode 100644 index aadadf4b43ed..000000000000 --- a/changelog.d/15302.misc +++ /dev/null @@ -1 +0,0 @@ -Allow running the Twisted trunk job against other branches. diff --git a/changelog.d/15303.misc b/changelog.d/15303.misc deleted file mode 100644 index 977b9dcd02f8..000000000000 --- a/changelog.d/15303.misc +++ /dev/null @@ -1 +0,0 @@ -Remind the releaser to ask for changelog feedback in [#synapse-dev](https://matrix.to/#/#synapse-dev:matrix.org). 
diff --git a/changelog.d/15304.misc b/changelog.d/15304.misc deleted file mode 100644 index 38abb87a88bf..000000000000 --- a/changelog.d/15304.misc +++ /dev/null @@ -1 +0,0 @@ -Bump dtolnay/rust-toolchain from e12eda571dc9a5ee5d58eecf4738ec291c66f295 to fc3253060d0c959bea12a59f10f8391454a0b02d. diff --git a/changelog.d/15306.bugfix b/changelog.d/15306.bugfix deleted file mode 100644 index f5eb716f12c5..000000000000 --- a/changelog.d/15306.bugfix +++ /dev/null @@ -1,2 +0,0 @@ -Add a check to [SQLite port_db script](https://matrix-org.github.io/synapse/latest/postgres.html#porting-from-sqlite) -to ensure that the sqlite database passed to the script exists before trying to port from it. diff --git a/changelog.d/15309.bugfix b/changelog.d/15309.bugfix deleted file mode 100644 index 4d3fe4e4b10b..000000000000 --- a/changelog.d/15309.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug introduced in Synapse 1.76.0 where responses from worker deployments could include an internal `_INT_STREAM_POS` key. diff --git a/changelog.d/15311.misc b/changelog.d/15311.misc deleted file mode 100644 index ce03cb95238f..000000000000 --- a/changelog.d/15311.misc +++ /dev/null @@ -1 +0,0 @@ -Reject events with an invalid "mentions" property pert [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952). diff --git a/changelog.d/15314.feature b/changelog.d/15314.feature deleted file mode 100644 index 5ce0c029cee2..000000000000 --- a/changelog.d/15314.feature +++ /dev/null @@ -1 +0,0 @@ -Experimental support for passing One Time Key and device key requests to application services ([MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983) and [MSC3984](https://github.com/matrix-org/matrix-spec-proposals/pull/3984)). diff --git a/changelog.d/15316.misc b/changelog.d/15316.misc deleted file mode 100644 index 1f408739f0bc..000000000000 --- a/changelog.d/15316.misc +++ /dev/null @@ -1 +0,0 @@ -As an optimisation, use `TRUNCATE` on Postgres when clearing the user directory tables. \ No newline at end of file diff --git a/changelog.d/15317.bugfix b/changelog.d/15317.bugfix deleted file mode 100644 index 194e4c46c64c..000000000000 --- a/changelog.d/15317.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a long-standing bug that Synapse only used the [legacy appservice routes](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes). diff --git a/changelog.d/15319.misc b/changelog.d/15319.misc deleted file mode 100644 index 339e5b347dcf..000000000000 --- a/changelog.d/15319.misc +++ /dev/null @@ -1 +0,0 @@ -Fix `.gitignore` rule for the Complement source tarball downloaded automatically by `complement.sh`. \ No newline at end of file diff --git a/changelog.d/15321.feature b/changelog.d/15321.feature deleted file mode 100644 index 5ce0c029cee2..000000000000 --- a/changelog.d/15321.feature +++ /dev/null @@ -1 +0,0 @@ -Experimental support for passing One Time Key and device key requests to application services ([MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983) and [MSC3984](https://github.com/matrix-org/matrix-spec-proposals/pull/3984)). diff --git a/changelog.d/15323.bugfix b/changelog.d/15323.bugfix deleted file mode 100644 index bc1ab35532fd..000000000000 --- a/changelog.d/15323.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a long-standing bug preventing users from joining rooms, that they had been unbanned from, over federation. Contributed by Nico.
diff --git a/changelog.d/15324.misc b/changelog.d/15324.misc deleted file mode 100644 index 2908c82a2a4f..000000000000 --- a/changelog.d/15324.misc +++ /dev/null @@ -1 +0,0 @@ -Bump serde from 1.0.157 to 1.0.158. diff --git a/changelog.d/15325.misc b/changelog.d/15325.misc deleted file mode 100644 index 51af16f7e897..000000000000 --- a/changelog.d/15325.misc +++ /dev/null @@ -1 +0,0 @@ -Bump regex from 1.7.1 to 1.7.3. diff --git a/changelog.d/15326.misc b/changelog.d/15326.misc deleted file mode 100644 index 6238b7ff804d..000000000000 --- a/changelog.d/15326.misc +++ /dev/null @@ -1 +0,0 @@ -Bump types-pyopenssl from 23.0.0.4 to 23.1.0.0. diff --git a/changelog.d/15327.misc b/changelog.d/15327.misc deleted file mode 100644 index e5813f1aa985..000000000000 --- a/changelog.d/15327.misc +++ /dev/null @@ -1 +0,0 @@ -Bump furo from 2022.12.7 to 2023.3.23. diff --git a/changelog.d/15328.misc b/changelog.d/15328.misc deleted file mode 100644 index e3e595333229..000000000000 --- a/changelog.d/15328.misc +++ /dev/null @@ -1 +0,0 @@ -Bump ruff from 0.0.252 to 0.0.259. diff --git a/changelog.d/15329.misc b/changelog.d/15329.misc deleted file mode 100644 index eea38b7a42da..000000000000 --- a/changelog.d/15329.misc +++ /dev/null @@ -1 +0,0 @@ -Bump cryptography from 40.0.0 to 40.0.1. diff --git a/changelog.d/15330.misc b/changelog.d/15330.misc deleted file mode 100644 index d60e63483e1e..000000000000 --- a/changelog.d/15330.misc +++ /dev/null @@ -1 +0,0 @@ -Bump mypy-zope from 0.9.0 to 0.9.1. diff --git a/changelog.d/15331.feature b/changelog.d/15331.feature deleted file mode 100644 index b4c2eddc48ce..000000000000 --- a/changelog.d/15331.feature +++ /dev/null @@ -1 +0,0 @@ -Allow loading `/password_policy` endpoint on workers. \ No newline at end of file diff --git a/changelog.d/15332.bugfix b/changelog.d/15332.bugfix deleted file mode 100644 index ca6fb1d2fd79..000000000000 --- a/changelog.d/15332.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug in worker mode where on a rolling restart of workers the "typing" worker would consume 100% CPU until it got restarted. diff --git a/changelog.d/15334.misc b/changelog.d/15334.misc deleted file mode 100644 index 0c30818ed015..000000000000 --- a/changelog.d/15334.misc +++ /dev/null @@ -1 +0,0 @@ -Speed up unit tests when using SQLite3. diff --git a/changelog.d/15336.misc b/changelog.d/15336.misc deleted file mode 100644 index 39c9fc82e919..000000000000 --- a/changelog.d/15336.misc +++ /dev/null @@ -1 +0,0 @@ -Add developer documentation for the Federation Sender and add a documentation mechanism using Sphinx. diff --git a/changelog.d/15339.misc b/changelog.d/15339.misc deleted file mode 100644 index 626f7ef5c007..000000000000 --- a/changelog.d/15339.misc +++ /dev/null @@ -1 +0,0 @@ -Speed up pydantic CI job. diff --git a/changelog.d/15340.misc b/changelog.d/15340.misc deleted file mode 100644 index e2fe35d7c591..000000000000 --- a/changelog.d/15340.misc +++ /dev/null @@ -1 +0,0 @@ -Speed up sample config CI job. diff --git a/changelog.d/15341.doc b/changelog.d/15341.doc deleted file mode 100644 index d85c0fd2c490..000000000000 --- a/changelog.d/15341.doc +++ /dev/null @@ -1 +0,0 @@ -Fix a typo in login requests ratelimit defaults. diff --git a/changelog.d/15349.bugfix b/changelog.d/15349.bugfix deleted file mode 100644 index 65ea7ae7eb1e..000000000000 --- a/changelog.d/15349.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a long-standing bug where some to_device messages could be dropped when using workers. 
diff --git a/changelog.d/15351.bugfix b/changelog.d/15351.bugfix deleted file mode 100644 index e68023c6716f..000000000000 --- a/changelog.d/15351.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug introduced in Synapse 1.70.0 where the background sync from a faster join could spin for hours when one of the events involved had been marked for backoff. diff --git a/changelog.d/15352.bugfix b/changelog.d/15352.bugfix deleted file mode 100644 index 36d6615cac3c..000000000000 --- a/changelog.d/15352.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix missing app variable in mail subject for password resets. Contributed by Cyberes. diff --git a/changelog.d/15353.misc b/changelog.d/15353.misc deleted file mode 100644 index 23927fea8ff2..000000000000 --- a/changelog.d/15353.misc +++ /dev/null @@ -1 +0,0 @@ -Add experimental support for Unix sockets. Contributed by Jason Little. diff --git a/changelog.d/15354.misc b/changelog.d/15354.misc deleted file mode 100644 index 862444edfbaf..000000000000 --- a/changelog.d/15354.misc +++ /dev/null @@ -1 +0,0 @@ -Add some clarification to the doc/comments regarding TCP replication. diff --git a/changelog.d/15358.misc b/changelog.d/15358.misc deleted file mode 100644 index daf261a9f16d..000000000000 --- a/changelog.d/15358.misc +++ /dev/null @@ -1 +0,0 @@ -Fix copyright year in SSO footer template. diff --git a/changelog.d/15369.misc b/changelog.d/15369.misc deleted file mode 100644 index da990285cbab..000000000000 --- a/changelog.d/15369.misc +++ /dev/null @@ -1 +0,0 @@ -Bump peaceiris/actions-gh-pages from 3.9.2 to 3.9.3. diff --git a/changelog.d/15370.misc b/changelog.d/15370.misc deleted file mode 100644 index 23a18eaf07b8..000000000000 --- a/changelog.d/15370.misc +++ /dev/null @@ -1 +0,0 @@ -Bump serde from 1.0.158 to 1.0.159. diff --git a/changelog.d/15371.misc b/changelog.d/15371.misc deleted file mode 100644 index c21deede05a6..000000000000 --- a/changelog.d/15371.misc +++ /dev/null @@ -1 +0,0 @@ -Bump serde_json from 1.0.94 to 1.0.95. diff --git a/changelog.d/15381.feature b/changelog.d/15381.feature deleted file mode 100644 index ba5b578564e9..000000000000 --- a/changelog.d/15381.feature +++ /dev/null @@ -1 +0,0 @@ -Build Debian packages for Ubuntu 23.04 (Lunar Lobster). diff --git a/changelog.d/15383.bugfix b/changelog.d/15383.bugfix deleted file mode 100644 index 28c66ef45461..000000000000 --- a/changelog.d/15383.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a rare bug introduced in Synapse 1.66.0 where initial syncs would fail when the user had been kicked from a faster joined room that had not finished syncing. diff --git a/changelog.d/15385.misc b/changelog.d/15385.misc deleted file mode 100644 index 76350c398d6f..000000000000 --- a/changelog.d/15385.misc +++ /dev/null @@ -1 +0,0 @@ -Speed up membership queries for users with forgotten rooms. diff --git a/changelog.d/15386.doc b/changelog.d/15386.doc deleted file mode 100644 index 5c032193aeef..000000000000 --- a/changelog.d/15386.doc +++ /dev/null @@ -1 +0,0 @@ -Note that Synapse 1.74 queued a rebuild of the user directory tables. diff --git a/debian/changelog b/debian/changelog index 98366d49166b..c3bea01c050a 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.81.0~rc1) stable; urgency=medium + + * New Synapse release 1.81.0rc1. + + -- Synapse Packaging team Tue, 04 Apr 2023 14:29:03 +0100 + matrix-synapse-py3 (1.80.0) stable; urgency=medium * New Synapse release 1.80.0. 
diff --git a/pyproject.toml b/pyproject.toml index dc5522c4b516..3b38d8370c84 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml" [tool.poetry] name = "matrix-synapse" -version = "1.80.0" +version = "1.81.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "Apache-2.0" From 6d103373e277b428389585af165fdccc34c4760f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 4 Apr 2023 14:38:33 +0100 Subject: [PATCH 057/106] Update changelog --- CHANGES.md | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 1f77fab96078..2a6ee1490d74 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,6 +1,19 @@ Synapse 1.81.0rc1 (2023-04-04) ============================== +Synapse now attempts the versioned appservice paths before falling back to the +[legacy paths](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes). +Usage of the legacy routes should be considered deprecated. + +Additionally, Synapse has supported sending the application service access token +via [the `Authorization` header](https://spec.matrix.org/v1.6/application-service-api/#authorization) +since v1.70.0. For backwards compatibility it is *also* sent as the `access_token` +query parameter. This is insecure and should be considered deprecated. + +A future version of Synapse (v1.88.0 or later) will remove support for legacy +application service routes and query parameter authorization. + + Features -------- @@ -8,6 +21,7 @@ Features - Add a primitive helper script for listing worker endpoints. ([\#15243](https://github.com/matrix-org/synapse/issues/15243)) - Experimental support for passing One Time Key and device key requests to application services ([MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983) and [MSC3984](https://github.com/matrix-org/matrix-spec-proposals/pull/3984)). ([\#15314](https://github.com/matrix-org/synapse/issues/15314), [\#15321](https://github.com/matrix-org/synapse/issues/15321)) - Allow loading `/password_policy` endpoint on workers. ([\#15331](https://github.com/matrix-org/synapse/issues/15331)) +- Add experimental support for Unix sockets. Contributed by Jason Little. ([\#15353](https://github.com/matrix-org/synapse/issues/15353)) - Build Debian packages for Ubuntu 23.04 (Lunar Lobster). ([\#15381](https://github.com/matrix-org/synapse/issues/15381)) @@ -20,7 +34,7 @@ Bugfixes to ensure that the sqlite database passed to the script exists before trying to port from it. ([\#15306](https://github.com/matrix-org/synapse/issues/15306)) - Fix a bug introduced in Synapse 1.76.0 where responses from worker deployments could include an internal `_INT_STREAM_POS` key. ([\#15309](https://github.com/matrix-org/synapse/issues/15309)) - Fix a long-standing bug that Synapse only used the [legacy appservice routes](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes). ([\#15317](https://github.com/matrix-org/synapse/issues/15317)) -- Fix a long-standing bug preventing users from joining rooms, that they had been unbanned from, over federation. Contributed by Nico. ([\#15323](https://github.com/matrix-org/synapse/issues/15323)) +- Fix a long-standing bug preventing users from rejoining rooms after being banned and unbanned over federation. Contributed by Nico.
([\#15323](https://github.com/matrix-org/synapse/issues/15323)) - Fix bug in worker mode where on a rolling restart of workers the "typing" worker would consume 100% CPU until it got restarted. ([\#15332](https://github.com/matrix-org/synapse/issues/15332)) - Fix a long-standing bug where some to_device messages could be dropped when using workers. ([\#15349](https://github.com/matrix-org/synapse/issues/15349)) - Fix a bug introduced in Synapse 1.70.0 where the background sync from a faster join could spin for hours when one of the events involved had been marked for backoff. ([\#15351](https://github.com/matrix-org/synapse/issues/15351)) @@ -32,6 +46,7 @@ Improved Documentation ---------------------- - Fix a typo in login requests ratelimit defaults. ([\#15341](https://github.com/matrix-org/synapse/issues/15341)) +- Add some clarification to the doc/comments regarding TCP replication. ([\#15354](https://github.com/matrix-org/synapse/issues/15354)) - Note that Synapse 1.74 queued a rebuild of the user directory tables. ([\#15386](https://github.com/matrix-org/synapse/issues/15386)) @@ -45,7 +60,7 @@ Internal Changes - Allow running the Twisted trunk job against other branches. ([\#15302](https://github.com/matrix-org/synapse/issues/15302)) - Remind the releaser to ask for changelog feedback in [#synapse-dev](https://matrix.to/#/#synapse-dev:matrix.org). ([\#15303](https://github.com/matrix-org/synapse/issues/15303)) - Bump dtolnay/rust-toolchain from e12eda571dc9a5ee5d58eecf4738ec291c66f295 to fc3253060d0c959bea12a59f10f8391454a0b02d. ([\#15304](https://github.com/matrix-org/synapse/issues/15304)) -- Reject events with an invalid "mentions" property pert [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952). ([\#15311](https://github.com/matrix-org/synapse/issues/15311)) +- Reject events with an invalid "mentions" property per [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952). ([\#15311](https://github.com/matrix-org/synapse/issues/15311)) - As an optimisation, use `TRUNCATE` on Postgres when clearing the user directory tables. ([\#15316](https://github.com/matrix-org/synapse/issues/15316)) - Fix `.gitignore` rule for the Complement source tarball downloaded automatically by `complement.sh`. ([\#15319](https://github.com/matrix-org/synapse/issues/15319)) - Bump serde from 1.0.157 to 1.0.158. ([\#15324](https://github.com/matrix-org/synapse/issues/15324)) @@ -58,8 +73,6 @@ Internal Changes - Speed up unit tests when using SQLite3. ([\#15334](https://github.com/matrix-org/synapse/issues/15334)) - Speed up pydantic CI job. ([\#15339](https://github.com/matrix-org/synapse/issues/15339)) - Speed up sample config CI job. ([\#15340](https://github.com/matrix-org/synapse/issues/15340)) -- Add experimental support for Unix sockets. Contributed by Jason Little. ([\#15353](https://github.com/matrix-org/synapse/issues/15353)) -- Add some clarification to the doc/comments regarding TCP replication. ([\#15354](https://github.com/matrix-org/synapse/issues/15354)) - Fix copyright year in SSO footer template. ([\#15358](https://github.com/matrix-org/synapse/issues/15358)) - Bump peaceiris/actions-gh-pages from 3.9.2 to 3.9.3. ([\#15369](https://github.com/matrix-org/synapse/issues/15369)) - Bump serde from 1.0.158 to 1.0.159. 
([\#15370](https://github.com/matrix-org/synapse/issues/15370)) @@ -463,7 +476,7 @@ Those who are `poetry install`ing from source using our lockfile should ensure t Notes on faster joins --------------------- -The faster joins project sees the most benefit when joining a room with a large number of members (joined or historical). We expect it to be particularly useful for joining large public rooms like the [Matrix HQ](https://matrix.to/#/#matrix:matrix.org) or [Synapse Admins](https://matrix.to/#/#synapse:matrix.org) rooms. +The faster joins project sees the most benefit when joining a room with a large number of members (joined or historical). We expect it to be particularly useful for joining large public rooms like the [Matrix HQ](https://matrix.to/#/#matrix:matrix.org) or [Synapse Admins](https://matrix.to/#/#synapse:matrix.org) rooms. After a faster join, Synapse considers that room "partially joined". In this state, you should be able to From 6b23d74ad160d96e06bcc5b62acad56ade06bf6e Mon Sep 17 00:00:00 2001 From: Shay Date: Tue, 4 Apr 2023 13:16:08 -0700 Subject: [PATCH 058/106] Delete server-side backup keys when deactivating an account. (#15181) --- changelog.d/15181.bugfix | 1 + synapse/_scripts/synapse_port_db.py | 2 + synapse/handlers/deactivate_account.py | 3 + .../storage/databases/main/e2e_room_keys.py | 114 ++++++++++++- ..._e2e_backup_keys_for_deactivated_users.sql | 17 ++ tests/rest/client/test_account.py | 157 ++++++++++++++++++ 6 files changed, 291 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15181.bugfix create mode 100644 synapse/storage/schema/main/delta/74/04_delete_e2e_backup_keys_for_deactivated_users.sql diff --git a/changelog.d/15181.bugfix b/changelog.d/15181.bugfix new file mode 100644 index 000000000000..191bb6f611a7 --- /dev/null +++ b/changelog.d/15181.bugfix @@ -0,0 +1 @@ +Delete server-side backup keys when deactivating an account. \ No newline at end of file diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index 1dcb397ba456..a58ae2a308c6 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -59,6 +59,7 @@ from synapse.storage.databases.main.client_ips import ClientIpBackgroundUpdateStore from synapse.storage.databases.main.deviceinbox import DeviceInboxBackgroundUpdateStore from synapse.storage.databases.main.devices import DeviceBackgroundUpdateStore +from synapse.storage.databases.main.e2e_room_keys import EndToEndRoomKeyBackgroundStore from synapse.storage.databases.main.end_to_end_keys import EndToEndKeyBackgroundStore from synapse.storage.databases.main.event_push_actions import EventPushActionsStore from synapse.storage.databases.main.events_bg_updates import ( @@ -225,6 +226,7 @@ class Store( MainStateBackgroundUpdateStore, UserDirectoryBackgroundUpdateStore, EndToEndKeyBackgroundStore, + EndToEndRoomKeyBackgroundStore, StatsStore, AccountDataWorkerStore, PushRuleStore, diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index d31263c717b5..bd5867491baf 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -176,6 +176,9 @@ async def deactivate_account( # Remove account data (including ignored users and push rules). await self.store.purge_account_data_for_user(user_id) + # Delete any server-side backup keys + await self.store.bulk_delete_backup_keys_and_versions_for_user(user_id) + # Let modules know the user has been deactivated. 
await self._third_party_rules.on_user_deactivation_status_changed( user_id, diff --git a/synapse/storage/databases/main/e2e_room_keys.py b/synapse/storage/databases/main/e2e_room_keys.py index 9f8d2e4bea65..d01f28cc803f 100644 --- a/synapse/storage/databases/main/e2e_room_keys.py +++ b/synapse/storage/databases/main/e2e_room_keys.py @@ -13,17 +13,24 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, Iterable, Mapping, Optional, Tuple, cast +from typing import TYPE_CHECKING, Dict, Iterable, Mapping, Optional, Tuple, cast from typing_extensions import Literal, TypedDict from synapse.api.errors import StoreError from synapse.logging.opentracing import log_kv, trace from synapse.storage._base import SQLBaseStore, db_to_json -from synapse.storage.database import LoggingTransaction +from synapse.storage.database import ( + DatabasePool, + LoggingDatabaseConnection, + LoggingTransaction, +) from synapse.types import JsonDict, JsonSerializable, StreamKeyType from synapse.util import json_encoder +if TYPE_CHECKING: + from synapse.server import HomeServer + class RoomKey(TypedDict): """`KeyBackupData` in the Matrix spec. @@ -37,7 +44,82 @@ class RoomKey(TypedDict): session_data: JsonSerializable -class EndToEndRoomKeyStore(SQLBaseStore): +class EndToEndRoomKeyBackgroundStore(SQLBaseStore): + def __init__( + self, + database: DatabasePool, + db_conn: LoggingDatabaseConnection, + hs: "HomeServer", + ): + super().__init__(database, db_conn, hs) + + self.db_pool.updates.register_background_update_handler( + "delete_e2e_backup_keys_for_deactivated_users", + self._delete_e2e_backup_keys_for_deactivated_users, + ) + + def _delete_keys_txn(self, txn: LoggingTransaction, user_id: str) -> None: + self.db_pool.simple_delete_txn( + txn, + table="e2e_room_keys", + keyvalues={"user_id": user_id}, + ) + + self.db_pool.simple_delete_txn( + txn, + table="e2e_room_keys_versions", + keyvalues={"user_id": user_id}, + ) + + async def _delete_e2e_backup_keys_for_deactivated_users( + self, progress: JsonDict, batch_size: int + ) -> int: + """ + Retroactively deletes server-side backup keys and versions for users that have already been deactivated. + Gets run as a background update caused by a schema delta. + """ + + last_user: str = progress.get("last_user", "") + + def _delete_backup_keys_for_deactivated_users_txn( + txn: LoggingTransaction, + ) -> int: + sql = """ + SELECT name FROM users + WHERE deactivated = ? and name > ? + ORDER BY name ASC + LIMIT ? + """ + + txn.execute(sql, (1, last_user, batch_size)) + users = [row[0] for row in txn] + + for user in users: + self._delete_keys_txn(txn, user) + + if users: + self.db_pool.updates._background_update_progress_txn( + txn, + "delete_e2e_backup_keys_for_deactivated_users", + {"last_user": users[-1]}, + ) + + return len(users) + + number_deleted = await self.db_pool.runInteraction( + "_delete_backup_keys_for_deactivated_users", + _delete_backup_keys_for_deactivated_users_txn, + ) + + if number_deleted < batch_size: + await self.db_pool.updates._end_background_update( + "delete_e2e_backup_keys_for_deactivated_users" + ) + + return number_deleted + + +class EndToEndRoomKeyStore(EndToEndRoomKeyBackgroundStore): """The store for end to end room key backups.
See https://spec.matrix.org/v1.1/client-server-api/#server-side-key-backups @@ -550,3 +632,29 @@ def _delete_e2e_room_keys_version_txn(txn: LoggingTransaction) -> None: await self.db_pool.runInteraction( "delete_e2e_room_keys_version", _delete_e2e_room_keys_version_txn ) + + async def bulk_delete_backup_keys_and_versions_for_user(self, user_id: str) -> None: + """ + Bulk deletes all backup room keys and versions for a given user. + + Args: + user_id: the user whose backup keys and versions we're deleting + """ + + def _delete_all_e2e_room_keys_and_versions_txn(txn: LoggingTransaction) -> None: + self.db_pool.simple_delete_txn( + txn, + table="e2e_room_keys", + keyvalues={"user_id": user_id}, + ) + + self.db_pool.simple_delete_txn( + txn, + table="e2e_room_keys_versions", + keyvalues={"user_id": user_id}, + ) + + await self.db_pool.runInteraction( + "delete_all_e2e_room_keys_and_versions", + _delete_all_e2e_room_keys_and_versions_txn, + ) diff --git a/synapse/storage/schema/main/delta/74/04_delete_e2e_backup_keys_for_deactivated_users.sql b/synapse/storage/schema/main/delta/74/04_delete_e2e_backup_keys_for_deactivated_users.sql new file mode 100644 index 000000000000..a194f4cecef3 --- /dev/null +++ b/synapse/storage/schema/main/delta/74/04_delete_e2e_backup_keys_for_deactivated_users.sql @@ -0,0 +1,17 @@ +/* Copyright 2023 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +INSERT INTO background_updates (ordering, update_name, progress_json) VALUES + (7404, 'delete_e2e_backup_keys_for_deactivated_users', '{}'); \ No newline at end of file diff --git a/tests/rest/client/test_account.py b/tests/rest/client/test_account.py index 7f675c44a278..ac19f3c6daef 100644 --- a/tests/rest/client/test_account.py +++ b/tests/rest/client/test_account.py @@ -474,6 +474,163 @@ def test_pending_invites(self) -> None: self.assertEqual(len(memberships), 1, memberships) self.assertEqual(memberships[0].room_id, room_id, memberships) + def test_deactivate_account_deletes_server_side_backup_keys(self) -> None: + key_handler = self.hs.get_e2e_room_keys_handler() + room_keys = { + "rooms": { + "!abc:matrix.org": { + "sessions": { + "c0ff33": { + "first_message_index": 1, + "forwarded_count": 1, + "is_verified": False, + "session_data": "SSBBTSBBIEZJU0gK", + } + } + } + } + } + + user_id = self.register_user("missPiggy", "test") + tok = self.login("missPiggy", "test") + + # add some backup keys/versions + version = self.get_success( + key_handler.create_version( + user_id, + { + "algorithm": "m.megolm_backup.v1", + "auth_data": "first_version_auth_data", + }, + ) + ) + + self.get_success(key_handler.upload_room_keys(user_id, version, room_keys)) + + version2 = self.get_success( + key_handler.create_version( + user_id, + { + "algorithm": "m.megolm_backup.v1", + "auth_data": "second_version_auth_data", + }, + ) + ) + + self.get_success(key_handler.upload_room_keys(user_id, version2, room_keys)) + + self.deactivate(user_id, tok) + store = self.hs.get_datastores().main + + # Check that the user has been marked as deactivated. + self.assertTrue(self.get_success(store.get_user_deactivated_status(user_id))) + + # Check that there are no entries in 'e2e_room_keys` and `e2e_room_keys_versions` + res = self.get_success( + self.hs.get_datastores().main.db_pool.simple_select_list( + "e2e_room_keys", {"user_id": user_id}, "*", "simple_select" + ) + ) + self.assertEqual(len(res), 0) + + res2 = self.get_success( + self.hs.get_datastores().main.db_pool.simple_select_list( + "e2e_room_keys_versions", {"user_id": user_id}, "*", "simple_select" + ) + ) + self.assertEqual(len(res2), 0) + + def test_background_update_deletes_deactivated_users_server_side_backup_keys( + self, + ) -> None: + key_handler = self.hs.get_e2e_room_keys_handler() + room_keys = { + "rooms": { + "!abc:matrix.org": { + "sessions": { + "c0ff33": { + "first_message_index": 1, + "forwarded_count": 1, + "is_verified": False, + "session_data": "SSBBTSBBIEZJU0gK", + } + } + } + } + } + self.store = self.hs.get_datastores().main + + # create a bunch of users and add keys for them + users = [] + for i in range(0, 20): + user_id = self.register_user("missPiggy" + str(i), "test") + users.append((user_id,)) + + # add some backup keys/versions + version = self.get_success( + key_handler.create_version( + user_id, + { + "algorithm": "m.megolm_backup.v1", + "auth_data": str(i) + "_version_auth_data", + }, + ) + ) + + self.get_success(key_handler.upload_room_keys(user_id, version, room_keys)) + + version2 = self.get_success( + key_handler.create_version( + user_id, + { + "algorithm": "m.megolm_backup.v1", + "auth_data": str(i) + "_version_auth_data", + }, + ) + ) + + self.get_success(key_handler.upload_room_keys(user_id, version2, room_keys)) + + # deactivate most of the users by editing DB + self.get_success( + self.store.db_pool.simple_update_many( + table="users", + key_names=("name",), + key_values=users[0:18], + 
value_names=("deactivated",), + value_values=[(1,) for i in range(1, 19)], + desc="", + ) + ) + + # run background update + self.get_success( + self.store.db_pool.simple_insert( + "background_updates", + { + "update_name": "delete_e2e_backup_keys_for_deactivated_users", + "progress_json": "{}", + }, + ) + ) + self.store.db_pool.updates._all_done = False + self.wait_for_background_updates() + + # check that keys are deleted for the deactivated users but not the others + res = self.get_success( + self.hs.get_datastores().main.db_pool.simple_select_list( + "e2e_room_keys", None, ("user_id",), "simple_select" + ) + ) + self.assertEqual(len(res), 4) + + res2 = self.get_success( + self.hs.get_datastores().main.db_pool.simple_select_list( + "e2e_room_keys_versions", None, ("user_id",), "simple_select" + ) + ) + self.assertEqual(len(res2), 4) + def deactivate(self, user_id: str, tok: str) -> None: request_data = { "auth": { From 6eb3edec473899f9145124d33a85b153e4a0cda9 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Wed, 5 Apr 2023 13:49:15 +0200 Subject: [PATCH 059/106] Fix the 'set_device_id_for_pushers_txn' background update. (#15391) Refer to the correct field from the response when updating the background update progress. --- changelog.d/15391.bugfix | 1 + synapse/storage/databases/main/pusher.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15391.bugfix diff --git a/changelog.d/15391.bugfix b/changelog.d/15391.bugfix new file mode 100644 index 000000000000..22b3bfe66801 --- /dev/null +++ b/changelog.d/15391.bugfix @@ -0,0 +1 @@ +Fix the `set_device_id_for_pushers_txn` background update crash. diff --git a/synapse/storage/databases/main/pusher.py b/synapse/storage/databases/main/pusher.py index aeb6034f46da..87e28e22d3c5 100644 --- a/synapse/storage/databases/main/pusher.py +++ b/synapse/storage/databases/main/pusher.py @@ -562,7 +562,7 @@ def set_device_id_for_pushers_txn(txn: LoggingTransaction) -> int: ) self.db_pool.updates._background_update_progress_txn( - txn, "set_device_id_for_pushers", {"pusher_id": rows[-1]["id"]} + txn, "set_device_id_for_pushers", {"pusher_id": rows[-1]["pusher_id"]} ) return len(rows) From 83649b891db5fcca7b96c606293925bc47358730 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 5 Apr 2023 14:42:46 -0400 Subject: [PATCH 060/106] Implement MSC3989 to redact the origin field. (#15393) This will be done in a future room version, for now an unstable room version is added which redacts the origin field. --- changelog.d/15393.misc | 1 + synapse/api/room_versions.py | 37 ++++++++++++++++++++++++++++++++++++ synapse/events/utils.py | 5 ++++- tests/events/test_utils.py | 7 +++++++ 4 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15393.misc diff --git a/changelog.d/15393.misc b/changelog.d/15393.misc new file mode 100644 index 000000000000..24483c8d785c --- /dev/null +++ b/changelog.d/15393.misc @@ -0,0 +1 @@ +Implement [MSC3989](https://github.com/matrix-org/matrix-spec-proposals/pull/3989) redaction algorithm. diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py index c397920fe54e..bc15f2d0631f 100644 --- a/synapse/api/room_versions.py +++ b/synapse/api/room_versions.py @@ -104,6 +104,8 @@ class RoomVersion: # support the flag. Unknown flags are ignored by the evaluator, making conditions # fail if used. msc3931_push_features: Tuple[str, ...] # values from PushRuleRoomFlag + # MSC3989: Redact the origin field. 
+ msc3989_redaction_rules: bool class RoomVersions: @@ -125,6 +127,7 @@ class RoomVersions: msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, msc3931_push_features=(), + msc3989_redaction_rules=False, ) V2 = RoomVersion( "2", @@ -144,6 +147,7 @@ class RoomVersions: msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, msc3931_push_features=(), + msc3989_redaction_rules=False, ) V3 = RoomVersion( "3", @@ -163,6 +167,7 @@ class RoomVersions: msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, msc3931_push_features=(), + msc3989_redaction_rules=False, ) V4 = RoomVersion( "4", @@ -182,6 +187,7 @@ class RoomVersions: msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, msc3931_push_features=(), + msc3989_redaction_rules=False, ) V5 = RoomVersion( "5", @@ -201,6 +207,7 @@ class RoomVersions: msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, msc3931_push_features=(), + msc3989_redaction_rules=False, ) V6 = RoomVersion( "6", @@ -220,6 +227,7 @@ class RoomVersions: msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, msc3931_push_features=(), + msc3989_redaction_rules=False, ) MSC2176 = RoomVersion( "org.matrix.msc2176", @@ -239,6 +247,7 @@ class RoomVersions: msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, msc3931_push_features=(), + msc3989_redaction_rules=False, ) V7 = RoomVersion( "7", @@ -258,6 +267,7 @@ class RoomVersions: msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, msc3931_push_features=(), + msc3989_redaction_rules=False, ) V8 = RoomVersion( "8", @@ -277,6 +287,7 @@ class RoomVersions: msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, msc3931_push_features=(), + msc3989_redaction_rules=False, ) V9 = RoomVersion( "9", @@ -296,6 +307,7 @@ class RoomVersions: msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, msc3931_push_features=(), + msc3989_redaction_rules=False, ) MSC3787 = RoomVersion( "org.matrix.msc3787", @@ -315,6 +327,7 @@ class RoomVersions: msc3787_knock_restricted_join_rule=True, msc3667_int_only_power_levels=False, msc3931_push_features=(), + msc3989_redaction_rules=False, ) V10 = RoomVersion( "10", @@ -334,6 +347,7 @@ class RoomVersions: msc3787_knock_restricted_join_rule=True, msc3667_int_only_power_levels=True, msc3931_push_features=(), + msc3989_redaction_rules=False, ) MSC2716v4 = RoomVersion( "org.matrix.msc2716v4", @@ -353,6 +367,7 @@ class RoomVersions: msc3787_knock_restricted_join_rule=False, msc3667_int_only_power_levels=False, msc3931_push_features=(), + msc3989_redaction_rules=False, ) MSC1767v10 = RoomVersion( # MSC1767 (Extensible Events) based on room version "10" @@ -373,6 +388,27 @@ class RoomVersions: msc3787_knock_restricted_join_rule=True, msc3667_int_only_power_levels=True, msc3931_push_features=(PushRuleRoomFlag.EXTENSIBLE_EVENTS,), + msc3989_redaction_rules=False, + ) + MSC3989 = RoomVersion( + "org.matrix.msc3989", + RoomDisposition.UNSTABLE, + EventFormatVersions.ROOM_V4_PLUS, + StateResolutionVersions.V2, + enforce_key_validity=True, + special_case_aliases_auth=False, + strict_canonicaljson=True, + limit_notifications_power_levels=True, + msc2176_redaction_rules=False, + msc3083_join_rules=True, + msc3375_redaction_rules=True, + msc2403_knocking=True, + msc2716_historical=False, + msc2716_redactions=False, + msc3787_knock_restricted_join_rule=True, + 
msc3667_int_only_power_levels=True, + msc3931_push_features=(), + msc3989_redaction_rules=True, ) @@ -392,6 +428,7 @@ class RoomVersions: RoomVersions.MSC3787, RoomVersions.V10, RoomVersions.MSC2716v4, + RoomVersions.MSC3989, ) } diff --git a/synapse/events/utils.py b/synapse/events/utils.py index c14c7791db27..1d5d7491cd83 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -106,7 +106,6 @@ def prune_event_dict(room_version: RoomVersion, event_dict: JsonDict) -> JsonDic "depth", "prev_events", "auth_events", - "origin", "origin_server_ts", ] @@ -114,6 +113,10 @@ def prune_event_dict(room_version: RoomVersion, event_dict: JsonDict) -> JsonDic if not room_version.msc2176_redaction_rules: allowed_keys.extend(["prev_state", "membership"]) + # Room versions before MSC3989 kept the origin field. + if not room_version.msc3989_redaction_rules: + allowed_keys.append("origin") + event_type = event_dict["type"] new_content = {} diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py index 4174a237ec84..c35f58f46213 100644 --- a/tests/events/test_utils.py +++ b/tests/events/test_utils.py @@ -143,6 +143,13 @@ def test_basic_keys(self) -> None: room_version=RoomVersions.MSC2176, ) + # As of MSC3989 we now redact the origin key. + self.run_test( + {"type": "A", "origin": "example.com"}, + {"type": "A", "content": {}, "signatures": {}, "unsigned": {}}, + room_version=RoomVersions.MSC3989, + ) + def test_unsigned(self) -> None: """Ensure that unsigned properties get stripped (except age_ts and replaces_state).""" self.run_test( From ec6430bad88ee7ab0b6d9df0d54cdbe68fa3c431 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Thu, 6 Apr 2023 15:23:36 +0100 Subject: [PATCH 061/106] Use setup-go instead of relying on go 1.17 to exist (#15403) * Use setup-go instead of relying on 1.17 to exist See https://github.com/actions/runner-images/issues/7276 * Changelog --- .ci/scripts/setup_complement_prerequisites.sh | 10 ---------- .github/workflows/latest_deps.yml | 2 ++ .github/workflows/tests.yml | 2 ++ .github/workflows/twisted_trunk.yml | 2 ++ changelog.d/15403.misc | 1 + 5 files changed, 7 insertions(+), 10 deletions(-) create mode 100644 changelog.d/15403.misc diff --git a/.ci/scripts/setup_complement_prerequisites.sh b/.ci/scripts/setup_complement_prerequisites.sh index 3778478da644..47a3ff8e6966 100755 --- a/.ci/scripts/setup_complement_prerequisites.sh +++ b/.ci/scripts/setup_complement_prerequisites.sh @@ -9,16 +9,6 @@ set -eu alias block='{ set +x; } 2>/dev/null; func() { echo "::group::$*"; set -x; }; func' alias endblock='{ set +x; } 2>/dev/null; func() { echo "::endgroup::"; set -x; }; func' -block Set Go Version - # The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement. 
- # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path - - # Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2 - echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH - # Add the Go path to the PATH: We need this so we can call gotestfmt - echo "~/go/bin" >> $GITHUB_PATH -endblock - block Install Complement Dependencies sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev go install -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index d5a68ffa1f12..c3705b059b20 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -184,6 +184,8 @@ jobs: with: path: synapse + - uses: actions/setup-go@v4 + - name: Prepare Complement's Prerequisites run: synapse/.ci/scripts/setup_complement_prerequisites.sh diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index a2cec324a549..f895163e517a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -555,6 +555,8 @@ jobs: toolchain: 1.58.1 - uses: Swatinem/rust-cache@v2 + - uses: actions/setup-go@v4 + - name: Prepare Complement's Prerequisites run: synapse/.ci/scripts/setup_complement_prerequisites.sh diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index 461c85067ca4..ad35c6b57089 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -147,6 +147,8 @@ jobs: with: path: synapse + - uses: actions/setup-go@v4 + - name: Prepare Complement's Prerequisites run: synapse/.ci/scripts/setup_complement_prerequisites.sh diff --git a/changelog.d/15403.misc b/changelog.d/15403.misc new file mode 100644 index 000000000000..8d17b737bf92 --- /dev/null +++ b/changelog.d/15403.misc @@ -0,0 +1 @@ +Update CI to run complement under the latest stable go version. From edf046ece7c14cd8f01a4e5ebcbeefac9e0b6717 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Thu, 6 Apr 2023 16:08:28 +0100 Subject: [PATCH 062/106] 1.81.0rc2 --- CHANGES.md | 17 ++++++++++++++++- changelog.d/15391.bugfix | 1 - changelog.d/15403.misc | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 5 files changed, 23 insertions(+), 4 deletions(-) delete mode 100644 changelog.d/15391.bugfix delete mode 100644 changelog.d/15403.misc diff --git a/CHANGES.md b/CHANGES.md index 2a6ee1490d74..70f1e0cbd05c 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,4 +1,4 @@ -Synapse 1.81.0rc1 (2023-04-04) +Synapse 1.81.0rc2 (2023-04-06) ============================== Synapse now attempts the versioned appservice paths before falling back to the @@ -14,6 +14,21 @@ A future version of Synapse (v1.88.0 or later) will remove support for legacy application service routes and query parameter authorization. +Bugfixes +-------- + +- Fix the `set_device_id_for_pushers_txn` background update crash. ([\#15391](https://github.com/matrix-org/synapse/issues/15391)) + + +Internal Changes +---------------- + +- Update CI to run complement under the latest stable go version. 
([\#15403](https://github.com/matrix-org/synapse/issues/15403)) + + +Synapse 1.81.0rc1 (2023-04-04) +============================== + Features -------- diff --git a/changelog.d/15391.bugfix b/changelog.d/15391.bugfix deleted file mode 100644 index 22b3bfe66801..000000000000 --- a/changelog.d/15391.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix the `set_device_id_for_pushers_txn` background update crash. diff --git a/changelog.d/15403.misc b/changelog.d/15403.misc deleted file mode 100644 index 8d17b737bf92..000000000000 --- a/changelog.d/15403.misc +++ /dev/null @@ -1 +0,0 @@ -Update CI to run complement under the latest stable go version. diff --git a/debian/changelog b/debian/changelog index c3bea01c050a..850afe12a93f 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.81.0~rc2) stable; urgency=medium + + * New Synapse release 1.81.0rc2. + + -- Synapse Packaging team Thu, 06 Apr 2023 16:07:54 +0100 + matrix-synapse-py3 (1.81.0~rc1) stable; urgency=medium * New Synapse release 1.81.0rc1. diff --git a/pyproject.toml b/pyproject.toml index 3b38d8370c84..1386dd859f85 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml" [tool.poetry] name = "matrix-synapse" -version = "1.81.0rc1" +version = "1.81.0rc2" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "Apache-2.0" From 485b9fdefb9f45df172ff5044d6a02a177b7de19 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Apr 2023 17:42:39 +0100 Subject: [PATCH 063/106] Don't keep old stream_ordering_to_exterm around (#15382) --- changelog.d/15382.misc | 1 + synapse/handlers/device.py | 10 ++++ .../databases/main/event_federation.py | 52 +++++++++++++++---- 3 files changed, 53 insertions(+), 10 deletions(-) create mode 100644 changelog.d/15382.misc diff --git a/changelog.d/15382.misc b/changelog.d/15382.misc new file mode 100644 index 000000000000..c5b054d19e72 --- /dev/null +++ b/changelog.d/15382.misc @@ -0,0 +1 @@ +Improve DB performance of clearing out old data from `stream_ordering_to_exterm`. diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 9ded6389acdb..d2063d443558 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -215,6 +215,16 @@ async def get_user_ids_changed( possibly_changed = set(changed) possibly_left = set() for room_id in rooms_changed: + # Check if the forward extremities have changed. If not then we know + # the current state won't have changed, and so we can skip this room. + try: + if not await self.store.have_room_forward_extremities_changed_since( + room_id, stream_ordering + ): + continue + except errors.StoreError: + pass + current_state_ids = await self._state_storage.get_current_state_ids( room_id, await_full_state=False ) diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index a19ba88bf8af..9e6011e8ea18 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -1171,6 +1171,38 @@ def _get_min_depth_interaction( return int(min_depth) if min_depth is not None else None + async def have_room_forward_extremities_changed_since( + self, + room_id: str, + stream_ordering: int, + ) -> bool: + """Check if the forward extremities in a room have changed since the + given stream ordering + + Throws a StoreError if we have since purged the index for + stream_orderings from that point. 
+        """
+
+        if stream_ordering <= self.stream_ordering_month_ago:  # type: ignore[attr-defined]
+            raise StoreError(400, f"stream_ordering too old {stream_ordering}")
+
+        sql = """
+            SELECT 1 FROM stream_ordering_to_exterm
+            WHERE stream_ordering > ? AND room_id = ?
+            LIMIT 1
+        """
+
+        def have_room_forward_extremities_changed_since_txn(
+            txn: LoggingTransaction,
+        ) -> bool:
+            txn.execute(sql, (stream_ordering, room_id))
+            return txn.fetchone() is not None
+
+        return await self.db_pool.runInteraction(
+            "have_room_forward_extremities_changed_since",
+            have_room_forward_extremities_changed_since_txn,
+        )
+
     @cancellable
     async def get_forward_extremities_for_room_at_stream_ordering(
         self, room_id: str, stream_ordering: int
@@ -1232,10 +1264,17 @@ def get_forward_extremeties_for_room_txn(txn: LoggingTransaction) -> List[str]:
             txn.execute(sql, (stream_ordering, room_id))
             return [event_id for event_id, in txn]
 
-        return await self.db_pool.runInteraction(
+        event_ids = await self.db_pool.runInteraction(
             "get_forward_extremeties_for_room", get_forward_extremeties_for_room_txn
         )
 
+        # If we didn't find any IDs, then we must have cleared out the
+        # associated `stream_ordering_to_exterm`.
+        if not event_ids:
+            raise StoreError(400, "stream_ordering too old %s" % (stream_ordering,))
+
+        return event_ids
+
     def _get_connected_batch_event_backfill_results_txn(
         self, txn: LoggingTransaction, insertion_event_id: str, limit: int
     ) -> List[BackfillQueueNavigationItem]:
@@ -1664,19 +1703,12 @@ async def get_successor_events(self, event_id: str) -> List[str]:
     @wrap_as_background_process("delete_old_forward_extrem_cache")
     async def _delete_old_forward_extrem_cache(self) -> None:
         def _delete_old_forward_extrem_cache_txn(txn: LoggingTransaction) -> None:
-            # Delete entries older than a month, while making sure we don't delete
-            # the only entries for a room.
             sql = """
                 DELETE FROM stream_ordering_to_exterm
-                WHERE
-                    room_id IN (
-                        SELECT room_id
-                        FROM stream_ordering_to_exterm
-                        WHERE stream_ordering > ?
-                    ) AND stream_ordering < ?
+                WHERE stream_ordering < ?
             """
             txn.execute(
-                sql, (self.stream_ordering_month_ago, self.stream_ordering_month_ago)  # type: ignore[attr-defined]
+                sql, (self.stream_ordering_month_ago,)  # type: ignore[attr-defined]
             )
 
         await self.db_pool.runInteraction(
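The new check gates the device-list code path shown earlier in this patch: if a
room's forward extremities have not changed since the given stream ordering,
its current state cannot have changed either, so the expensive state fetch can
be skipped. A minimal sketch of that calling pattern (illustration only, not
code from the patch; `store` stands in for the real data store, and the
StoreError branch covers the case where the stream_ordering_to_exterm rows for
that point have already been purged):

    from synapse.api.errors import StoreError

    async def rooms_needing_state_check(store, room_ids, from_ordering):
        """Illustrative only: `store` stands in for the real data store."""
        needs_check = []
        for room_id in room_ids:
            try:
                if not await store.have_room_forward_extremities_changed_since(
                    room_id, from_ordering
                ):
                    # Unchanged extremities imply unchanged current state.
                    continue
            except StoreError:
                # The index no longer reaches back this far, so we cannot tell
                # cheaply; fall through to the full check.
                pass
            needs_check.append(room_id)
        return needs_check

From e708a33cd917d520ea89373bbcc3ea22b9a22a1e Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 6 Apr 2023 17:50:39 +0100
Subject: [PATCH 064/106] Bump dawidd6/action-download-artifact from 2.26.0 to
 2.26.1 (#15404)

* Bump dawidd6/action-download-artifact from 2.26.0 to 2.26.1

Bumps [dawidd6/action-download-artifact](https://github.com/dawidd6/action-download-artifact) from 2.26.0 to 2.26.1.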
Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- .github/workflows/docs-pr-netlify.yaml | 2 +- changelog.d/15404.misc | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15404.misc diff --git a/.github/workflows/docs-pr-netlify.yaml b/.github/workflows/docs-pr-netlify.yaml index a5e74eb297db..6cba6315c216 100644 --- a/.github/workflows/docs-pr-netlify.yaml +++ b/.github/workflows/docs-pr-netlify.yaml @@ -14,7 +14,7 @@ jobs: # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess: - name: 📥 Download artifact - uses: dawidd6/action-download-artifact@5e780fc7bbd0cac69fc73271ed86edf5dcb72d67 # v2.26.0 + uses: dawidd6/action-download-artifact@7132ab516fba5f602fafae6fdd4822afa10db76f # v2.26.1 with: workflow: docs-pr.yaml run_id: ${{ github.event.workflow_run.id }} diff --git a/changelog.d/15404.misc b/changelog.d/15404.misc new file mode 100644 index 000000000000..c5ad99073f1d --- /dev/null +++ b/changelog.d/15404.misc @@ -0,0 +1 @@ +Bump dawidd6/action-download-artifact from 2.26.0 to 2.26.1. From d62076003d4f88f3fe9b7bbf4e8022d5187fb709 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 6 Apr 2023 19:08:28 +0100 Subject: [PATCH 065/106] Bump types-opentracing from 2.4.10.3 to 2.4.10.4 (#15376) * Bump types-opentracing from 2.4.10.3 to 2.4.10.4 Bumps [types-opentracing](https://github.com/python/typeshed) from 2.4.10.3 to 2.4.10.4. - [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-opentracing dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/15376.misc | 1 + poetry.lock | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15376.misc diff --git a/changelog.d/15376.misc b/changelog.d/15376.misc new file mode 100644 index 000000000000..34d24dbf9c06 --- /dev/null +++ b/changelog.d/15376.misc @@ -0,0 +1 @@ +Bump types-opentracing from 2.4.10.3 to 2.4.10.4. 
diff --git a/poetry.lock b/poetry.lock index 978a6e159838..f4a7cf8e0034 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3045,14 +3045,14 @@ files = [ [[package]] name = "types-opentracing" -version = "2.4.10.3" +version = "2.4.10.4" description = "Typing stubs for opentracing" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-opentracing-2.4.10.3.tar.gz", hash = "sha256:b277f114265b41216714f9c77dffcab57038f1730fd141e2c55c5c9f6f2caa87"}, - {file = "types_opentracing-2.4.10.3-py3-none-any.whl", hash = "sha256:60244d718fcd9de7043645ecaf597222d550432507098ab2e6268f7b589a7fa7"}, + {file = "types-opentracing-2.4.10.4.tar.gz", hash = "sha256:347040c9da4ada7d3c795659912c95d98c5651e242e8eaa0344815fee5bb97e2"}, + {file = "types_opentracing-2.4.10.4-py3-none-any.whl", hash = "sha256:73c9b958eea3df6c4906ebf3865608a562dd9981c1bbc75a373a583c613bed56"}, ] [[package]] From b5355dfde8356da42dc053b4ee2a20ce2d2798a4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 6 Apr 2023 19:08:59 +0100 Subject: [PATCH 066/106] Bump types-netaddr from 0.8.0.6 to 0.8.0.7 (#15375) * Bump types-netaddr from 0.8.0.6 to 0.8.0.7 Bumps [types-netaddr](https://github.com/python/typeshed) from 0.8.0.6 to 0.8.0.7. - [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-netaddr dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: David Robertson --- changelog.d/15375.misc | 1 + poetry.lock | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15375.misc diff --git a/changelog.d/15375.misc b/changelog.d/15375.misc new file mode 100644 index 000000000000..7ea628047090 --- /dev/null +++ b/changelog.d/15375.misc @@ -0,0 +1 @@ +Bump types-netaddr from 0.8.0.6 to 0.8.0.7. diff --git a/poetry.lock b/poetry.lock index f4a7cf8e0034..03a36d3be6ab 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3033,14 +3033,14 @@ files = [ [[package]] name = "types-netaddr" -version = "0.8.0.6" +version = "0.8.0.7" description = "Typing stubs for netaddr" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-netaddr-0.8.0.6.tar.gz", hash = "sha256:e5048640c2412e7ea2d3eb02c94ae1b50442b2c7a50a7c48e957676139cdf19b"}, - {file = "types_netaddr-0.8.0.6-py3-none-any.whl", hash = "sha256:d4d40d1ba35430a4e4c929596542cd37e6831f5d08676b33dc84e06e01a840f6"}, + {file = "types-netaddr-0.8.0.7.tar.gz", hash = "sha256:3362864fa0258782d449b91707f37e55f62290b4f438974a08758b498169e109"}, + {file = "types_netaddr-0.8.0.7-py3-none-any.whl", hash = "sha256:a540cdfb2f858a0509ce5a4e4fcc80ef11b19f10a2473e48d32217af517818c0"}, ] [[package]] From 61251275fe19d01be59e890b4193aaffd19893b3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 6 Apr 2023 19:09:24 +0100 Subject: [PATCH 067/106] Bump types-psycopg2 from 2.9.21.8 to 2.9.21.9 (#15374) * Bump types-psycopg2 from 2.9.21.8 to 2.9.21.9 Bumps [types-psycopg2](https://github.com/python/typeshed) from 2.9.21.8 to 2.9.21.9. 
- [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-psycopg2 dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions Co-authored-by: Mathieu Velten Co-authored-by: David Robertson --- changelog.d/15374.misc | 1 + poetry.lock | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15374.misc diff --git a/changelog.d/15374.misc b/changelog.d/15374.misc new file mode 100644 index 000000000000..e0c076719c1d --- /dev/null +++ b/changelog.d/15374.misc @@ -0,0 +1 @@ +Bump types-psycopg2 from 2.9.21.8 to 2.9.21.9. diff --git a/poetry.lock b/poetry.lock index 03a36d3be6ab..acb98adb63c0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3069,14 +3069,14 @@ files = [ [[package]] name = "types-psycopg2" -version = "2.9.21.8" +version = "2.9.21.9" description = "Typing stubs for psycopg2" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-psycopg2-2.9.21.8.tar.gz", hash = "sha256:b629440ffcfdebd742fab07f777ff69aefdd19394a138c18e921a1964c3cf5f6"}, - {file = "types_psycopg2-2.9.21.8-py3-none-any.whl", hash = "sha256:e747fbec6e0e2502b625bc7686d13cc62fc170e8ae920e5ba27fac946778eeb9"}, + {file = "types-psycopg2-2.9.21.9.tar.gz", hash = "sha256:388dc36a04551632289c4aaf1fc5b91e147654b165db896d094844e216f22bf5"}, + {file = "types_psycopg2-2.9.21.9-py3-none-any.whl", hash = "sha256:0332525fb9d3031d3da46f091e7d40b2c4d4958e9c00d2b4c1eaaa9f8ef9de4e"}, ] [[package]] From d5cc911167d6815ae877a6c7146b77f00cd1d926 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Thu, 6 Apr 2023 19:41:29 +0100 Subject: [PATCH 068/106] Trust dtolnay/rust-toolchain (#15406) * Trust dtolnay/rust-toolchain The author is a big deal in the Rust world and I'm happy to trust them. I'm also bored of the dependabot updates tbh. 
* Changelog --- .github/workflows/latest_deps.yml | 12 ++--- .github/workflows/tests.yml | 70 +++++------------------------ .github/workflows/twisted_trunk.yml | 12 ++--- changelog.d/15406.misc | 1 + 4 files changed, 18 insertions(+), 77 deletions(-) create mode 100644 changelog.d/15406.misc diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index c3705b059b20..452600ba1633 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -27,9 +27,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d - with: - toolchain: stable + uses: dtolnay/rust-toolchain@stable - uses: Swatinem/rust-cache@v2 # The dev dependencies aren't exposed in the wheel metadata (at least with current @@ -61,9 +59,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d - with: - toolchain: stable + uses: dtolnay/rust-toolchain@stable - uses: Swatinem/rust-cache@v2 - run: sudo apt-get -qq install xmlsec1 @@ -134,9 +130,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d - with: - toolchain: stable + uses: dtolnay/rust-toolchain@stable - uses: Swatinem/rust-cache@v2 - name: Ensure sytest runs `pip install` diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f895163e517a..f01342c8b776 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -35,12 +35,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Install Rust - # There don't seem to be versioned releases of this action per se: for each rust - # version there is a branch which gets constantly rebased on top of master. - # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d - with: - toolchain: 1.58.1 + uses: dtolnay/rust-toolchain@1.58.1 - uses: Swatinem/rust-cache@v2 - uses: matrix-org/setup-python-poetry@v1 with: @@ -104,12 +99,7 @@ jobs: with: ref: ${{ github.event.pull_request.head.sha }} - name: Install Rust - # There don't seem to be versioned releases of this action per se: for each rust - # version there is a branch which gets constantly rebased on top of master. - # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d - with: - toolchain: 1.58.1 + uses: dtolnay/rust-toolchain@1.58.1 - uses: Swatinem/rust-cache@v2 - uses: matrix-org/setup-python-poetry@v1 with: @@ -126,12 +116,8 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - # There don't seem to be versioned releases of this action per se: for each rust - # version there is a branch which gets constantly rebased on top of master. - # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d + uses: dtolnay/rust-toolchain@1.58.1 with: - toolchain: 1.58.1 components: clippy - uses: Swatinem/rust-cache@v2 @@ -148,10 +134,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - # There don't seem to be versioned releases of this action per se: for each rust - # version there is a branch which gets constantly rebased on top of master. - # We pin to a specific commit for paranoia's sake. 
- uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d + uses: dtolnay/rust-toolchain@master with: toolchain: nightly-2022-12-01 components: clippy @@ -168,10 +151,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - # There don't seem to be versioned releases of this action per se: for each rust - # version there is a branch which gets constantly rebased on top of master. - # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d + uses: dtolnay/rust-toolchain@master with: # We use nightly so that it correctly groups together imports toolchain: nightly-2022-12-01 @@ -236,12 +216,7 @@ jobs: postgres:${{ matrix.job.postgres-version }} - name: Install Rust - # There don't seem to be versioned releases of this action per se: for each rust - # version there is a branch which gets constantly rebased on top of master. - # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d - with: - toolchain: 1.58.1 + uses: dtolnay/rust-toolchain@1.58.1 - uses: Swatinem/rust-cache@v2 - uses: matrix-org/setup-python-poetry@v1 @@ -281,12 +256,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - # There don't seem to be versioned releases of this action per se: for each rust - # version there is a branch which gets constantly rebased on top of master. - # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d - with: - toolchain: 1.58.1 + uses: dtolnay/rust-toolchain@1.58.1 - uses: Swatinem/rust-cache@v2 # There aren't wheels for some of the older deps, so we need to install @@ -402,12 +372,7 @@ jobs: run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers - name: Install Rust - # There don't seem to be versioned releases of this action per se: for each rust - # version there is a branch which gets constantly rebased on top of master. - # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d - with: - toolchain: 1.58.1 + uses: dtolnay/rust-toolchain@1.58.1 - uses: Swatinem/rust-cache@v2 - name: Run SyTest @@ -547,12 +512,7 @@ jobs: path: synapse - name: Install Rust - # There don't seem to be versioned releases of this action per se: for each rust - # version there is a branch which gets constantly rebased on top of master. - # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d - with: - toolchain: 1.58.1 + uses: dtolnay/rust-toolchain@1.58.1 - uses: Swatinem/rust-cache@v2 - uses: actions/setup-go@v4 @@ -580,12 +540,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - # There don't seem to be versioned releases of this action per se: for each rust - # version there is a branch which gets constantly rebased on top of master. - # We pin to a specific commit for paranoia's sake. - uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d - with: - toolchain: 1.58.1 + uses: dtolnay/rust-toolchain@1.58.1 - uses: Swatinem/rust-cache@v2 - run: cargo test @@ -603,10 +558,7 @@ jobs: - uses: actions/checkout@v3 - name: Install Rust - # There don't seem to be versioned releases of this action per se: for each rust - # version there is a branch which gets constantly rebased on top of master. - # We pin to a specific commit for paranoia's sake. 
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
+        uses: dtolnay/rust-toolchain@master
         with:
           toolchain: nightly-2022-12-01
       - uses: Swatinem/rust-cache@v2
diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml
index ad35c6b57089..14fc6a0389c3 100644
--- a/.github/workflows/twisted_trunk.yml
+++ b/.github/workflows/twisted_trunk.yml
@@ -25,9 +25,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: stable
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
 
       - uses: matrix-org/setup-python-poetry@v1
@@ -50,9 +48,7 @@ jobs:
       - run: sudo apt-get -qq install xmlsec1
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: stable
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
 
       - uses: matrix-org/setup-python-poetry@v1
@@ -89,9 +85,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: stable
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
 
       - name: Patch dependencies
diff --git a/changelog.d/15406.misc b/changelog.d/15406.misc
new file mode 100644
index 000000000000..d2f9eb0dd473
--- /dev/null
+++ b/changelog.d/15406.misc
@@ -0,0 +1 @@
+Trust dtolnay/rust-toolchain in CI.

From 3ad221ea4033e07893842dcf9a96155e9648f754 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 6 Apr 2023 20:14:49 +0100
Subject: [PATCH 069/106] Bump pyopenssl from 23.1.0 to 23.1.1 (#15373)

* Bump pyopenssl from 23.1.0 to 23.1.1

Bumps [pyopenssl](https://github.com/pyca/pyopenssl) from 23.1.0 to 23.1.1.
- [Release notes](https://github.com/pyca/pyopenssl/releases)
- [Changelog](https://github.com/pyca/pyopenssl/blob/main/CHANGELOG.rst)
- [Commits](https://github.com/pyca/pyopenssl/compare/23.1.0...23.1.1)

---
updated-dependencies:
- dependency-name: pyopenssl
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot]

* Changelog

---------

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: GitHub Actions
Co-authored-by: Mathieu Velten
Co-authored-by: David Robertson
---
 changelog.d/15373.misc | 1 +
 poetry.lock            | 6 +++---
 2 files changed, 4 insertions(+), 3 deletions(-)
 create mode 100644 changelog.d/15373.misc

diff --git a/changelog.d/15373.misc b/changelog.d/15373.misc
new file mode 100644
index 000000000000..e93801b0de93
--- /dev/null
+++ b/changelog.d/15373.misc
@@ -0,0 +1 @@
+Bump pyopenssl from 23.1.0 to 23.1.1.
diff --git a/poetry.lock b/poetry.lock index acb98adb63c0..3c464ac3e231 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2060,14 +2060,14 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyopenssl" -version = "23.1.0" +version = "23.1.1" description = "Python wrapper module around the OpenSSL library" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "pyOpenSSL-23.1.0-py3-none-any.whl", hash = "sha256:fb96e936866ad65662c22d0de84ca0fba58397893cdfe0f01334fa93382af23c"}, - {file = "pyOpenSSL-23.1.0.tar.gz", hash = "sha256:8cb78010a1eb2c8e24b851693b7b04dfe9b1dc0a5ab3843927b10a85b1dfbb2e"}, + {file = "pyOpenSSL-23.1.1-py3-none-any.whl", hash = "sha256:9e0c526404a210df9d2b18cd33364beadb0dc858a739b885677bc65e105d4a4c"}, + {file = "pyOpenSSL-23.1.1.tar.gz", hash = "sha256:841498b9bec61623b1b6c47ebbc02367c07d60e0e195f19790817f10cc8db0b7"}, ] [package.dependencies] From d07d2558305057cba6a7e2cb7644c24cf2dcf9be Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Thu, 6 Apr 2023 16:26:28 -0400 Subject: [PATCH 070/106] Implement MSC2175: remove the creator field from create events. (#15394) --- changelog.d/15394.misc | 1 + synapse/api/constants.py | 2 ++ synapse/api/room_versions.py | 17 +++++++++++++++++ synapse/event_auth.py | 25 ++++++++++++++++++------- synapse/handlers/federation_event.py | 5 ++++- synapse/handlers/message.py | 7 ++++++- synapse/handlers/room.py | 22 ++++++++++++---------- synapse/storage/databases/main/room.py | 17 ++++++++++++----- 8 files changed, 72 insertions(+), 24 deletions(-) create mode 100644 changelog.d/15394.misc diff --git a/changelog.d/15394.misc b/changelog.d/15394.misc new file mode 100644 index 000000000000..91e6540438a6 --- /dev/null +++ b/changelog.d/15394.misc @@ -0,0 +1 @@ +Implement [MSC2175](https://github.com/matrix-org/matrix-doc/pull/2175) to stop adding `creator` to create events. diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 0f224b34cd95..c56b2f256150 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -215,6 +215,8 @@ class EventContentFields: FEDERATE: Final = "m.federate" # The creator of the room, as used in `m.room.create` events. + # + # This is deprecated in MSC2175. ROOM_CREATOR: Final = "creator" # Used in m.room.guest_access events. diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py index bc15f2d0631f..3dcae12161d7 100644 --- a/synapse/api/room_versions.py +++ b/synapse/api/room_versions.py @@ -78,6 +78,8 @@ class RoomVersion: # MSC2209: Check 'notifications' key while verifying # m.room.power_levels auth rules. limit_notifications_power_levels: bool + # MSC2175: No longer include the creator in m.room.create events. + msc2175_implicit_room_creator: bool # MSC2174/MSC2176: Apply updated redaction rules algorithm. msc2176_redaction_rules: bool # MSC3083: Support the 'restricted' join_rule. 
@@ -118,6 +120,7 @@ class RoomVersions: special_case_aliases_auth=True, strict_canonicaljson=False, limit_notifications_power_levels=False, + msc2175_implicit_room_creator=False, msc2176_redaction_rules=False, msc3083_join_rules=False, msc3375_redaction_rules=False, @@ -138,6 +141,7 @@ class RoomVersions: special_case_aliases_auth=True, strict_canonicaljson=False, limit_notifications_power_levels=False, + msc2175_implicit_room_creator=False, msc2176_redaction_rules=False, msc3083_join_rules=False, msc3375_redaction_rules=False, @@ -158,6 +162,7 @@ class RoomVersions: special_case_aliases_auth=True, strict_canonicaljson=False, limit_notifications_power_levels=False, + msc2175_implicit_room_creator=False, msc2176_redaction_rules=False, msc3083_join_rules=False, msc3375_redaction_rules=False, @@ -178,6 +183,7 @@ class RoomVersions: special_case_aliases_auth=True, strict_canonicaljson=False, limit_notifications_power_levels=False, + msc2175_implicit_room_creator=False, msc2176_redaction_rules=False, msc3083_join_rules=False, msc3375_redaction_rules=False, @@ -198,6 +204,7 @@ class RoomVersions: special_case_aliases_auth=True, strict_canonicaljson=False, limit_notifications_power_levels=False, + msc2175_implicit_room_creator=False, msc2176_redaction_rules=False, msc3083_join_rules=False, msc3375_redaction_rules=False, @@ -218,6 +225,7 @@ class RoomVersions: special_case_aliases_auth=False, strict_canonicaljson=True, limit_notifications_power_levels=True, + msc2175_implicit_room_creator=False, msc2176_redaction_rules=False, msc3083_join_rules=False, msc3375_redaction_rules=False, @@ -238,6 +246,7 @@ class RoomVersions: special_case_aliases_auth=False, strict_canonicaljson=True, limit_notifications_power_levels=True, + msc2175_implicit_room_creator=False, msc2176_redaction_rules=True, msc3083_join_rules=False, msc3375_redaction_rules=False, @@ -258,6 +267,7 @@ class RoomVersions: special_case_aliases_auth=False, strict_canonicaljson=True, limit_notifications_power_levels=True, + msc2175_implicit_room_creator=False, msc2176_redaction_rules=False, msc3083_join_rules=False, msc3375_redaction_rules=False, @@ -278,6 +288,7 @@ class RoomVersions: special_case_aliases_auth=False, strict_canonicaljson=True, limit_notifications_power_levels=True, + msc2175_implicit_room_creator=False, msc2176_redaction_rules=False, msc3083_join_rules=True, msc3375_redaction_rules=False, @@ -298,6 +309,7 @@ class RoomVersions: special_case_aliases_auth=False, strict_canonicaljson=True, limit_notifications_power_levels=True, + msc2175_implicit_room_creator=False, msc2176_redaction_rules=False, msc3083_join_rules=True, msc3375_redaction_rules=True, @@ -318,6 +330,7 @@ class RoomVersions: special_case_aliases_auth=False, strict_canonicaljson=True, limit_notifications_power_levels=True, + msc2175_implicit_room_creator=False, msc2176_redaction_rules=False, msc3083_join_rules=True, msc3375_redaction_rules=True, @@ -338,6 +351,7 @@ class RoomVersions: special_case_aliases_auth=False, strict_canonicaljson=True, limit_notifications_power_levels=True, + msc2175_implicit_room_creator=False, msc2176_redaction_rules=False, msc3083_join_rules=True, msc3375_redaction_rules=True, @@ -358,6 +372,7 @@ class RoomVersions: special_case_aliases_auth=False, strict_canonicaljson=True, limit_notifications_power_levels=True, + msc2175_implicit_room_creator=False, msc2176_redaction_rules=False, msc3083_join_rules=False, msc3375_redaction_rules=False, @@ -379,6 +394,7 @@ class RoomVersions: special_case_aliases_auth=False, strict_canonicaljson=True, 
limit_notifications_power_levels=True, + msc2175_implicit_room_creator=False, msc2176_redaction_rules=False, msc3083_join_rules=True, msc3375_redaction_rules=True, @@ -399,6 +415,7 @@ class RoomVersions: special_case_aliases_auth=False, strict_canonicaljson=True, limit_notifications_power_levels=True, + msc2175_implicit_room_creator=False, msc2176_redaction_rules=False, msc3083_join_rules=True, msc3375_redaction_rules=True, diff --git a/synapse/event_auth.py b/synapse/event_auth.py index af55874b5c47..f95d00d4729b 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -455,8 +455,11 @@ def _check_create(event: "EventBase") -> None: "room appears to have unsupported version %s" % (room_version_prop,), ) - # 1.4 If content has no creator field, reject. - if EventContentFields.ROOM_CREATOR not in event.content: + # 1.4 If content has no creator field, reject if the room version requires it. + if ( + not event.room_version.msc2175_implicit_room_creator + and EventContentFields.ROOM_CREATOR not in event.content + ): raise AuthError(403, "Create event lacks a 'creator' property") @@ -491,7 +494,11 @@ def _is_membership_change_allowed( key = (EventTypes.Create, "") create = auth_events.get(key) if create and event.prev_event_ids()[0] == create.event_id: - if create.content["creator"] == event.state_key: + if room_version.msc2175_implicit_room_creator: + creator = create.sender + else: + creator = create.content[EventContentFields.ROOM_CREATOR] + if creator == event.state_key: return target_user_id = event.state_key @@ -1004,10 +1011,14 @@ def get_user_power_level(user_id: str, auth_events: StateMap["EventBase"]) -> in # that. key = (EventTypes.Create, "") create_event = auth_events.get(key) - if create_event is not None and create_event.content["creator"] == user_id: - return 100 - else: - return 0 + if create_event is not None: + if create_event.room_version.msc2175_implicit_room_creator: + creator = create_event.sender + else: + creator = create_event.content[EventContentFields.ROOM_CREATOR] + if creator == user_id: + return 100 + return 0 def get_named_level(auth_events: StateMap["EventBase"], name: str, default: int) -> int: diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py index 982c8d3b2ff1..8d5be81a9207 100644 --- a/synapse/handlers/federation_event.py +++ b/synapse/handlers/federation_event.py @@ -1515,7 +1515,10 @@ async def _handle_marker_event(self, origin: str, marker_event: EventBase) -> No # support it or the event is not from the room creator. 
room_version = await self._store.get_room_version(marker_event.room_id) create_event = await self._store.get_create_event_for_room(marker_event.room_id) - room_creator = create_event.content.get(EventContentFields.ROOM_CREATOR) + if not room_version.msc2175_implicit_room_creator: + room_creator = create_event.content.get(EventContentFields.ROOM_CREATOR) + else: + room_creator = create_event.sender if not room_version.msc2716_historical and ( not self._config.experimental.msc2716_enabled or marker_event.sender != room_creator diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 4c75433a635f..a17fe3bf530b 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -1909,7 +1909,12 @@ async def persist_and_notify_client_events( room_version_obj = KNOWN_ROOM_VERSIONS[room_version] create_event = await self.store.get_create_event_for_room(event.room_id) - room_creator = create_event.content.get(EventContentFields.ROOM_CREATOR) + if not room_version_obj.msc2175_implicit_room_creator: + room_creator = create_event.content.get( + EventContentFields.ROOM_CREATOR + ) + else: + room_creator = create_event.sender # Only check an insertion event if the room version # supports it or the event is from the room creator. diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index be120cb12f36..2d69cabf43d0 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -567,6 +567,7 @@ async def clone_existing_room( await self._send_events_for_new_room( requester, new_room_id, + new_room_version, # we expect to override all the presets with initial_state, so this is # somewhat arbitrary. room_config={"preset": RoomCreationPreset.PRIVATE_CHAT}, @@ -922,6 +923,7 @@ async def create_room( ) = await self._send_events_for_new_room( requester, room_id, + room_version, room_config=config, invite_list=invite_list, initial_state=initial_state, @@ -998,6 +1000,7 @@ async def _send_events_for_new_room( self, creator: Requester, room_id: str, + room_version: RoomVersion, room_config: JsonDict, invite_list: List[str], initial_state: MutableStateMap, @@ -1020,6 +1023,8 @@ async def _send_events_for_new_room( the user requesting the room creation room_id: room id for the room being created + room_version: + The room version of the new room. room_config: A dict of configuration options. This will be the body of a /createRoom request; see @@ -1053,14 +1058,6 @@ async def _send_events_for_new_room( # (as this info can't be pulled from the db) state_map: MutableStateMap[str] = {} - def create_event_dict(etype: str, content: JsonDict, **kwargs: Any) -> JsonDict: - e = {"type": etype, "content": content} - - e.update(event_keys) - e.update(kwargs) - - return e - async def create_event( etype: str, content: JsonDict, @@ -1083,7 +1080,10 @@ async def create_event( nonlocal depth nonlocal prev_event - event_dict = create_event_dict(etype, content, **kwargs) + # Create the event dictionary. + event_dict = {"type": etype, "content": content} + event_dict.update(event_keys) + event_dict.update(kwargs) ( new_event, @@ -1120,7 +1120,9 @@ async def create_event( 400, f"'{preset_config}' is not a valid preset", errcode=Codes.BAD_JSON ) - creation_content.update({"creator": creator_id}) + # MSC2175 removes the creator field from the create event. 
+        if not room_version.msc2175_implicit_room_creator:
+            creation_content["creator"] = creator_id
         creation_event, unpersisted_creation_context = await create_event(
             EventTypes.Create, creation_content, False
         )
diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py
index 3825bd60797c..dd7dbb6901c8 100644
--- a/synapse/storage/databases/main/room.py
+++ b/synapse/storage/databases/main/room.py
@@ -1998,6 +1998,9 @@ def _background_populate_rooms_creator_column_txn(
         for room_id, event_json in room_id_to_create_event_results:
             event_dict = db_to_json(event_json)
 
+            # The creator property might not exist in newer room versions, but
+            # for those versions the creator column should be properly populated
+            # during room creation.
             creator = event_dict.get("content").get(EventContentFields.ROOM_CREATOR)
 
             self.db_pool.simple_update_txn(
@@ -2132,12 +2135,16 @@ async def upsert_room_on_join(
             # invalid, and it would fail auth checks anyway.
             raise StoreError(400, "No create event in state")
 
-        room_creator = create_event.content.get(EventContentFields.ROOM_CREATOR)
+        # Before MSC2175, the room creator was a separate field.
+        if not room_version.msc2175_implicit_room_creator:
+            room_creator = create_event.content.get(EventContentFields.ROOM_CREATOR)
 
-        if not isinstance(room_creator, str):
-            # If the create event does not have a creator then the room is
-            # invalid, and it would fail auth checks anyway.
-            raise StoreError(400, "No creator defined on the create event")
+            if not isinstance(room_creator, str):
+                # If the create event does not have a creator then the room is
+                # invalid, and it would fail auth checks anyway.
+                raise StoreError(400, "No creator defined on the create event")
+        else:
+            room_creator = create_event.sender
 
         await self.db_pool.simple_upsert(
             desc="upsert_room_on_join",
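Across event auth, the federation handler, the message handler and the room
store, this patch repeats one rule for resolving a room's creator. A hedged
restatement as a standalone helper (no such helper exists in Synapse; this is
purely illustrative):

    from synapse.api.constants import EventContentFields

    def room_creator_of(create_event) -> str:
        # MSC2175 rooms: the creator is implicit in the create event's sender.
        if create_event.room_version.msc2175_implicit_room_creator:
            return create_event.sender
        # Older room versions carry an explicit "creator" content field.
        return create_event.content[EventContentFields.ROOM_CREATOR]

From 3dd72b924edf5ba2dc3738154e5fe29bc61597c7 Mon Sep 17 00:00:00 2001
From: David Robertson
Date: Mon, 10 Apr 2023 13:25:08 +0100
Subject: [PATCH 071/106] Attempt to fix weird mypy failures on ignored files.
 (#15409)

By inlining the typechecking job from backend-meta. This seems to resolve odd
errors (maybe due to caching?) that have been seen on Dependabot PRs.
---
 .github/workflows/tests.yml | 57 +++++++++++++++++++++++++++++++++++--
 changelog.d/15409.misc      |  1 +
 2 files changed, 55 insertions(+), 3 deletions(-)
 create mode 100644 changelog.d/15409.misc

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index f01342c8b776..4333f55a533d 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -65,9 +65,59 @@ jobs:
       - run: .ci/scripts/check_lockfile.py
 
   lint:
-    uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v2"
-    with:
-      typechecking-extras: "all"
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+
+      - name: Setup Poetry
+        uses: matrix-org/setup-python-poetry@v1
+        with:
+          install-project: "false"
+
+      - name: Import order (isort)
+        run: poetry run isort --check --diff .
+
+      - name: Code style (black)
+        run: poetry run black --check --diff .
+
+      - name: Semantic checks (ruff)
+        # --quiet suppresses the update check.
+        run: poetry run ruff --quiet .
+
+  lint-mypy:
+    runs-on: ubuntu-latest
+    name: Typechecking
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+
+      - name: Setup Poetry
+        uses: matrix-org/setup-python-poetry@v1
+        with:
+          # We want to make use of type hints in optional dependencies too.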
+ extras: all + # We have seen odd mypy failures that were resolved when we started + # installing the project again: + # https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775 + # To make CI green, err towards caution and install the project. + install-project: "true" + + - name: Install Rust + uses: dtolnay/rust-toolchain@1.58.1 + - uses: Swatinem/rust-cache@v2 + + # NB: I have two concerns with this action: + # 1. We occasionally see odd mypy problems that aren't reproducible + # locally with clean caches. I suspect some dodgy caching behaviour. + # 2. The action uses GHA machinery that's deprecated + # (https://github.com/AustinScola/mypy-cache-github-action/issues/277) + # It may be simpler to use actions/cache ourselves to restore .mypy_cache. + - name: Restore/persist mypy's cache + uses: AustinScola/mypy-cache-github-action@df56268388422ee282636ee2c7a9cc55ec644a41 + + - name: Run mypy + run: poetry run mypy lint-crlf: runs-on: ubuntu-latest @@ -165,6 +215,7 @@ jobs: if: ${{ !cancelled() }} # Run this even if prior jobs were skipped needs: - lint + - lint-mypy - lint-crlf - lint-newsfile - lint-pydantic diff --git a/changelog.d/15409.misc b/changelog.d/15409.misc new file mode 100644 index 000000000000..007111da34f2 --- /dev/null +++ b/changelog.d/15409.misc @@ -0,0 +1 @@ +Explicitly install Synapse during typechecking in CI. From 70781d36919d0b3d9d2b9638032dd8113e138c18 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Apr 2023 08:37:42 -0400 Subject: [PATCH 072/106] Bump black from 23.1.0 to 23.3.0 (#15372) --- changelog.d/15372.misc | 1 + poetry.lock | 52 +++++++++++++++++++++--------------------- 2 files changed, 27 insertions(+), 26 deletions(-) create mode 100644 changelog.d/15372.misc diff --git a/changelog.d/15372.misc b/changelog.d/15372.misc new file mode 100644 index 000000000000..a191353ff0f3 --- /dev/null +++ b/changelog.d/15372.misc @@ -0,0 +1 @@ +Bump black from 23.1.0 to 23.3.0. diff --git a/poetry.lock b/poetry.lock index 3c464ac3e231..2c309a47ec17 100644 --- a/poetry.lock +++ b/poetry.lock @@ -156,37 +156,37 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.1.0" +version = "23.3.0" description = "The uncompromising code formatter." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"}, - {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"}, - {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"}, - {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"}, - {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"}, - {file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"}, - {file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"}, - {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"}, - {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"}, - {file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"}, - {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"}, - {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"}, - {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"}, - {file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"}, - {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"}, - {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"}, - {file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"}, - {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"}, - {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"}, - {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"}, - {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"}, - {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"}, - {file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"}, - {file = "black-23.1.0-py3-none-any.whl", hash = 
"sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"}, - {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"}, + {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"}, + {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"}, + {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"}, + {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"}, + {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"}, + {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"}, + {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"}, + {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"}, + {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"}, + {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"}, + {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"}, + {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"}, + {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"}, + {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"}, + {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"}, + {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"}, ] [package.dependencies] From 8e0a3428d7e9d842917f3fa949f580e159e5b0a6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Apr 2023 09:18:40 -0400 Subject: [PATCH 073/106] Bump types-pillow from 9.4.0.17 to 9.4.0.19 (#15413) --- changelog.d/15413.misc | 1 + poetry.lock | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15413.misc diff --git a/changelog.d/15413.misc b/changelog.d/15413.misc new file mode 100644 index 000000000000..38acc41fef2f --- /dev/null +++ b/changelog.d/15413.misc @@ -0,0 +1 @@ +Bump types-pillow from 9.4.0.17 to 9.4.0.19. diff --git a/poetry.lock b/poetry.lock index 2c309a47ec17..c4eb368174f2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3057,14 +3057,14 @@ files = [ [[package]] name = "types-pillow" -version = "9.4.0.17" +version = "9.4.0.19" description = "Typing stubs for Pillow" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-Pillow-9.4.0.17.tar.gz", hash = "sha256:7f0e871d2d46fbb6bc7deca3e02dc552cf9c1e8b49deb9595509551be3954e49"}, - {file = "types_Pillow-9.4.0.17-py3-none-any.whl", hash = "sha256:f8b848a05f17cb4d53d245c59bf560372b9778d4cfaf9705f6245009bf9f65f3"}, + {file = "types-Pillow-9.4.0.19.tar.gz", hash = "sha256:a04401181979049977e318dae4523ab5ae8246314fc68fcf50b043ac885a5468"}, + {file = "types_Pillow-9.4.0.19-py3-none-any.whl", hash = "sha256:b55f2508be21e68a39f0a41830f1f1725aba0888e727e2eccd253c78cd5357a5"}, ] [[package]] From ce40330de1e8c6e8cd15e3db2939b3583c980620 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Apr 2023 09:19:02 -0400 Subject: [PATCH 074/106] Bump parameterized from 0.8.1 to 0.9.0 (#15412) --- changelog.d/15412.misc | 1 + poetry.lock | 8 ++++---- 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15412.misc diff --git a/changelog.d/15412.misc b/changelog.d/15412.misc new file mode 100644 index 000000000000..bdfb84ee90a5 --- /dev/null +++ b/changelog.d/15412.misc @@ -0,0 +1 @@ +Bump parameterized from 0.8.1 to 0.9.0. 
diff --git a/poetry.lock b/poetry.lock index c4eb368174f2..16f4baa4782e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1605,14 +1605,14 @@ files = [ [[package]] name = "parameterized" -version = "0.8.1" +version = "0.9.0" description = "Parameterized testing with any Python test framework" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "parameterized-0.8.1-py2.py3-none-any.whl", hash = "sha256:9cbb0b69a03e8695d68b3399a8a5825200976536fe1cb79db60ed6a4c8c9efe9"}, - {file = "parameterized-0.8.1.tar.gz", hash = "sha256:41bbff37d6186430f77f900d777e5bb6a24928a1c46fb1de692f8b52b8833b5c"}, + {file = "parameterized-0.9.0-py2.py3-none-any.whl", hash = "sha256:4e0758e3d41bea3bbd05ec14fc2c24736723f243b28d702081aef438c9372b1b"}, + {file = "parameterized-0.9.0.tar.gz", hash = "sha256:7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1"}, ] [package.extras] From 0bc10611196eab9dbd4e852cf37f8d4393418848 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Apr 2023 09:36:05 -0400 Subject: [PATCH 075/106] Bump sentry-sdk from 1.17.0 to 1.19.1 (#15414) --- changelog.d/15414.misc | 1 + poetry.lock | 7 ++++--- 2 files changed, 5 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15414.misc diff --git a/changelog.d/15414.misc b/changelog.d/15414.misc new file mode 100644 index 000000000000..3716f40e2530 --- /dev/null +++ b/changelog.d/15414.misc @@ -0,0 +1 @@ +Bump sentry-sdk from 1.17.0 to 1.19.1. diff --git a/poetry.lock b/poetry.lock index 16f4baa4782e..bc0cccbe5821 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2382,14 +2382,14 @@ doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "sentry-sdk" -version = "1.17.0" +version = "1.19.1" description = "Python client for Sentry (https://sentry.io)" category = "main" optional = true python-versions = "*" files = [ - {file = "sentry-sdk-1.17.0.tar.gz", hash = "sha256:ad40860325c94d1a656da70fba5a7c4dbb2f6809d3cc2d00f74ca0b608330f14"}, - {file = "sentry_sdk-1.17.0-py2.py3-none-any.whl", hash = "sha256:3c4e898f7a3edf5a2042cd0dcab6ee124e2112189228c272c08ad15d3850c201"}, + {file = "sentry-sdk-1.19.1.tar.gz", hash = "sha256:7ae78bd921981a5010ab540d6bdf3b793659a4db8cccf7f16180702d48a80d84"}, + {file = "sentry_sdk-1.19.1-py2.py3-none-any.whl", hash = "sha256:885a11c69df23e53eb281d003b9ff15a5bdfa43d8a2a53589be52104a1b4582f"}, ] [package.dependencies] @@ -2407,6 +2407,7 @@ django = ["django (>=1.8)"] falcon = ["falcon (>=1.4)"] fastapi = ["fastapi (>=0.79.0)"] flask = ["blinker (>=1.1)", "flask (>=0.11)"] +grpcio = ["grpcio (>=1.21.1)"] httpx = ["httpx (>=0.16.0)"] huey = ["huey (>=2)"] opentelemetry = ["opentelemetry-distro (>=0.35b0)"] From c94307adfe71f35a3852ba28b13b145f66cc8308 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Apr 2023 09:36:56 -0400 Subject: [PATCH 076/106] Bump immutabledict from 2.2.3 to 2.2.4 (#15415) --- changelog.d/15415.misc | 1 + poetry.lock | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15415.misc diff --git a/changelog.d/15415.misc b/changelog.d/15415.misc new file mode 100644 index 000000000000..47aeee0b2b48 --- /dev/null +++ b/changelog.d/15415.misc @@ -0,0 +1 @@ +Bump immutabledict from 2.2.3 to 2.2.4. 
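immutabledict is the maintained fork of frozendict (see the lock entry below)
that Synapse's freeze() utility uses to make event contents read-only. A small
sketch of the behaviour the library provides, with invented values:

    from immutabledict import immutabledict

    content = immutabledict({"msgtype": "m.text", "body": "hello"})
    assert content["body"] == "hello"

    try:
        content["body"] = "edited"  # type: ignore[index]
    except TypeError:
        # immutabledict does not implement __setitem__, so mutation fails.
        pass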
diff --git a/poetry.lock b/poetry.lock index bc0cccbe5821..353b79fa5e39 100644 --- a/poetry.lock +++ b/poetry.lock @@ -855,14 +855,14 @@ files = [ [[package]] name = "immutabledict" -version = "2.2.3" +version = "2.2.4" description = "Immutable wrapper around dictionaries (a fork of frozendict)" category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "immutabledict-2.2.3-py3-none-any.whl", hash = "sha256:a7b078ebcc4a58ddc73b55f808b26e7c8c2d5183fad325615112689e1a63e714"}, - {file = "immutabledict-2.2.3.tar.gz", hash = "sha256:0e1e8a3f2b3ff062daa19795f947e9ec7a58add269d44e34d3ab4319e1343853"}, + {file = "immutabledict-2.2.4-py3-none-any.whl", hash = "sha256:c827715c147d2364522f9a7709cc424c7001015274a3c705250e673605bde64b"}, + {file = "immutabledict-2.2.4.tar.gz", hash = "sha256:3bedc0741faaa2846f6edf5c29183f993da3abaff6a5961bb70a5659bb9e68ab"}, ] [[package]] From c1b7da69ccd217145892157777b367924fa0028d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 11 Apr 2023 14:18:54 +0100 Subject: [PATCH 077/106] 1.81.0 --- CHANGES.md | 10 ++++++++-- debian/changelog | 6 ++++++ pyproject.toml | 2 +- 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 70f1e0cbd05c..182d7ddd899d 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,5 +1,5 @@ -Synapse 1.81.0rc2 (2023-04-06) -============================== +Synapse 1.81.0 (2023-04-11) +=========================== Synapse now attempts the versioned appservice paths before falling back to the [legacy paths](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes). @@ -14,6 +14,12 @@ A future version of Synapse (v1.88.0 or later) will remove support for legacy application service routes and query parameter authorization. +No significant changes since 1.81.0rc2. + + +Synapse 1.81.0rc2 (2023-04-06) +============================== + Bugfixes -------- diff --git a/debian/changelog b/debian/changelog index 850afe12a93f..9eee66fda3a0 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.81.0) stable; urgency=medium + + * New Synapse release 1.81.0. + + -- Synapse Packaging team Tue, 11 Apr 2023 14:18:35 +0100 + matrix-synapse-py3 (1.81.0~rc2) stable; urgency=medium * New Synapse release 1.81.0rc2. 
diff --git a/pyproject.toml b/pyproject.toml index 1386dd859f85..3ffd96c50dc5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml" [tool.poetry] name = "matrix-synapse" -version = "1.81.0rc2" +version = "1.81.0" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "Apache-2.0" From 253e86a72e0c8e4e014b4b08fa587afe1de4db22 Mon Sep 17 00:00:00 2001 From: Will Hunt Date: Wed, 12 Apr 2023 12:28:46 +0100 Subject: [PATCH 078/106] Throw if the appservice config list is the wrong type (#15425) * raise a ConfigError on an invalid app_service_config_files * changelog * Move config check to read_config * Add test * Ensure list also contains strings --- changelog.d/15425.bugfix | 1 + synapse/config/appservice.py | 14 ++++++++---- tests/config/test_appservice.py | 40 +++++++++++++++++++++++++++++++++ 3 files changed, 51 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15425.bugfix create mode 100644 tests/config/test_appservice.py diff --git a/changelog.d/15425.bugfix b/changelog.d/15425.bugfix new file mode 100644 index 000000000000..fd104a63b3bd --- /dev/null +++ b/changelog.d/15425.bugfix @@ -0,0 +1 @@ +Synapse now correctly fails to start if the config option `app_service_config_files` is not a list. \ No newline at end of file diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py index 00182090b2ea..fd89960e7261 100644 --- a/synapse/config/appservice.py +++ b/synapse/config/appservice.py @@ -33,6 +33,16 @@ class AppServiceConfig(Config): def read_config(self, config: JsonDict, **kwargs: Any) -> None: self.app_service_config_files = config.get("app_service_config_files", []) + if not isinstance(self.app_service_config_files, list) or not all( + type(x) is str for x in self.app_service_config_files + ): + # type-ignore: this function gets arbitrary json value; we do use this path. + raise ConfigError( + "Expected '%s' to be a list of AS config files:" + % (self.app_service_config_files), + "app_service_config_files", + ) + self.track_appservice_user_ips = config.get("track_appservice_user_ips", False) @@ -40,10 +50,6 @@ def load_appservices( hostname: str, config_files: List[str] ) -> List[ApplicationService]: """Returns a list of Application Services from the config files.""" - if not isinstance(config_files, list): - # type-ignore: this function gets arbitrary json value; we do use this path. - logger.warning("Expected %s to be a list of AS config files.", config_files) # type: ignore[unreachable] - return [] # Dicts of value -> filename seen_as_tokens: Dict[str, str] = {} diff --git a/tests/config/test_appservice.py b/tests/config/test_appservice.py new file mode 100644 index 000000000000..d2d1a40dfcb8 --- /dev/null +++ b/tests/config/test_appservice.py @@ -0,0 +1,40 @@ +# Copyright 2023 Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from synapse.config.appservice import AppServiceConfig, ConfigError + +from tests.unittest import TestCase + + +class AppServiceConfigTest(TestCase): + def test_invalid_app_service_config_files(self) -> None: + for invalid_value in [ + "foobar", + 1, + None, + True, + False, + {}, + ["foo", "bar", False], + ]: + with self.assertRaises(ConfigError): + AppServiceConfig().read_config( + {"app_service_config_files": invalid_value} + ) + + def test_valid_app_service_config_files(self) -> None: + AppServiceConfig().read_config({"app_service_config_files": []}) + AppServiceConfig().read_config( + {"app_service_config_files": ["/not/a/real/path", "/not/a/real/path/2"]} + ) From be36600327b47b93f8462bdf343c4c12f6c966b9 Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Thu, 13 Apr 2023 13:28:55 +0200 Subject: [PATCH 079/106] Disable loading `RefreshTokenServlet` on workers (#15428) --- changelog.d/15428.bugfix | 1 + synapse/rest/client/login.py | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15428.bugfix diff --git a/changelog.d/15428.bugfix b/changelog.d/15428.bugfix new file mode 100644 index 000000000000..1083f00b81d6 --- /dev/null +++ b/changelog.d/15428.bugfix @@ -0,0 +1 @@ +Disable loading `RefreshTokenServlet` (`/_matrix/client/(r0|v3|unstable)/refresh`) on workers. \ No newline at end of file diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index b7e9c8f6b572..32c2f5ce0c5e 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -670,7 +670,10 @@ async def on_GET(self, request: SynapseRequest) -> None: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: LoginRestServlet(hs).register(http_server) - if hs.config.registration.refreshable_access_token_lifetime is not None: + if ( + hs.config.worker.worker_app is None + and hs.config.registration.refreshable_access_token_lifetime is not None + ): RefreshTokenServlet(hs).register(http_server) SsoRedirectServlet(hs).register(http_server) if hs.config.cas.cas_enabled: From c9723a1c1fbae1cc172fc9257fd1f1f259d2a23f Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Thu, 13 Apr 2023 15:08:00 +0200 Subject: [PATCH 080/106] Only load the SSO redirect servlet if SSO is enabled. (#15421) --- changelog.d/15421.misc | 1 + synapse/rest/client/login.py | 7 ++++++- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15421.misc diff --git a/changelog.d/15421.misc b/changelog.d/15421.misc new file mode 100644 index 000000000000..5deea3ac5b4b --- /dev/null +++ b/changelog.d/15421.misc @@ -0,0 +1 @@ +Only load the SSO redirect servlet if SSO is enabled. 
\ No newline at end of file diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index 32c2f5ce0c5e..a3487201310b 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -675,7 +675,12 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: and hs.config.registration.refreshable_access_token_lifetime is not None ): RefreshTokenServlet(hs).register(http_server) - SsoRedirectServlet(hs).register(http_server) + if ( + hs.config.cas.cas_enabled + or hs.config.saml2.saml2_enabled + or hs.config.oidc.oidc_enabled + ): + SsoRedirectServlet(hs).register(http_server) if hs.config.cas.cas_enabled: CasTicketServlet(hs).register(http_server) From 2503126d5245586b89c76e5f15f27c0a07774a45 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Thu, 13 Apr 2023 09:47:07 -0400 Subject: [PATCH 081/106] Implement MSC2174: move redacts to a content property. (#15395) This moves `redacts` from being a top-level property to a `content` property in a new room version. MSC2176 (which was previously implemented) states to not `redact` this property. --- changelog.d/15395.misc | 1 + synapse/api/room_versions.py | 3 ++- synapse/event_auth.py | 2 +- synapse/events/__init__.py | 8 +++++- synapse/events/builder.py | 4 ++- synapse/rest/client/room.py | 35 ++++++++++++++++++------- tests/events/test_utils.py | 12 +++++++-- tests/rest/client/test_redactions.py | 39 ++++++++++++++++++++++++++-- 8 files changed, 87 insertions(+), 17 deletions(-) create mode 100644 changelog.d/15395.misc diff --git a/changelog.d/15395.misc b/changelog.d/15395.misc new file mode 100644 index 000000000000..ee938452416c --- /dev/null +++ b/changelog.d/15395.misc @@ -0,0 +1 @@ +Implement [MSC2174](https://github.com/matrix-org/matrix-spec-proposals/pull/2174) to move the `redacts` key to a `content` property. diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py index 3dcae12161d7..5d9c13e3c3fc 100644 --- a/synapse/api/room_versions.py +++ b/synapse/api/room_versions.py @@ -80,7 +80,8 @@ class RoomVersion: limit_notifications_power_levels: bool # MSC2175: No longer include the creator in m.room.create events. msc2175_implicit_room_creator: bool - # MSC2174/MSC2176: Apply updated redaction rules algorithm. + # MSC2174/MSC2176: Apply updated redaction rules algorithm, move redacts to + # content property. msc2176_redaction_rules: bool # MSC3083: Support the 'restricted' join_rule. msc3083_join_rules: bool diff --git a/synapse/event_auth.py b/synapse/event_auth.py index f95d00d4729b..25898b95a570 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -793,7 +793,7 @@ def check_redaction( """Check whether the event sender is allowed to redact the target event. Returns: - True if the the sender is allowed to redact the target event if the + True if the sender is allowed to redact the target event if the target event was created by them. False if the sender is allowed to redact the target event with no further checks. 
diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index d475fe7ae526..4501518cf0e5 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -326,7 +326,6 @@ def __init__( hashes: DictProperty[Dict[str, str]] = DictProperty("hashes") origin: DictProperty[str] = DictProperty("origin") origin_server_ts: DictProperty[int] = DictProperty("origin_server_ts") - redacts: DefaultDictProperty[Optional[str]] = DefaultDictProperty("redacts", None) room_id: DictProperty[str] = DictProperty("room_id") sender: DictProperty[str] = DictProperty("sender") # TODO state_key should be Optional[str]. This is generally asserted in Synapse @@ -346,6 +345,13 @@ def event_id(self) -> str: def membership(self) -> str: return self.content["membership"] + @property + def redacts(self) -> Optional[str]: + """MSC2176 moved the redacts field into the content.""" + if self.room_version.msc2176_redaction_rules: + return self.content.get("redacts") + return self.get("redacts") + def is_state(self) -> bool: return self.get_state_key() is not None diff --git a/synapse/events/builder.py b/synapse/events/builder.py index c82745275f94..a254548c6c76 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -173,7 +173,9 @@ async def build( if self.is_state(): event_dict["state_key"] = self._state_key - if self._redacts is not None: + # MSC2174 moves the redacts property to the content, it is invalid to + # provide it as a top-level property. + if self._redacts is not None and not self.room_version.msc2176_redaction_rules: event_dict["redacts"] = self._redacts if self._origin_server_ts is not None: diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py index c0705d42914e..7699cc8d1bc0 100644 --- a/synapse/rest/client/room.py +++ b/synapse/rest/client/room.py @@ -1096,6 +1096,7 @@ def __init__(self, hs: "HomeServer"): super().__init__(hs) self.event_creation_handler = hs.get_event_creation_handler() self.auth = hs.get_auth() + self._store = hs.get_datastores().main self._relation_handler = hs.get_relations_handler() self._msc3912_enabled = hs.config.experimental.msc3912_enabled @@ -1113,6 +1114,19 @@ async def _do( ) -> Tuple[int, JsonDict]: content = parse_json_object_from_request(request) + # Ensure the redacts property in the content matches the one provided in + # the URL. + room_version = await self._store.get_room_version(room_id) + if room_version.msc2176_redaction_rules: + if "redacts" in content and content["redacts"] != event_id: + raise SynapseError( + 400, + "Cannot provide a redacts value incoherent with the event_id of the URL parameter", + Codes.INVALID_PARAM, + ) + else: + content["redacts"] = event_id + try: with_relations = None if self._msc3912_enabled and "org.matrix.msc3912.with_relations" in content: @@ -1128,20 +1142,23 @@ async def _do( requester, txn_id, room_id ) + # Event is not yet redacted, create a new event to redact it. if event is None: + event_dict = { + "type": EventTypes.Redaction, + "content": content, + "room_id": room_id, + "sender": requester.user.to_string(), + } + # Earlier room versions had a top-level redacts property. 
+ if not room_version.msc2176_redaction_rules: + event_dict["redacts"] = event_id + ( event, _, ) = await self.event_creation_handler.create_and_send_nonmember_event( - requester, - { - "type": EventTypes.Redaction, - "content": content, - "room_id": room_id, - "sender": requester.user.to_string(), - "redacts": event_id, - }, - txn_id=txn_id, + requester, event_dict, txn_id=txn_id ) if with_relations: diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py index c35f58f46213..1b179acb20fc 100644 --- a/tests/events/test_utils.py +++ b/tests/events/test_utils.py @@ -318,7 +318,11 @@ def test_redacts(self) -> None: """Redaction events have no special behaviour until MSC2174/MSC2176.""" self.run_test( - {"type": "m.room.redaction", "content": {"redacts": "$test2:domain"}}, + { + "type": "m.room.redaction", + "content": {"redacts": "$test2:domain"}, + "redacts": "$test2:domain", + }, { "type": "m.room.redaction", "content": {}, @@ -330,7 +334,11 @@ def test_redacts(self) -> None: # After MSC2174, redaction events keep the redacts content key. self.run_test( - {"type": "m.room.redaction", "content": {"redacts": "$test2:domain"}}, + { + "type": "m.room.redaction", + "content": {"redacts": "$test2:domain"}, + "redacts": "$test2:domain", + }, { "type": "m.room.redaction", "content": {"redacts": "$test2:domain"}, diff --git a/tests/rest/client/test_redactions.py b/tests/rest/client/test_redactions.py index 5dfe44defbb3..84a60c0b0721 100644 --- a/tests/rest/client/test_redactions.py +++ b/tests/rest/client/test_redactions.py @@ -16,6 +16,7 @@ from twisted.test.proto_helpers import MemoryReactor from synapse.api.constants import EventTypes, RelationTypes +from synapse.api.room_versions import RoomVersions from synapse.rest import admin from synapse.rest.client import login, room, sync from synapse.server import HomeServer @@ -74,6 +75,7 @@ def _redact_event( event_id: str, expect_code: int = 200, with_relations: Optional[List[str]] = None, + content: Optional[JsonDict] = None, ) -> JsonDict: """Helper function to send a redaction event. @@ -81,7 +83,7 @@ def _redact_event( """ path = "/_matrix/client/r0/rooms/%s/redact/%s" % (room_id, event_id) - request_content = {} + request_content = content or {} if with_relations: request_content["org.matrix.msc3912.with_relations"] = with_relations @@ -92,7 +94,7 @@ def _redact_event( return channel.json_body def _sync_room_timeline(self, access_token: str, room_id: str) -> List[JsonDict]: - channel = self.make_request("GET", "sync", access_token=self.mod_access_token) + channel = self.make_request("GET", "sync", access_token=access_token) self.assertEqual(channel.code, 200) room_sync = channel.json_body["rooms"]["join"][room_id] return room_sync["timeline"]["events"] @@ -466,3 +468,36 @@ def test_redact_relations_txn_id_reuse(self) -> None: ) self.assertIn("body", event_dict["content"], event_dict) self.assertEqual("I'm in a thread!", event_dict["content"]["body"]) + + def test_content_redaction(self) -> None: + """MSC2174 moved the redacts property to the content.""" + # Create a room with the newer room version. + room_id = self.helper.create_room_as( + self.mod_user_id, + tok=self.mod_access_token, + room_version=RoomVersions.MSC2176.identifier, + ) + + # Create an event. + b = self.helper.send(room_id=room_id, tok=self.mod_access_token) + event_id = b["event_id"] + + # Attempt to redact it with a bogus event ID. 
+ self._redact_event( + self.mod_access_token, + room_id, + event_id, + expect_code=400, + content={"redacts": "foo"}, + ) + + # Redact it for real. + self._redact_event(self.mod_access_token, room_id, event_id) + + # Sync the room, to get the id of the create event + timeline = self._sync_room_timeline(self.mod_access_token, room_id) + redact_event = timeline[-1] + self.assertEqual(redact_event["type"], EventTypes.Redaction) + # The redacts key should be in the content. + self.assertNotIn("redacts", redact_event) + self.assertEquals(redact_event["content"]["redacts"], event_id) From 38272be03710f0675d7f73d15a8a9c4398619b68 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Thu, 13 Apr 2023 14:06:25 +0000 Subject: [PATCH 082/106] Add comma missing from #15382. (#15429) * Add missing comma * Newsfile Signed-off-by: Olivier Wilkinson (reivilibre) --------- Signed-off-by: Olivier Wilkinson (reivilibre) --- changelog.d/15429.misc | 1 + synapse/storage/databases/main/event_federation.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15429.misc diff --git a/changelog.d/15429.misc b/changelog.d/15429.misc new file mode 100644 index 000000000000..c5b054d19e72 --- /dev/null +++ b/changelog.d/15429.misc @@ -0,0 +1 @@ +Improve DB performance of clearing out old data from `stream_ordering_to_exterm`. diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index 9e6011e8ea18..2ad6fa7d5efe 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -1708,7 +1708,7 @@ def _delete_old_forward_extrem_cache_txn(txn: LoggingTransaction) -> None: WHERE stream_ordering < ? """ txn.execute( - sql, (self.stream_ordering_month_ago) # type: ignore[attr-defined] + sql, (self.stream_ordering_month_ago,) # type: ignore[attr-defined] ) await self.db_pool.runInteraction( From edae20f926d9d1225111f1d40a1073ce3f1d3fb7 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Thu, 13 Apr 2023 14:35:03 +0000 Subject: [PATCH 083/106] Improve robustness when handling a perspective key response by deduplicating received server keys. (#15423) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Change `store_server_verify_keys` to take a `Mapping[(str, str), FKR]` This is because we already can't handle duplicate keys — leads to cardinality violation * Newsfile Signed-off-by: Olivier Wilkinson (reivilibre) --------- Signed-off-by: Olivier Wilkinson (reivilibre) --- changelog.d/15423.bugfix | 1 + synapse/crypto/keyring.py | 26 ++++++++++++++++++++++---- synapse/storage/databases/main/keys.py | 6 +++--- tests/crypto/test_keyring.py | 4 ++-- tests/storage/test_keys.py | 18 +++++++++--------- tests/unittest.py | 16 ++++++---------- 6 files changed, 43 insertions(+), 28 deletions(-) create mode 100644 changelog.d/15423.bugfix diff --git a/changelog.d/15423.bugfix b/changelog.d/15423.bugfix new file mode 100644 index 000000000000..dfb60ddd2fc1 --- /dev/null +++ b/changelog.d/15423.bugfix @@ -0,0 +1 @@ +Improve robustness when handling a perspective key response by deduplicating received server keys. 
\ No newline at end of file diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index d710607c6367..d2f99dc2acd1 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -721,7 +721,7 @@ async def get_server_verify_key_v2_indirect( ) keys: Dict[str, Dict[str, FetchKeyResult]] = {} - added_keys: List[Tuple[str, str, FetchKeyResult]] = [] + added_keys: Dict[Tuple[str, str], FetchKeyResult] = {} time_now_ms = self.clock.time_msec() @@ -752,9 +752,27 @@ async def get_server_verify_key_v2_indirect( # we continue to process the rest of the response continue - added_keys.extend( - (server_name, key_id, key) for key_id, key in processed_response.items() - ) + for key_id, key in processed_response.items(): + dict_key = (server_name, key_id) + if dict_key in added_keys: + already_present_key = added_keys[dict_key] + logger.warning( + "Duplicate server keys for %s (%s) from perspective %s (%r, %r)", + server_name, + key_id, + perspective_name, + already_present_key, + key, + ) + + if already_present_key.valid_until_ts > key.valid_until_ts: + # Favour the entry with the largest valid_until_ts, + # as `old_verify_keys` are also collected from this + # response. + continue + + added_keys[dict_key] = key + keys.setdefault(server_name, {}).update(processed_response) await self.store.store_server_verify_keys( diff --git a/synapse/storage/databases/main/keys.py b/synapse/storage/databases/main/keys.py index 0a19f607bda1..89c37a4eb560 100644 --- a/synapse/storage/databases/main/keys.py +++ b/synapse/storage/databases/main/keys.py @@ -15,7 +15,7 @@ import itertools import logging -from typing import Any, Dict, Iterable, List, Optional, Tuple +from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple from signedjson.key import decode_verify_key_bytes @@ -95,7 +95,7 @@ async def store_server_verify_keys( self, from_server: str, ts_added_ms: int, - verify_keys: Iterable[Tuple[str, str, FetchKeyResult]], + verify_keys: Mapping[Tuple[str, str], FetchKeyResult], ) -> None: """Stores NACL verification keys for remote servers. Args: @@ -108,7 +108,7 @@ async def store_server_verify_keys( key_values = [] value_values = [] invalidations = [] - for server_name, key_id, fetch_result in verify_keys: + for (server_name, key_id), fetch_result in verify_keys.items(): key_values.append((server_name, key_id)) value_values.append( ( diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 1b9696748fdc..66102ab93487 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -193,7 +193,7 @@ def test_verify_json_for_server(self) -> None: r = self.hs.get_datastores().main.store_server_verify_keys( "server9", int(time.time() * 1000), - [("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), 1000))], + {("server9", get_key_id(key1)): FetchKeyResult(get_verify_key(key1), 1000)}, ) self.get_success(r) @@ -291,7 +291,7 @@ def test_verify_json_for_server_with_null_valid_until_ms(self) -> None: # None is not a valid value in FetchKeyResult, but we're abusing this # API to insert null values into the database. The nulls get converted # to 0 when fetched in KeyStore.get_server_verify_keys. 
- [("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), None))], # type: ignore[arg-type] + {("server9", get_key_id(key1)): FetchKeyResult(get_verify_key(key1), None)}, # type: ignore[arg-type] ) self.get_success(r) diff --git a/tests/storage/test_keys.py b/tests/storage/test_keys.py index ba68171ad7fb..5901d80f26d7 100644 --- a/tests/storage/test_keys.py +++ b/tests/storage/test_keys.py @@ -46,10 +46,10 @@ def test_get_server_verify_keys(self) -> None: store.store_server_verify_keys( "from_server", 10, - [ - ("server1", key_id_1, FetchKeyResult(KEY_1, 100)), - ("server1", key_id_2, FetchKeyResult(KEY_2, 200)), - ], + { + ("server1", key_id_1): FetchKeyResult(KEY_1, 100), + ("server1", key_id_2): FetchKeyResult(KEY_2, 200), + }, ) ) @@ -90,10 +90,10 @@ def test_cache(self) -> None: store.store_server_verify_keys( "from_server", 0, - [ - ("srv1", key_id_1, FetchKeyResult(KEY_1, 100)), - ("srv1", key_id_2, FetchKeyResult(KEY_2, 200)), - ], + { + ("srv1", key_id_1): FetchKeyResult(KEY_1, 100), + ("srv1", key_id_2): FetchKeyResult(KEY_2, 200), + }, ) ) @@ -119,7 +119,7 @@ def test_cache(self) -> None: signedjson.key.generate_signing_key("key2") ) d = store.store_server_verify_keys( - "from_server", 10, [("srv1", key_id_2, FetchKeyResult(new_key_2, 300))] + "from_server", 10, {("srv1", key_id_2): FetchKeyResult(new_key_2, 300)} ) self.get_success(d) diff --git a/tests/unittest.py b/tests/unittest.py index 8a16fd366592..93fee1c0e6e4 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -793,16 +793,12 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: hs.get_datastores().main.store_server_verify_keys( from_server=self.OTHER_SERVER_NAME, ts_added_ms=clock.time_msec(), - verify_keys=[ - ( - self.OTHER_SERVER_NAME, - verify_key_id, - FetchKeyResult( - verify_key=verify_key, - valid_until_ts=clock.time_msec() + 10000, - ), - ) - ], + verify_keys={ + (self.OTHER_SERVER_NAME, verify_key_id): FetchKeyResult( + verify_key=verify_key, + valid_until_ts=clock.time_msec() + 10000, + ), + }, ) ) From d751f65e71bef80ec4181e4495c954551ddf33f7 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Thu, 13 Apr 2023 11:36:29 -0400 Subject: [PATCH 084/106] Remove registration fallback code. (#15405) The registration fallback is broken and unspecced. This removes it since there is no plan to spec it. Note that this does not modify the login fallback code. 
---
 changelog.d/15405.removal                      |   1 +
 synapse/res/templates/recaptcha.html           |   6 +-
 synapse/res/templates/registration_token.html  |   6 +-
 synapse/res/templates/style.css                |   4 +
 synapse/res/templates/terms.html               |   7 +-
 synapse/static/client/register/index.html      |  34 -----
 .../client/register/js/jquery-3.4.1.min.js     |   2 -
 synapse/static/client/register/js/register.js  | 117 ------------------
 .../client/register/register_config.sample.js  |   3 -
 synapse/static/client/register/style.css       |  64 ----------
 10 files changed, 17 insertions(+), 227 deletions(-)
 create mode 100644 changelog.d/15405.removal
 delete mode 100644 synapse/static/client/register/index.html
 delete mode 100644 synapse/static/client/register/js/jquery-3.4.1.min.js
 delete mode 100644 synapse/static/client/register/js/register.js
 delete mode 100644 synapse/static/client/register/register_config.sample.js
 delete mode 100644 synapse/static/client/register/style.css

diff --git a/changelog.d/15405.removal b/changelog.d/15405.removal
new file mode 100644
index 000000000000..83340041539f
--- /dev/null
+++ b/changelog.d/15405.removal
@@ -0,0 +1 @@
+Remove the broken, unspecced registration fallback. Note that the *login* fallback is unaffected by this change.
[The remaining hunks of this patch did not survive extraction: their HTML and JavaScript
markup was stripped, leaving only stray fragments. They consisted of small markup
additions to the `recaptcha.html` (inside its `{% block header %}`),
`registration_token.html`, `terms.html` and `style.css` templates under
`synapse/res/templates/`, plus the wholesale deletion of the static registration-fallback
assets listed in the diffstat above: `index.html` (a "Create account" form with username,
password and confirm-password fields, a feedback area and a Register button), the vendored
minified jQuery ("/*! jQuery v3.4.1 | (c) JS Foundation and other contributors |
jquery.org/license */"), `register.js` (117 lines driving the form: it submits the
reCAPTCHA response first when `window.matrixRegistration.isUsingRecaptcha` is set, then
registers via `$.post(matrixRegistration.endpoint, ...)` with `type: "m.login.password"`,
reporting errors through `setFeedbackString`), `register_config.sample.js` (three lines
defining `window.matrixRegistrationConfig.recaptcha_public_key`) and the 64-line page
stylesheet `style.css`.]
From 4af0aec54dad261bcad240d8a878a1c16934e77c Mon Sep 17 00:00:00 2001
From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com>
Date: Fri, 14 Apr 2023 11:24:06 +0200
Subject: [PATCH 085/106] Load `/directory/room/{roomAlias}` endpoint on
 workers (#15333)

* Enable `directory`

* move to worker store

* newsfile

* disable `ClientDirectoryListServer` and `ClientAppserviceDirectoryListServer` for workers
---
 changelog.d/15333.feature                   | 1 +
 docker/configure_workers_and_start.py       | 1 +
 docs/workers.md                             | 1 +
 synapse/rest/__init__.py                    | 3 +--
 synapse/rest/client/directory.py            | 6 ++++--
 synapse/storage/databases/main/directory.py | 6 ++++--
 6 files changed, 12 insertions(+), 6 deletions(-)
 create mode 100644 changelog.d/15333.feature

diff --git a/changelog.d/15333.feature b/changelog.d/15333.feature
new file mode 100644
index 000000000000..35ea89ad8982
--- /dev/null
+++ b/changelog.d/15333.feature
@@ -0,0 +1 @@
+Allow loading `/directory/room/{roomAlias}` endpoint on workers.
\ No newline at end of file
diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py
index 2a50ee1e4b4e..26f92b3f1ac1 100755
--- a/docker/configure_workers_and_start.py
+++ b/docker/configure_workers_and_start.py
@@ -173,6 +173,7 @@
         "^/_matrix/client/(api/v1|r0|v3|unstable)/search",
         "^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)",
         "^/_matrix/client/(r0|v3|unstable)/password_policy$",
+        "^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$",
     ],
     "shared_extra_conf": {},
     "worker_extra_conf": "",
diff --git a/docs/workers.md b/docs/workers.md
index e9a477d32c89..cb2333e4a599 100644
--- a/docs/workers.md
+++ b/docs/workers.md
@@ -234,6 +234,7 @@ information.
^/_matrix/client/(api/v1|r0|v3|unstable/.*)/rooms/.*/aliases ^/_matrix/client/(api/v1|r0|v3|unstable)/search$ ^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$) + ^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$ # Encryption requests ^/_matrix/client/(r0|v3|unstable)/keys/query$ diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py index 1d7c11b42d09..19603ed1370b 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -100,8 +100,7 @@ def register_servlets(client_resource: HttpServer, hs: "HomeServer") -> None: login.register_servlets(hs, client_resource) profile.register_servlets(hs, client_resource) presence.register_servlets(hs, client_resource) - if is_main_process: - directory.register_servlets(hs, client_resource) + directory.register_servlets(hs, client_resource) voip.register_servlets(hs, client_resource) if is_main_process: pusher.register_servlets(hs, client_resource) diff --git a/synapse/rest/client/directory.py b/synapse/rest/client/directory.py index f17b4c8d2229..570bb52747ab 100644 --- a/synapse/rest/client/directory.py +++ b/synapse/rest/client/directory.py @@ -39,12 +39,14 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: ClientDirectoryServer(hs).register(http_server) - ClientDirectoryListServer(hs).register(http_server) - ClientAppserviceDirectoryListServer(hs).register(http_server) + if hs.config.worker.worker_app is None: + ClientDirectoryListServer(hs).register(http_server) + ClientAppserviceDirectoryListServer(hs).register(http_server) class ClientDirectoryServer(RestServlet): PATTERNS = client_patterns("/directory/room/(?P[^/]*)$", v1=True) + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() diff --git a/synapse/storage/databases/main/directory.py b/synapse/storage/databases/main/directory.py index 44aa181174ac..3cb4c9072907 100644 --- a/synapse/storage/databases/main/directory.py +++ b/synapse/storage/databases/main/directory.py @@ -129,8 +129,6 @@ def alias_txn(txn: LoggingTransaction) -> None: 409, "Room alias %s already exists" % room_alias.to_string() ) - -class DirectoryStore(DirectoryWorkerStore): async def delete_room_alias(self, room_alias: RoomAlias) -> Optional[str]: room_id = await self.db_pool.runInteraction( "delete_room_alias", self._delete_room_alias_txn, room_alias @@ -201,3 +199,7 @@ def _update_aliases_for_room_txn(txn: LoggingTransaction) -> None: await self.db_pool.runInteraction( "_update_aliases_for_room_txn", _update_aliases_for_room_txn ) + + +class DirectoryStore(DirectoryWorkerStore): + pass From de4390cd404a8cad37605fbb3afce8773cecd672 Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Fri, 14 Apr 2023 12:48:35 +0200 Subject: [PATCH 086/106] Convert async to normal tests in `TestSSOHandler` (#15433) * Convert async to normal tests in `TestSSOHandler` * newsfile --- changelog.d/15433.misc | 1 + tests/handlers/test_sso.py | 8 ++++---- 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15433.misc diff --git a/changelog.d/15433.misc b/changelog.d/15433.misc new file mode 100644 index 000000000000..f1d83506bcd9 --- /dev/null +++ b/changelog.d/15433.misc @@ -0,0 +1 @@ +Convert async to normal tests in `TestSSOHandler`. 
\ No newline at end of file

diff --git a/tests/handlers/test_sso.py b/tests/handlers/test_sso.py
index d6f43a98fcda..620ae3a4ba54 100644
--- a/tests/handlers/test_sso.py
+++ b/tests/handlers/test_sso.py
@@ -35,7 +35,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
         )
         return hs
 
-    async def test_set_avatar(self) -> None:
+    def test_set_avatar(self) -> None:
         """Tests successfully setting the avatar of a newly created user"""
         handler = self.hs.get_sso_handler()
 
@@ -54,7 +54,7 @@ async def test_set_avatar(self) -> None:
         self.assertIsNot(profile["avatar_url"], None)
 
     @unittest.override_config({"max_avatar_size": 1})
-    async def test_set_avatar_too_big_image(self) -> None:
+    def test_set_avatar_too_big_image(self) -> None:
         """Tests that saving an avatar fails when it is too big"""
         handler = self.hs.get_sso_handler()
 
@@ -66,7 +66,7 @@ async def test_set_avatar_too_big_image(self) -> None:
         )
 
     @unittest.override_config({"allowed_avatar_mimetypes": ["image/jpeg"]})
-    async def test_set_avatar_incorrect_mime_type(self) -> None:
+    def test_set_avatar_incorrect_mime_type(self) -> None:
         """Tests that saving an avatar fails when its mime type is not allowed"""
         handler = self.hs.get_sso_handler()
 
@@ -77,7 +77,7 @@ async def test_set_avatar_incorrect_mime_type(self) -> None:
             self.get_success(handler.set_avatar(user_id, "http://my.server/me.png"))
         )
 
-    async def test_skip_saving_avatar_when_not_changed(self) -> None:
+    def test_skip_saving_avatar_when_not_changed(self) -> None:
        """Tests whether saving of avatar correctly skips if the avatar hasn't changed"""
         handler = self.hs.get_sso_handler()

From dabbb94fafd335966bbdb5bd2187678872731a0d Mon Sep 17 00:00:00 2001
From: Mathieu Velten
Date: Fri, 14 Apr 2023 14:12:37 +0200
Subject: [PATCH 087/106] Delete pushers after calling on_logged_out module
 hook on device delete (#15410)

---
 changelog.d/15410.bugfix                |  1 +
 .../password_auth_provider_callbacks.md |  3 ++
 synapse/handlers/device.py              |  6 ++-
 tests/module_api/test_api.py            | 51 ++++++++++++++++++-
 4 files changed, 58 insertions(+), 3 deletions(-)
 create mode 100644 changelog.d/15410.bugfix

diff --git a/changelog.d/15410.bugfix b/changelog.d/15410.bugfix
new file mode 100644
index 000000000000..eb540e33c5fe
--- /dev/null
+++ b/changelog.d/15410.bugfix
@@ -0,0 +1 @@
+Fix and document a previously unstated assumption that `on_logged_out` module hooks will be called before pushers are deleted.
diff --git a/docs/modules/password_auth_provider_callbacks.md b/docs/modules/password_auth_provider_callbacks.md
index f6349d5404f4..8275f7ebdc67 100644
--- a/docs/modules/password_auth_provider_callbacks.md
+++ b/docs/modules/password_auth_provider_callbacks.md
@@ -103,6 +103,9 @@ Called during a logout request for a user. It is passed the qualified user ID, t
 deactivated device (if any: access tokens are occasionally created without an associated
 device ID), and the (now deactivated) access token.
 
+Deleting the related pushers is done after calling `on_logged_out`, so you can rely on them
+to still be present.
+
 If multiple modules implement this callback, Synapse runs them all in order.
 
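+For example, because the pushers are only removed after `on_logged_out` returns, a
+module can still observe them from inside the callback. The snippet below is a
+minimal illustrative sketch, not prescribed module code: only the callback
+signature and the `register_password_auth_provider_callbacks` registration shown
+here are part of the real API; the module class and the logging are assumptions.
+
+```python
+import logging
+from typing import Optional
+
+logger = logging.getLogger(__name__)
+
+
+class ExampleAuthModule:
+    def __init__(self, config: dict, api):
+        # `api` is the ModuleApi instance that Synapse hands to modules.
+        api.register_password_auth_provider_callbacks(on_logged_out=self.on_logged_out)
+
+    async def on_logged_out(
+        self, user_id: str, device_id: Optional[str], access_token: str
+    ) -> None:
+        # The pushers belonging to `device_id` have not been deleted yet, so a
+        # module that needs a last look at per-device push state can act here.
+        logger.info("User %s logged out device %s", user_id, device_id)
+```
+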
### `get_username_for_registration` diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index d2063d443558..ae1d9337adc5 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -513,8 +513,6 @@ async def delete_devices(self, user_id: str, device_ids: List[str]) -> None: else: raise - await self.hs.get_pusherpool().remove_pushers_by_devices(user_id, device_ids) - # Delete data specific to each device. Not optimised as it is not # considered as part of a critical path. for device_id in device_ids: @@ -533,6 +531,10 @@ async def delete_devices(self, user_id: str, device_ids: List[str]) -> None: f"org.matrix.msc3890.local_notification_settings.{device_id}", ) + # Pushers are deleted after `delete_access_tokens_for_user` is called so that + # modules using `on_logged_out` hook can use them if needed. + await self.hs.get_pusherpool().remove_pushers_by_devices(user_id, device_ids) + await self.notify_device_update(user_id, device_ids) async def update_device(self, user_id: str, device_id: str, content: dict) -> None: diff --git a/tests/module_api/test_api.py b/tests/module_api/test_api.py index 3a1929691e9c..758b4bc38bdd 100644 --- a/tests/module_api/test_api.py +++ b/tests/module_api/test_api.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Dict +from typing import Any, Dict, Optional from unittest.mock import Mock from twisted.internet import defer @@ -21,6 +21,7 @@ from synapse.api.errors import NotFoundError from synapse.events import EventBase from synapse.federation.units import Transaction +from synapse.handlers.device import DeviceHandler from synapse.handlers.presence import UserPresenceState from synapse.handlers.push_rules import InvalidRuleException from synapse.module_api import ModuleApi @@ -773,6 +774,54 @@ def test_create_room(self) -> None: # Check room alias. self.assertIsNone(room_alias) + def test_on_logged_out(self) -> None: + """Test that on_logged_out module hook is properly called when logging out + a device, and that related pushers are still available at this time. + """ + device_id = "AAAAAAA" + user_id = self.register_user("test_on_logged_out", "secret") + self.login("test_on_logged_out", "secret", device_id) + + self.get_success( + self.hs.get_pusherpool().add_or_update_pusher( + user_id=user_id, + device_id=device_id, + kind="http", + app_id="m.http", + app_display_name="HTTP Push Notifications", + device_display_name="pushy push", + pushkey="a@example.com", + lang=None, + data={"url": "http://example.com/_matrix/push/v1/notify"}, + ) + ) + + # Setup a callback counting the number of pushers. + number_of_pushers_in_callback: Optional[int] = None + + async def _on_logged_out_mock( + user_id: str, device_id: Optional[str], access_token: str + ) -> None: + nonlocal number_of_pushers_in_callback + number_of_pushers_in_callback = len( + self.hs.get_pusherpool().pushers[user_id].values() + ) + + self.module_api.register_password_auth_provider_callbacks( + on_logged_out=_on_logged_out_mock + ) + + # Delete the device. + device_handler = self.hs.get_device_handler() + assert isinstance(device_handler, DeviceHandler) + self.get_success(device_handler.delete_devices(user_id, [device_id])) + + # Check that the callback was called and the pushers still existed. + self.assertEqual(number_of_pushers_in_callback, 1) + + # Ensure the pushers were deleted after the callback. 
+ self.assertEqual(len(self.hs.get_pusherpool().pushers[user_id].values()), 0) + class ModuleApiWorkerTestCase(BaseModuleApiTestCase, BaseMultiWorkerStreamTestCase): """For testing ModuleApi functionality in a multi-worker setup""" From b5192355f6ac11eec4781d73a59b14cfc8732d1f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 14 Apr 2023 16:10:32 +0100 Subject: [PATCH 088/106] User directory background update speedup (#15435) c.f. #15264 The two changes are: 1. Add indexes so that the select / deletes don't do sequential scans 2. Don't repeatedly call `SELECT count(*)` each iteration, as that's slow --- changelog.d/15435.misc | 1 + .../storage/databases/main/user_directory.py | 89 +++++++++---------- 2 files changed, 45 insertions(+), 45 deletions(-) create mode 100644 changelog.d/15435.misc diff --git a/changelog.d/15435.misc b/changelog.d/15435.misc new file mode 100644 index 000000000000..e0f591b6d141 --- /dev/null +++ b/changelog.d/15435.misc @@ -0,0 +1 @@ +Speed up the user directory background update. diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py index 9fced4b99718..5d65faed164a 100644 --- a/synapse/storage/databases/main/user_directory.py +++ b/synapse/storage/databases/main/user_directory.py @@ -102,44 +102,34 @@ async def _populate_user_directory_createtables( ) -> int: # Get all the rooms that we want to process. def _make_staging_area(txn: LoggingTransaction) -> None: - sql = ( - "CREATE TABLE IF NOT EXISTS " - + TEMP_TABLE - + "_rooms(room_id TEXT NOT NULL, events BIGINT NOT NULL)" - ) - txn.execute(sql) - - sql = ( - "CREATE TABLE IF NOT EXISTS " - + TEMP_TABLE - + "_position(position TEXT NOT NULL)" - ) - txn.execute(sql) - - # Get rooms we want to process from the database - sql = """ - SELECT room_id, count(*) FROM current_state_events + sql = f""" + CREATE TABLE IF NOT EXISTS {TEMP_TABLE}_rooms AS + SELECT room_id, count(*) AS events + FROM current_state_events GROUP BY room_id """ txn.execute(sql) - rooms = list(txn.fetchall()) - self.db_pool.simple_insert_many_txn( - txn, TEMP_TABLE + "_rooms", keys=("room_id", "events"), values=rooms + txn.execute( + f"CREATE INDEX IF NOT EXISTS {TEMP_TABLE}_rooms_rm ON {TEMP_TABLE}_rooms (room_id)" ) - del rooms - - sql = ( - "CREATE TABLE IF NOT EXISTS " - + TEMP_TABLE - + "_users(user_id TEXT NOT NULL)" + txn.execute( + f"CREATE INDEX IF NOT EXISTS {TEMP_TABLE}_rooms_evs ON {TEMP_TABLE}_rooms (events)" ) - txn.execute(sql) - txn.execute("SELECT name FROM users") - users = list(txn.fetchall()) + sql = f""" + CREATE TABLE IF NOT EXISTS {TEMP_TABLE}_position ( + position TEXT NOT NULL + ) + """ + txn.execute(sql) - self.db_pool.simple_insert_many_txn( - txn, TEMP_TABLE + "_users", keys=("user_id",), values=users + sql = f""" + CREATE TABLE IF NOT EXISTS {TEMP_TABLE}_users AS + SELECT name AS user_id FROM users + """ + txn.execute(sql) + txn.execute( + f"CREATE INDEX IF NOT EXISTS {TEMP_TABLE}_users_idx ON {TEMP_TABLE}_users (user_id)" ) new_pos = await self.get_max_stream_id_in_current_state_deltas() @@ -222,12 +212,13 @@ def _get_next_batch( if not rooms_to_work_on: return None - # Get how many are left to process, so we can give status on how - # far we are in processing - txn.execute("SELECT COUNT(*) FROM " + TEMP_TABLE + "_rooms") - result = txn.fetchone() - assert result is not None - progress["remaining"] = result[0] + if "remaining" not in progress: + # Get how many are left to process, so we can give status on how + # far we are in processing + 
txn.execute("SELECT COUNT(*) FROM " + TEMP_TABLE + "_rooms") + result = txn.fetchone() + assert result is not None + progress["remaining"] = result[0] return rooms_to_work_on @@ -332,7 +323,14 @@ def _get_next_batch( if processed_event_count > batch_size: # Don't process any more rooms, we've hit our batch size. - return processed_event_count + break + + await self.db_pool.runInteraction( + "populate_user_directory", + self.db_pool.updates._background_update_progress_txn, + "populate_user_directory_process_rooms", + progress, + ) return processed_event_count @@ -356,13 +354,14 @@ def _get_next_batch(txn: LoggingTransaction) -> Optional[List[str]]: users_to_work_on = [x[0] for x in user_result] - # Get how many are left to process, so we can give status on how - # far we are in processing - sql = "SELECT COUNT(*) FROM " + TEMP_TABLE + "_users" - txn.execute(sql) - count_result = txn.fetchone() - assert count_result is not None - progress["remaining"] = count_result[0] + if "remaining" not in progress: + # Get how many are left to process, so we can give status on how + # far we are in processing + sql = "SELECT COUNT(*) FROM " + TEMP_TABLE + "_users" + txn.execute(sql) + count_result = txn.fetchone() + assert count_result is not None + progress["remaining"] = count_result[0] return users_to_work_on From e4a25d022c1e4b71e043b07324d95362f7fb4067 Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Fri, 14 Apr 2023 18:26:07 +0200 Subject: [PATCH 089/106] Load `/capabilities` endpoint on workers (#15436) --- changelog.d/15436.feature | 1 + docker/configure_workers_and_start.py | 1 + docs/workers.md | 1 + synapse/rest/__init__.py | 2 +- synapse/rest/client/capabilities.py | 1 + 5 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15436.feature diff --git a/changelog.d/15436.feature b/changelog.d/15436.feature new file mode 100644 index 000000000000..d83f8c3e4a57 --- /dev/null +++ b/changelog.d/15436.feature @@ -0,0 +1 @@ +Allow loading `/capabilities` endpoint on workers. \ No newline at end of file diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py index 26f92b3f1ac1..4beec3daaf8b 100755 --- a/docker/configure_workers_and_start.py +++ b/docker/configure_workers_and_start.py @@ -174,6 +174,7 @@ "^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)", "^/_matrix/client/(r0|v3|unstable)/password_policy$", "^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$", + "^/_matrix/client/(r0|v3|unstable)/capabilities$", ], "shared_extra_conf": {}, "worker_extra_conf": "", diff --git a/docs/workers.md b/docs/workers.md index cb2333e4a599..6192a46e0950 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -235,6 +235,7 @@ information. 
^/_matrix/client/(api/v1|r0|v3|unstable)/search$ ^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$) ^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$ + ^/_matrix/client/(r0|v3|unstable)/capabilities$ # Encryption requests ^/_matrix/client/(r0|v3|unstable)/keys/query$ diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py index 19603ed1370b..1af8d99d2028 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -133,8 +133,8 @@ def register_servlets(client_resource: HttpServer, hs: "HomeServer") -> None: if is_main_process: room_upgrade_rest_servlet.register_servlets(hs, client_resource) room_batch.register_servlets(hs, client_resource) + capabilities.register_servlets(hs, client_resource) if is_main_process: - capabilities.register_servlets(hs, client_resource) account_validity.register_servlets(hs, client_resource) relations.register_servlets(hs, client_resource) password_policy.register_servlets(hs, client_resource) diff --git a/synapse/rest/client/capabilities.py b/synapse/rest/client/capabilities.py index e84dde31b118..0dbf8f6818ab 100644 --- a/synapse/rest/client/capabilities.py +++ b/synapse/rest/client/capabilities.py @@ -33,6 +33,7 @@ class CapabilitiesRestServlet(RestServlet): """End point to expose the capabilities of the server.""" PATTERNS = client_patterns("/capabilities$") + CATEGORY = "Client API requests" def __init__(self, hs: "HomeServer"): super().__init__() From 24b61f32ff7a2f49aaf2d3d81045d2187eccce7d Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Fri, 14 Apr 2023 19:49:47 +0200 Subject: [PATCH 090/106] Disable directory listing for `StaticResource` (#15438) --- changelog.d/15438.misc | 1 + synapse/http/server.py | 10 ++++++++++ 2 files changed, 11 insertions(+) create mode 100644 changelog.d/15438.misc diff --git a/changelog.d/15438.misc b/changelog.d/15438.misc new file mode 100644 index 000000000000..1edcbac7e294 --- /dev/null +++ b/changelog.d/15438.misc @@ -0,0 +1 @@ +Disable directory listing for static resources in `/_matrix/static/`. \ No newline at end of file diff --git a/synapse/http/server.py b/synapse/http/server.py index 7b760505b25a..101dc2e747d0 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -46,6 +46,13 @@ from twisted.internet.defer import CancelledError from twisted.python import failure from twisted.web import resource + +try: + from twisted.web.pages import notFound +except ImportError: + from twisted.web.resource import NoResource as notFound # type: ignore[assignment] + +from twisted.web.resource import IResource from twisted.web.server import NOT_DONE_YET, Request from twisted.web.static import File from twisted.web.util import redirectTo @@ -569,6 +576,9 @@ def render_GET(self, request: Request) -> bytes: set_clickjacking_protection_headers(request) return super().render_GET(request) + def directoryListing(self) -> IResource: + return notFound() + class UnrecognizedRequestResource(resource.Resource): """ From 8a47d6e3a685bd45237b7dae9c138209df509f64 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Fri, 14 Apr 2023 19:04:49 +0100 Subject: [PATCH 091/106] More precise type for LoggingTransaction.execute (#15432) * More precise type for LoggingTransaction.execute * Add an annotation for stream_ordering_month_ago This would have spotted the error that was fixed in "Add comma missing from #15382. 
(#15429)" --- changelog.d/15432.misc | 1 + synapse/storage/database.py | 20 ++++++++++++++++--- .../databases/main/event_federation.py | 19 ++++++++++-------- synapse/storage/types.py | 6 +++--- 4 files changed, 32 insertions(+), 14 deletions(-) create mode 100644 changelog.d/15432.misc diff --git a/changelog.d/15432.misc b/changelog.d/15432.misc new file mode 100644 index 000000000000..93ceaeafc9b9 --- /dev/null +++ b/changelog.d/15432.misc @@ -0,0 +1 @@ +Improve type hints. diff --git a/synapse/storage/database.py b/synapse/storage/database.py index 226ccc1671ad..1f5f5eb6f8c7 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -58,7 +58,7 @@ from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage.background_updates import BackgroundUpdater from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine -from synapse.storage.types import Connection, Cursor +from synapse.storage.types import Connection, Cursor, SQLQueryParameters from synapse.util.async_helpers import delay_cancellation from synapse.util.iterutils import batch_iter @@ -371,10 +371,18 @@ def execute_batch(self, sql: str, args: Iterable[Iterable[Any]]) -> None: if isinstance(self.database_engine, PostgresEngine): from psycopg2.extras import execute_batch + # TODO: is it safe for values to be Iterable[Iterable[Any]] here? + # https://www.psycopg.org/docs/extras.html?highlight=execute_batch#psycopg2.extras.execute_batch + # suggests each arg in args should be a sequence or mapping self._do_execute( lambda the_sql: execute_batch(self.txn, the_sql, args), sql ) else: + # TODO: is it safe for values to be Iterable[Iterable[Any]] here? + # https://docs.python.org/3/library/sqlite3.html?highlight=sqlite3#sqlite3.Cursor.executemany + # suggests that the outer collection may be iterable, but + # https://docs.python.org/3/library/sqlite3.html?highlight=sqlite3#how-to-use-placeholders-to-bind-values-in-sql-queries + # suggests that the inner collection should be a sequence or dict. self.executemany(sql, args) def execute_values( @@ -390,14 +398,20 @@ def execute_values( from psycopg2.extras import execute_values return self._do_execute( + # TODO: is it safe for values to be Iterable[Iterable[Any]] here? + # https://www.psycopg.org/docs/extras.html?highlight=execute_batch#psycopg2.extras.execute_values says values should be Sequence[Sequence] lambda the_sql: execute_values(self.txn, the_sql, values, fetch=fetch), sql, ) - def execute(self, sql: str, *args: Any) -> None: - self._do_execute(self.txn.execute, sql, *args) + def execute(self, sql: str, parameters: SQLQueryParameters = ()) -> None: + self._do_execute(self.txn.execute, sql, parameters) def executemany(self, sql: str, *args: Any) -> None: + # TODO: we should add a type for *args here. Looking at Cursor.executemany + # and DBAPI2 it ought to be Sequence[_Parameter], but we pass in + # Iterable[Iterable[Any]] in execute_batch and execute_values above, which mypy + # complains about. 
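+        # Illustrative aside (not part of this change): by the DBAPI2
+        # convention, each element of the outer parameter collection is
+        # itself a sequence of bind values, e.g.
+        #     txn.executemany("INSERT INTO foo (id) VALUES (?)", [(1,), (2,)])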
self._do_execute(self.txn.executemany, sql, *args) def executescript(self, sql: str) -> None: diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index 2ad6fa7d5efe..ac19de183cb6 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -114,6 +114,10 @@ def __init__(self, room_id: str): class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBaseStore): + # TODO: this attribute comes from EventPushActionWorkerStore. Should we inherit from + # that store so that mypy can deduce this for itself? + stream_ordering_month_ago: Optional[int] + def __init__( self, database: DatabasePool, @@ -1182,8 +1186,8 @@ async def have_room_forward_extremities_changed_since( Throws a StoreError if we have since purged the index for stream_orderings from that point. """ - - if stream_ordering <= self.stream_ordering_month_ago: # type: ignore[attr-defined] + assert self.stream_ordering_month_ago is not None + if stream_ordering <= self.stream_ordering_month_ago: raise StoreError(400, f"stream_ordering too old {stream_ordering}") sql = """ @@ -1231,7 +1235,8 @@ async def get_forward_extremities_for_room_at_stream_ordering( # provided the last_change is recent enough, we now clamp the requested # stream_ordering to it. - if last_change > self.stream_ordering_month_ago: # type: ignore[attr-defined] + assert self.stream_ordering_month_ago is not None + if last_change > self.stream_ordering_month_ago: stream_ordering = min(last_change, stream_ordering) return await self._get_forward_extremeties_for_room(room_id, stream_ordering) @@ -1246,8 +1251,8 @@ async def _get_forward_extremeties_for_room( Throws a StoreError if we have since purged the index for stream_orderings from that point. """ - - if stream_ordering <= self.stream_ordering_month_ago: # type: ignore[attr-defined] + assert self.stream_ordering_month_ago is not None + if stream_ordering <= self.stream_ordering_month_ago: raise StoreError(400, "stream_ordering too old %s" % (stream_ordering,)) sql = """ @@ -1707,9 +1712,7 @@ def _delete_old_forward_extrem_cache_txn(txn: LoggingTransaction) -> None: DELETE FROM stream_ordering_to_exterm WHERE stream_ordering < ? """ - txn.execute( - sql, (self.stream_ordering_month_ago,) # type: ignore[attr-defined] - ) + txn.execute(sql, (self.stream_ordering_month_ago,)) await self.db_pool.runInteraction( "_delete_old_forward_extrem_cache", diff --git a/synapse/storage/types.py b/synapse/storage/types.py index 56a00485393d..34ac80753012 100644 --- a/synapse/storage/types.py +++ b/synapse/storage/types.py @@ -31,14 +31,14 @@ Some very basic protocol definitions for the DB-API2 classes specified in PEP-249 """ -_Parameters = Union[Sequence[Any], Mapping[str, Any]] +SQLQueryParameters = Union[Sequence[Any], Mapping[str, Any]] class Cursor(Protocol): - def execute(self, sql: str, parameters: _Parameters = ...) -> Any: + def execute(self, sql: str, parameters: SQLQueryParameters = ...) -> Any: ... - def executemany(self, sql: str, parameters: Sequence[_Parameters]) -> Any: + def executemany(self, sql: str, parameters: Sequence[SQLQueryParameters]) -> Any: ... 
def fetchone(self) -> Optional[Tuple]: From c9326140dc9f8ea849356b5a8397e468636df8d4 Mon Sep 17 00:00:00 2001 From: Jason Little Date: Fri, 14 Apr 2023 15:46:04 -0500 Subject: [PATCH 092/106] Refactor `SimpleHttpClient` to pull out reusable methods (#15427) Pulls out some methods to `BaseHttpClient` to eventually be reused in other contexts. --- changelog.d/15427.misc | 1 + synapse/http/client.py | 132 ++++++++++++++++++++++++----------------- 2 files changed, 77 insertions(+), 56 deletions(-) create mode 100644 changelog.d/15427.misc diff --git a/changelog.d/15427.misc b/changelog.d/15427.misc new file mode 100644 index 000000000000..ef873e3b2b49 --- /dev/null +++ b/changelog.d/15427.misc @@ -0,0 +1 @@ +Refactor `SimpleHttpClient` to pull out a base class. diff --git a/synapse/http/client.py b/synapse/http/client.py index b5cf8123ce84..91fe474f36d9 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -312,35 +312,27 @@ def request( ) -class SimpleHttpClient: +class BaseHttpClient: """ A simple, no-frills HTTP client with methods that wrap up common ways of - using HTTP in Matrix + using HTTP in Matrix. Does not come with a default Agent, subclasses will need to + define their own. + + Args: + hs: The HomeServer instance to pass in + treq_args: Extra keyword arguments to be given to treq.request. """ + agent: IAgent + def __init__( self, hs: "HomeServer", treq_args: Optional[Dict[str, Any]] = None, - ip_whitelist: Optional[IPSet] = None, - ip_blacklist: Optional[IPSet] = None, - use_proxy: bool = False, ): - """ - Args: - hs - treq_args: Extra keyword arguments to be given to treq.request. - ip_blacklist: The IP addresses that are blacklisted that - we may not request. - ip_whitelist: The whitelisted IP addresses, that we can - request if it were otherwise caught in a blacklist. - use_proxy: Whether proxy settings should be discovered and used - from conventional environment variables. - """ self.hs = hs + self.reactor = hs.get_reactor() - self._ip_whitelist = ip_whitelist - self._ip_blacklist = ip_blacklist self._extra_treq_args = treq_args or {} self.clock = hs.get_clock() @@ -356,44 +348,6 @@ def __init__( # reactor. self._cooperator = Cooperator(scheduler=_make_scheduler(hs.get_reactor())) - if self._ip_blacklist: - # If we have an IP blacklist, we need to use a DNS resolver which - # filters out blacklisted IP addresses, to prevent DNS rebinding. - self.reactor: ISynapseReactor = BlacklistingReactorWrapper( - hs.get_reactor(), self._ip_whitelist, self._ip_blacklist - ) - else: - self.reactor = hs.get_reactor() - - # the pusher makes lots of concurrent SSL connections to sygnal, and - # tends to do so in batches, so we need to allow the pool to keep - # lots of idle connections around. - pool = HTTPConnectionPool(self.reactor) - # XXX: The justification for using the cache factor here is that larger instances - # will need both more cache and more connections. - # Still, this should probably be a separate dial - pool.maxPersistentPerHost = max(int(100 * hs.config.caches.global_factor), 5) - pool.cachedConnectionTimeout = 2 * 60 - - self.agent: IAgent = ProxyAgent( - self.reactor, - hs.get_reactor(), - connectTimeout=15, - contextFactory=self.hs.get_http_client_context_factory(), - pool=pool, - use_proxy=use_proxy, - ) - - if self._ip_blacklist: - # If we have an IP blacklist, we then install the blacklisting Agent - # which prevents direct access to IP addresses, that are not caught - # by the DNS resolution. 
- self.agent = BlacklistingAgentWrapper( - self.agent, - ip_blacklist=self._ip_blacklist, - ip_whitelist=self._ip_whitelist, - ) - async def request( self, method: str, @@ -799,6 +753,72 @@ async def get_file( ) +class SimpleHttpClient(BaseHttpClient): + """ + An HTTP client capable of crossing a proxy and respecting a block/allow list. + + This also configures a larger / longer lasting HTTP connection pool. + + Args: + hs: The HomeServer instance to pass in + treq_args: Extra keyword arguments to be given to treq.request. + ip_blacklist: The IP addresses that are blacklisted that + we may not request. + ip_whitelist: The whitelisted IP addresses, that we can + request if it were otherwise caught in a blacklist. + use_proxy: Whether proxy settings should be discovered and used + from conventional environment variables. + """ + + def __init__( + self, + hs: "HomeServer", + treq_args: Optional[Dict[str, Any]] = None, + ip_whitelist: Optional[IPSet] = None, + ip_blacklist: Optional[IPSet] = None, + use_proxy: bool = False, + ): + super().__init__(hs, treq_args=treq_args) + self._ip_whitelist = ip_whitelist + self._ip_blacklist = ip_blacklist + + if self._ip_blacklist: + # If we have an IP blacklist, we need to use a DNS resolver which + # filters out blacklisted IP addresses, to prevent DNS rebinding. + self.reactor: ISynapseReactor = BlacklistingReactorWrapper( + self.reactor, self._ip_whitelist, self._ip_blacklist + ) + + # the pusher makes lots of concurrent SSL connections to Sygnal, and tends to + # do so in batches, so we need to allow the pool to keep lots of idle + # connections around. + pool = HTTPConnectionPool(self.reactor) + # XXX: The justification for using the cache factor here is that larger + # instances will need both more cache and more connections. + # Still, this should probably be a separate dial + pool.maxPersistentPerHost = max(int(100 * hs.config.caches.global_factor), 5) + pool.cachedConnectionTimeout = 2 * 60 + + self.agent: IAgent = ProxyAgent( + self.reactor, + hs.get_reactor(), + connectTimeout=15, + contextFactory=self.hs.get_http_client_context_factory(), + pool=pool, + use_proxy=use_proxy, + ) + + if self._ip_blacklist: + # If we have an IP blacklist, we then install the blacklisting Agent + # which prevents direct access to IP addresses, that are not caught + # by the DNS resolution. + self.agent = BlacklistingAgentWrapper( + self.agent, + ip_blacklist=self._ip_blacklist, + ip_whitelist=self._ip_whitelist, + ) + + def _timeout_to_request_timed_out_error(f: Failure) -> Failure: if f.check(twisted_error.TimeoutError, twisted_error.ConnectingCancelledError): # The TCP connection has its own timeout (set by the 'connectTimeout' param From 0475cae3ac5cb0126c1ce6b3349d445b9e242145 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Apr 2023 10:52:35 +0100 Subject: [PATCH 093/106] Bump serde_json from 1.0.95 to 1.0.96 (#15442) * Bump serde_json from 1.0.95 to 1.0.96 Bumps [serde_json](https://github.com/serde-rs/json) from 1.0.95 to 1.0.96. - [Release notes](https://github.com/serde-rs/json/releases) - [Commits](https://github.com/serde-rs/json/compare/v1.0.95...v1.0.96) --- updated-dependencies: - dependency-name: serde_json dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- Cargo.lock | 4 ++-- changelog.d/15442.misc | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/15442.misc diff --git a/Cargo.lock b/Cargo.lock index 4a2c6af8f317..f4ab2fb3fb74 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -343,9 +343,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.95" +version = "1.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d721eca97ac802aa7777b701877c8004d950fc142651367300d21c1cc0194744" +checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" dependencies = [ "itoa", "ryu", diff --git a/changelog.d/15442.misc b/changelog.d/15442.misc new file mode 100644 index 000000000000..83fcb12a098c --- /dev/null +++ b/changelog.d/15442.misc @@ -0,0 +1 @@ +Bump serde_json from 1.0.95 to 1.0.96. From fce59ca5a1084383eb166091ffac7657036506e3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Apr 2023 10:53:13 +0100 Subject: [PATCH 094/106] Bump dawidd6/action-download-artifact from 2.26.1 to 2.27.0 (#15441) * Bump dawidd6/action-download-artifact from 2.26.1 to 2.27.0 Bumps [dawidd6/action-download-artifact](https://github.com/dawidd6/action-download-artifact) from 2.26.1 to 2.27.0. - [Release notes](https://github.com/dawidd6/action-download-artifact/releases) - [Commits](https://github.com/dawidd6/action-download-artifact/compare/7132ab516fba5f602fafae6fdd4822afa10db76f...246dbf436b23d7c49e21a7ab8204ca9ecd1fe615) --- updated-dependencies: - dependency-name: dawidd6/action-download-artifact dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- .github/workflows/docs-pr-netlify.yaml | 2 +- changelog.d/15441.misc | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/15441.misc diff --git a/.github/workflows/docs-pr-netlify.yaml b/.github/workflows/docs-pr-netlify.yaml index 6cba6315c216..d613dd9e2636 100644 --- a/.github/workflows/docs-pr-netlify.yaml +++ b/.github/workflows/docs-pr-netlify.yaml @@ -14,7 +14,7 @@ jobs: # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess: - name: đź“Ą Download artifact - uses: dawidd6/action-download-artifact@7132ab516fba5f602fafae6fdd4822afa10db76f # v2.26.1 + uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615 # v2.27.0 with: workflow: docs-pr.yaml run_id: ${{ github.event.workflow_run.id }} diff --git a/changelog.d/15441.misc b/changelog.d/15441.misc new file mode 100644 index 000000000000..a46333f3a1eb --- /dev/null +++ b/changelog.d/15441.misc @@ -0,0 +1 @@ +Bump dawidd6/action-download-artifact from 2.26.1 to 2.27.0. 
From 49482222ca812e3b469746561e6d755a3b82494e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Apr 2023 10:53:40 +0100 Subject: [PATCH 095/106] Bump serde from 1.0.159 to 1.0.160 (#15443) * Bump serde from 1.0.159 to 1.0.160 Bumps [serde](https://github.com/serde-rs/serde) from 1.0.159 to 1.0.160. - [Release notes](https://github.com/serde-rs/serde/releases) - [Commits](https://github.com/serde-rs/serde/compare/v1.0.159...v1.0.160) --- updated-dependencies: - dependency-name: serde dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- Cargo.lock | 8 ++++---- changelog.d/15443.misc | 1 + 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelog.d/15443.misc diff --git a/Cargo.lock b/Cargo.lock index f4ab2fb3fb74..f661eb532cdc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -323,18 +323,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "serde" -version = "1.0.159" +version = "1.0.160" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c04e8343c3daeec41f58990b9d77068df31209f2af111e059e9fe9646693065" +checksum = "bb2f3770c8bce3bcda7e149193a069a0f4365bda1fa5cd88e03bca26afc1216c" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.159" +version = "1.0.160" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c614d17805b093df4b147b51339e7e44bf05ef59fba1e45d83500bcfb4d8585" +checksum = "291a097c63d8497e00160b166a967a4a79c64f3facdd01cbd7502231688d77df" dependencies = [ "proc-macro2", "quote", diff --git a/changelog.d/15443.misc b/changelog.d/15443.misc new file mode 100644 index 000000000000..c32c23aacb66 --- /dev/null +++ b/changelog.d/15443.misc @@ -0,0 +1 @@ +Bump serde from 1.0.159 to 1.0.160. From efab11825184a4251554f1e412549a4c23329d8b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Apr 2023 10:54:35 +0100 Subject: [PATCH 096/106] Bump pillow from 9.4.0 to 9.5.0 (#15444) * Bump pillow from 9.4.0 to 9.5.0 Bumps [pillow](https://github.com/python-pillow/Pillow) from 9.4.0 to 9.5.0. - [Release notes](https://github.com/python-pillow/Pillow/releases) - [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) - [Commits](https://github.com/python-pillow/Pillow/compare/9.4.0...9.5.0) --- updated-dependencies: - dependency-name: pillow dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/15444.misc | 1 + poetry.lock | 153 +++++++++++++++++++---------------------- 2 files changed, 72 insertions(+), 82 deletions(-) create mode 100644 changelog.d/15444.misc diff --git a/changelog.d/15444.misc b/changelog.d/15444.misc new file mode 100644 index 000000000000..32f1a0eba2d8 --- /dev/null +++ b/changelog.d/15444.misc @@ -0,0 +1 @@ +Bump pillow from 9.4.0 to 9.5.0. 
diff --git a/poetry.lock b/poetry.lock index 353b79fa5e39..2ebda51e7264 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1644,93 +1644,82 @@ files = [ [[package]] name = "pillow" -version = "9.4.0" +version = "9.5.0" description = "Python Imaging Library (Fork)" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "Pillow-9.4.0-1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b4b4e9dda4f4e4c4e6896f93e84a8f0bcca3b059de9ddf67dac3c334b1195e1"}, - {file = "Pillow-9.4.0-1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:fb5c1ad6bad98c57482236a21bf985ab0ef42bd51f7ad4e4538e89a997624e12"}, - {file = "Pillow-9.4.0-1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:f0caf4a5dcf610d96c3bd32932bfac8aee61c96e60481c2a0ea58da435e25acd"}, - {file = "Pillow-9.4.0-1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:3f4cc516e0b264c8d4ccd6b6cbc69a07c6d582d8337df79be1e15a5056b258c9"}, - {file = "Pillow-9.4.0-1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b8c2f6eb0df979ee99433d8b3f6d193d9590f735cf12274c108bd954e30ca858"}, - {file = "Pillow-9.4.0-1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b70756ec9417c34e097f987b4d8c510975216ad26ba6e57ccb53bc758f490dab"}, - {file = "Pillow-9.4.0-1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:43521ce2c4b865d385e78579a082b6ad1166ebed2b1a2293c3be1d68dd7ca3b9"}, - {file = "Pillow-9.4.0-2-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:9d9a62576b68cd90f7075876f4e8444487db5eeea0e4df3ba298ee38a8d067b0"}, - {file = "Pillow-9.4.0-2-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:87708d78a14d56a990fbf4f9cb350b7d89ee8988705e58e39bdf4d82c149210f"}, - {file = "Pillow-9.4.0-2-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:8a2b5874d17e72dfb80d917213abd55d7e1ed2479f38f001f264f7ce7bae757c"}, - {file = "Pillow-9.4.0-2-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:83125753a60cfc8c412de5896d10a0a405e0bd88d0470ad82e0869ddf0cb3848"}, - {file = "Pillow-9.4.0-2-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:9e5f94742033898bfe84c93c831a6f552bb629448d4072dd312306bab3bd96f1"}, - {file = "Pillow-9.4.0-2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:013016af6b3a12a2f40b704677f8b51f72cb007dac785a9933d5c86a72a7fe33"}, - {file = "Pillow-9.4.0-2-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:99d92d148dd03fd19d16175b6d355cc1b01faf80dae93c6c3eb4163709edc0a9"}, - {file = "Pillow-9.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157"}, - {file = "Pillow-9.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47"}, - {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343"}, - {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a3049a10261d7f2b6514d35bbb7a4dfc3ece4c4de14ef5876c4b7a23a0e566d"}, - {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16a8df99701f9095bea8a6c4b3197da105df6f74e6176c5b410bc2df2fd29a57"}, - {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:94cdff45173b1919350601f82d61365e792895e3c3a3443cf99819e6fbf717a5"}, - {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ed3e4b4e1e6de75fdc16d3259098de7c6571b1a6cc863b1a49e7d3d53e036070"}, - {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash 
= "sha256:d5b2f8a31bd43e0f18172d8ac82347c8f37ef3e0b414431157718aa234991b28"}, - {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:09b89ddc95c248ee788328528e6a2996e09eaccddeeb82a5356e92645733be35"}, - {file = "Pillow-9.4.0-cp310-cp310-win32.whl", hash = "sha256:f09598b416ba39a8f489c124447b007fe865f786a89dbfa48bb5cf395693132a"}, - {file = "Pillow-9.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6e78171be3fb7941f9910ea15b4b14ec27725865a73c15277bc39f5ca4f8391"}, - {file = "Pillow-9.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:3fa1284762aacca6dc97474ee9c16f83990b8eeb6697f2ba17140d54b453e133"}, - {file = "Pillow-9.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eaef5d2de3c7e9b21f1e762f289d17b726c2239a42b11e25446abf82b26ac132"}, - {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4dfdae195335abb4e89cc9762b2edc524f3c6e80d647a9a81bf81e17e3fb6f0"}, - {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6abfb51a82e919e3933eb137e17c4ae9c0475a25508ea88993bb59faf82f3b35"}, - {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451f10ef963918e65b8869e17d67db5e2f4ab40e716ee6ce7129b0cde2876eab"}, - {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6663977496d616b618b6cfa43ec86e479ee62b942e1da76a2c3daa1c75933ef4"}, - {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:60e7da3a3ad1812c128750fc1bc14a7ceeb8d29f77e0a2356a8fb2aa8925287d"}, - {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:19005a8e58b7c1796bc0167862b1f54a64d3b44ee5d48152b06bb861458bc0f8"}, - {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f715c32e774a60a337b2bb8ad9839b4abf75b267a0f18806f6f4f5f1688c4b5a"}, - {file = "Pillow-9.4.0-cp311-cp311-win32.whl", hash = "sha256:b222090c455d6d1a64e6b7bb5f4035c4dff479e22455c9eaa1bdd4c75b52c80c"}, - {file = "Pillow-9.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba6612b6548220ff5e9df85261bddc811a057b0b465a1226b39bfb8550616aee"}, - {file = "Pillow-9.4.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5f532a2ad4d174eb73494e7397988e22bf427f91acc8e6ebf5bb10597b49c493"}, - {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dd5a9c3091a0f414a963d427f920368e2b6a4c2f7527fdd82cde8ef0bc7a327"}, - {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef21af928e807f10bf4141cad4746eee692a0dd3ff56cfb25fce076ec3cc8abe"}, - {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:847b114580c5cc9ebaf216dd8c8dbc6b00a3b7ab0131e173d7120e6deade1f57"}, - {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:653d7fb2df65efefbcbf81ef5fe5e5be931f1ee4332c2893ca638c9b11a409c4"}, - {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:46f39cab8bbf4a384ba7cb0bc8bae7b7062b6a11cfac1ca4bc144dea90d4a9f5"}, - {file = "Pillow-9.4.0-cp37-cp37m-win32.whl", hash = "sha256:7ac7594397698f77bce84382929747130765f66406dc2cd8b4ab4da68ade4c6e"}, - {file = "Pillow-9.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:46c259e87199041583658457372a183636ae8cd56dbf3f0755e0f376a7f9d0e6"}, - {file = "Pillow-9.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:0e51f608da093e5d9038c592b5b575cadc12fd748af1479b5e858045fff955a9"}, - {file = "Pillow-9.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:765cb54c0b8724a7c12c55146ae4647e0274a839fb6de7bcba841e04298e1011"}, - {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:519e14e2c49fcf7616d6d2cfc5c70adae95682ae20f0395e9280db85e8d6c4df"}, - {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d197df5489004db87d90b918033edbeee0bd6df3848a204bca3ff0a903bef837"}, - {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0845adc64fe9886db00f5ab68c4a8cd933ab749a87747555cec1c95acea64b0b"}, - {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:e1339790c083c5a4de48f688b4841f18df839eb3c9584a770cbd818b33e26d5d"}, - {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:a96e6e23f2b79433390273eaf8cc94fec9c6370842e577ab10dabdcc7ea0a66b"}, - {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7cfc287da09f9d2a7ec146ee4d72d6ea1342e770d975e49a8621bf54eaa8f30f"}, - {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d7081c084ceb58278dd3cf81f836bc818978c0ccc770cbbb202125ddabec6628"}, - {file = "Pillow-9.4.0-cp38-cp38-win32.whl", hash = "sha256:df41112ccce5d47770a0c13651479fbcd8793f34232a2dd9faeccb75eb5d0d0d"}, - {file = "Pillow-9.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:7a21222644ab69ddd9967cfe6f2bb420b460dae4289c9d40ff9a4896e7c35c9a"}, - {file = "Pillow-9.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0f3269304c1a7ce82f1759c12ce731ef9b6e95b6df829dccd9fe42912cc48569"}, - {file = "Pillow-9.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb362e3b0976dc994857391b776ddaa8c13c28a16f80ac6522c23d5257156bed"}, - {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2e0f87144fcbbe54297cae708c5e7f9da21a4646523456b00cc956bd4c65815"}, - {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28676836c7796805914b76b1837a40f76827ee0d5398f72f7dcc634bae7c6264"}, - {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0884ba7b515163a1a05440a138adeb722b8a6ae2c2b33aea93ea3118dd3a899e"}, - {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:53dcb50fbdc3fb2c55431a9b30caeb2f7027fcd2aeb501459464f0214200a503"}, - {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:e8c5cf126889a4de385c02a2c3d3aba4b00f70234bfddae82a5eaa3ee6d5e3e6"}, - {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c6b1389ed66cdd174d040105123a5a1bc91d0aa7059c7261d20e583b6d8cbd2"}, - {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0dd4c681b82214b36273c18ca7ee87065a50e013112eea7d78c7a1b89a739153"}, - {file = "Pillow-9.4.0-cp39-cp39-win32.whl", hash = "sha256:6d9dfb9959a3b0039ee06c1a1a90dc23bac3b430842dcb97908ddde05870601c"}, - {file = "Pillow-9.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:54614444887e0d3043557d9dbc697dbb16cfb5a35d672b7a0fcc1ed0cf1c600b"}, - {file = "Pillow-9.4.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b9b752ab91e78234941e44abdecc07f1f0d8f51fb62941d32995b8161f68cfe5"}, - {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3b56206244dc8711f7e8b7d6cad4663917cd5b2d950799425076681e8766286"}, - {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aabdab8ec1e7ca7f1434d042bf8b1e92056245fb179790dc97ed040361f16bfd"}, - {file = 
"Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:db74f5562c09953b2c5f8ec4b7dfd3f5421f31811e97d1dbc0a7c93d6e3a24df"}, - {file = "Pillow-9.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e9d7747847c53a16a729b6ee5e737cf170f7a16611c143d95aa60a109a59c336"}, - {file = "Pillow-9.4.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b52ff4f4e002f828ea6483faf4c4e8deea8d743cf801b74910243c58acc6eda3"}, - {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:575d8912dca808edd9acd6f7795199332696d3469665ef26163cd090fa1f8bfa"}, - {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c4ed2ff6760e98d262e0cc9c9a7f7b8a9f61aa4d47c58835cdaf7b0b8811bb"}, - {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e621b0246192d3b9cb1dc62c78cfa4c6f6d2ddc0ec207d43c0dedecb914f152a"}, - {file = "Pillow-9.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8f127e7b028900421cad64f51f75c051b628db17fb00e099eb148761eed598c9"}, - {file = "Pillow-9.4.0.tar.gz", hash = "sha256:a1c2d7780448eb93fbcc3789bf3916aa5720d942e37945f4056680317f1cd23e"}, -] - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] + {file = "Pillow-9.5.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:ace6ca218308447b9077c14ea4ef381ba0b67ee78d64046b3f19cf4e1139ad16"}, + {file = "Pillow-9.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3d403753c9d5adc04d4694d35cf0391f0f3d57c8e0030aac09d7678fa8030aa"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ba1b81ee69573fe7124881762bb4cd2e4b6ed9dd28c9c60a632902fe8db8b38"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe7e1c262d3392afcf5071df9afa574544f28eac825284596ac6db56e6d11062"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f36397bf3f7d7c6a3abdea815ecf6fd14e7fcd4418ab24bae01008d8d8ca15e"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:252a03f1bdddce077eff2354c3861bf437c892fb1832f75ce813ee94347aa9b5"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85ec677246533e27770b0de5cf0f9d6e4ec0c212a1f89dfc941b64b21226009d"}, + {file = "Pillow-9.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b416f03d37d27290cb93597335a2f85ed446731200705b22bb927405320de903"}, + {file = "Pillow-9.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1781a624c229cb35a2ac31cc4a77e28cafc8900733a864870c49bfeedacd106a"}, + {file = "Pillow-9.5.0-cp310-cp310-win32.whl", hash = "sha256:8507eda3cd0608a1f94f58c64817e83ec12fa93a9436938b191b80d9e4c0fc44"}, + {file = "Pillow-9.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:d3c6b54e304c60c4181da1c9dadf83e4a54fd266a99c70ba646a9baa626819eb"}, + {file = "Pillow-9.5.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:7ec6f6ce99dab90b52da21cf0dc519e21095e332ff3b399a357c187b1a5eee32"}, + {file = "Pillow-9.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:560737e70cb9c6255d6dcba3de6578a9e2ec4b573659943a5e7e4af13f298f5c"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96e88745a55b88a7c64fa49bceff363a1a27d9a64e04019c2281049444a571e3"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d9c206c29b46cfd343ea7cdfe1232443072bbb270d6a46f59c259460db76779a"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfcc2c53c06f2ccb8976fb5c71d448bdd0a07d26d8e07e321c103416444c7ad1"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:a0f9bb6c80e6efcde93ffc51256d5cfb2155ff8f78292f074f60f9e70b942d99"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8d935f924bbab8f0a9a28404422da8af4904e36d5c33fc6f677e4c4485515625"}, + {file = "Pillow-9.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fed1e1cf6a42577953abbe8e6cf2fe2f566daebde7c34724ec8803c4c0cda579"}, + {file = "Pillow-9.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c1170d6b195555644f0616fd6ed929dfcf6333b8675fcca044ae5ab110ded296"}, + {file = "Pillow-9.5.0-cp311-cp311-win32.whl", hash = "sha256:54f7102ad31a3de5666827526e248c3530b3a33539dbda27c6843d19d72644ec"}, + {file = "Pillow-9.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfa4561277f677ecf651e2b22dc43e8f5368b74a25a8f7d1d4a3a243e573f2d4"}, + {file = "Pillow-9.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:965e4a05ef364e7b973dd17fc765f42233415974d773e82144c9bbaaaea5d089"}, + {file = "Pillow-9.5.0-cp312-cp312-win32.whl", hash = "sha256:22baf0c3cf0c7f26e82d6e1adf118027afb325e703922c8dfc1d5d0156bb2eeb"}, + {file = "Pillow-9.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:432b975c009cf649420615388561c0ce7cc31ce9b2e374db659ee4f7d57a1f8b"}, + {file = "Pillow-9.5.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5d4ebf8e1db4441a55c509c4baa7a0587a0210f7cd25fcfe74dbbce7a4bd1906"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:375f6e5ee9620a271acb6820b3d1e94ffa8e741c0601db4c0c4d3cb0a9c224bf"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99eb6cafb6ba90e436684e08dad8be1637efb71c4f2180ee6b8f940739406e78"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfaaf10b6172697b9bceb9a3bd7b951819d1ca339a5ef294d1f1ac6d7f63270"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:763782b2e03e45e2c77d7779875f4432e25121ef002a41829d8868700d119392"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:35f6e77122a0c0762268216315bf239cf52b88865bba522999dc38f1c52b9b47"}, + {file = "Pillow-9.5.0-cp37-cp37m-win32.whl", hash = "sha256:aca1c196f407ec7cf04dcbb15d19a43c507a81f7ffc45b690899d6a76ac9fda7"}, + {file = "Pillow-9.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322724c0032af6692456cd6ed554bb85f8149214d97398bb80613b04e33769f6"}, + {file = "Pillow-9.5.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:a0aa9417994d91301056f3d0038af1199eb7adc86e646a36b9e050b06f526597"}, + {file = "Pillow-9.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f8286396b351785801a976b1e85ea88e937712ee2c3ac653710a4a57a8da5d9c"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c830a02caeb789633863b466b9de10c015bded434deb3ec87c768e53752ad22a"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbd359831c1657d69bb81f0db962905ee05e5e9451913b18b831febfe0519082"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8fc330c3370a81bbf3f88557097d1ea26cd8b019d6433aa59f71195f5ddebbf"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = 
"sha256:7002d0797a3e4193c7cdee3198d7c14f92c0836d6b4a3f3046a64bd1ce8df2bf"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:229e2c79c00e85989a34b5981a2b67aa079fd08c903f0aaead522a1d68d79e51"}, + {file = "Pillow-9.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9adf58f5d64e474bed00d69bcd86ec4bcaa4123bfa70a65ce72e424bfb88ed96"}, + {file = "Pillow-9.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:662da1f3f89a302cc22faa9f14a262c2e3951f9dbc9617609a47521c69dd9f8f"}, + {file = "Pillow-9.5.0-cp38-cp38-win32.whl", hash = "sha256:6608ff3bf781eee0cd14d0901a2b9cc3d3834516532e3bd673a0a204dc8615fc"}, + {file = "Pillow-9.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:e49eb4e95ff6fd7c0c402508894b1ef0e01b99a44320ba7d8ecbabefddcc5569"}, + {file = "Pillow-9.5.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:482877592e927fd263028c105b36272398e3e1be3269efda09f6ba21fd83ec66"}, + {file = "Pillow-9.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3ded42b9ad70e5f1754fb7c2e2d6465a9c842e41d178f262e08b8c85ed8a1d8e"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c446d2245ba29820d405315083d55299a796695d747efceb5717a8b450324115"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8aca1152d93dcc27dc55395604dcfc55bed5f25ef4c98716a928bacba90d33a3"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:608488bdcbdb4ba7837461442b90ea6f3079397ddc968c31265c1e056964f1ef"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:60037a8db8750e474af7ffc9faa9b5859e6c6d0a50e55c45576bf28be7419705"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:07999f5834bdc404c442146942a2ecadd1cb6292f5229f4ed3b31e0a108746b1"}, + {file = "Pillow-9.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a127ae76092974abfbfa38ca2d12cbeddcdeac0fb71f9627cc1135bedaf9d51a"}, + {file = "Pillow-9.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:489f8389261e5ed43ac8ff7b453162af39c3e8abd730af8363587ba64bb2e865"}, + {file = "Pillow-9.5.0-cp39-cp39-win32.whl", hash = "sha256:9b1af95c3a967bf1da94f253e56b6286b50af23392a886720f563c547e48e964"}, + {file = "Pillow-9.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:77165c4a5e7d5a284f10a6efaa39a0ae8ba839da344f20b111d62cc932fa4e5d"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:833b86a98e0ede388fa29363159c9b1a294b0905b5128baf01db683672f230f5"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aaf305d6d40bd9632198c766fb64f0c1a83ca5b667f16c1e79e1661ab5060140"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0852ddb76d85f127c135b6dd1f0bb88dbb9ee990d2cd9aa9e28526c93e794fba"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:91ec6fe47b5eb5a9968c79ad9ed78c342b1f97a091677ba0e012701add857829"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb841572862f629b99725ebaec3287fc6d275be9b14443ea746c1dd325053cbd"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c380b27d041209b849ed246b111b7c166ba36d7933ec6e41175fd15ab9eb1572"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c9af5a3b406a50e313467e3565fc99929717f780164fe6fbb7704edba0cebbe"}, + {file = 
"Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5671583eab84af046a397d6d0ba25343c00cd50bce03787948e0fff01d4fd9b1"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:84a6f19ce086c1bf894644b43cd129702f781ba5751ca8572f08aa40ef0ab7b7"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1e7723bd90ef94eda669a3c2c19d549874dd5badaeefabefd26053304abe5799"}, + {file = "Pillow-9.5.0.tar.gz", hash = "sha256:bf548479d336726d7a0eceb6e767e179fbde37833ae42794602631a070d630f1"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] [[package]] From 3a82433ccf059afed0f37db479b6925b7621f4b8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Apr 2023 10:55:03 +0100 Subject: [PATCH 097/106] Bump furo from 2023.3.23 to 2023.3.27 (#15445) * Bump furo from 2023.3.23 to 2023.3.27 Bumps [furo](https://github.com/pradyunsg/furo) from 2023.3.23 to 2023.3.27. - [Release notes](https://github.com/pradyunsg/furo/releases) - [Changelog](https://github.com/pradyunsg/furo/blob/main/docs/changelog.md) - [Commits](https://github.com/pradyunsg/furo/compare/2023.03.23...2023.03.27) --- updated-dependencies: - dependency-name: furo dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/15445.misc | 1 + poetry.lock | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15445.misc diff --git a/changelog.d/15445.misc b/changelog.d/15445.misc new file mode 100644 index 000000000000..2ca214a7468d --- /dev/null +++ b/changelog.d/15445.misc @@ -0,0 +1 @@ +Bump furo from 2023.3.23 to 2023.3.27. diff --git a/poetry.lock b/poetry.lock index 2ebda51e7264..fc4ba8d27fd4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -580,14 +580,14 @@ dev = ["Sphinx", "coverage", "flake8", "lxml", "lxml-stubs", "memory-profiler", [[package]] name = "furo" -version = "2023.3.23" +version = "2023.3.27" description = "A clean customisable Sphinx documentation theme." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "furo-2023.3.23-py3-none-any.whl", hash = "sha256:1cdf0730496f6ac0ecf394845fe55010539d987a3085f29d819e49a8e87da60a"}, - {file = "furo-2023.3.23.tar.gz", hash = "sha256:6cf9a59fe2919693ecff6509a08229afa081583cbb5b81f59c3e755f3bd81d26"}, + {file = "furo-2023.3.27-py3-none-any.whl", hash = "sha256:4ab2be254a2d5e52792d0ca793a12c35582dd09897228a6dd47885dabd5c9521"}, + {file = "furo-2023.3.27.tar.gz", hash = "sha256:b99e7867a5cc833b2b34d7230631dd6558c7a29f93071fdbb5709634bb33c5a5"}, ] [package.dependencies] From 745704ca691c8cbd24df4b6e6a2bf428bc013f67 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Apr 2023 10:55:22 +0100 Subject: [PATCH 098/106] Bump types-pyopenssl from 23.1.0.0 to 23.1.0.2 (#15446) * Bump types-pyopenssl from 23.1.0.0 to 23.1.0.2 Bumps [types-pyopenssl](https://github.com/python/typeshed) from 23.1.0.0 to 23.1.0.2. 
- [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-pyopenssl dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/15446.misc | 1 + poetry.lock | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/15446.misc diff --git a/changelog.d/15446.misc b/changelog.d/15446.misc new file mode 100644 index 000000000000..0947909e2b5f --- /dev/null +++ b/changelog.d/15446.misc @@ -0,0 +1 @@ +Bump types-pyopenssl from 23.1.0.0 to 23.1.0.2. diff --git a/poetry.lock b/poetry.lock index fc4ba8d27fd4..31cf156a44e7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3071,14 +3071,14 @@ files = [ [[package]] name = "types-pyopenssl" -version = "23.1.0.0" +version = "23.1.0.2" description = "Typing stubs for pyOpenSSL" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-pyOpenSSL-23.1.0.0.tar.gz", hash = "sha256:acc153718bff497e8f6ca3beecb5ea7a3087c796e40d569fded8bafbfca73605"}, - {file = "types_pyOpenSSL-23.1.0.0-py3-none-any.whl", hash = "sha256:9dacec020a3484ef5e4ea4bd9d403a981765b80821d5a40b790b2ba2f09d58db"}, + {file = "types-pyOpenSSL-23.1.0.2.tar.gz", hash = "sha256:20b80971b86240e8432a1832bd8124cea49c3088c7bfc77dfd23be27ffe4a517"}, + {file = "types_pyOpenSSL-23.1.0.2-py3-none-any.whl", hash = "sha256:b050641aeff6dfebf231ad719bdac12d53b8ee818d4afb67b886333484629957"}, ] [package.dependencies] From 838de27666366d57c6ffddff8e2a697e498e4856 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Apr 2023 10:55:54 +0100 Subject: [PATCH 099/106] Bump psycopg2 from 2.9.5 to 2.9.6 (#15448) * Bump psycopg2 from 2.9.5 to 2.9.6 Bumps [psycopg2](https://github.com/psycopg/psycopg2) from 2.9.5 to 2.9.6. - [Release notes](https://github.com/psycopg/psycopg2/releases) - [Changelog](https://github.com/psycopg/psycopg2/blob/master/NEWS) - [Commits](https://github.com/psycopg/psycopg2/commits/2.9.6) --- updated-dependencies: - dependency-name: psycopg2 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/15448.misc | 1 + poetry.lock | 28 ++++++++++++++-------------- 2 files changed, 15 insertions(+), 14 deletions(-) create mode 100644 changelog.d/15448.misc diff --git a/changelog.d/15448.misc b/changelog.d/15448.misc new file mode 100644 index 000000000000..8e4e2576ef27 --- /dev/null +++ b/changelog.d/15448.misc @@ -0,0 +1 @@ +Bump psycopg2 from 2.9.5 to 2.9.6. 
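For context on the poetry.lock hunks in these dependency bumps: each `files = [...]` array pins the sha256 digest of every published wheel and sdist for the locked version, and Poetry refuses to install an artifact whose digest does not match the lockfile. A minimal sketch of the underlying check, assuming a locally downloaded sdist; the expected digest is the `Pillow-9.5.0.tar.gz` entry from the hunk above:

```python
import hashlib


def sha256_of(path: str) -> str:
    """Stream a file through sha256, as lockfile verification effectively does."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            digest.update(chunk)
    return digest.hexdigest()


# Digest taken from the poetry.lock entry above; the local path is assumed.
EXPECTED = "bf548479d336726d7a0eceb6e767e179fbde37833ae42794602631a070d630f1"
assert sha256_of("Pillow-9.5.0.tar.gz") == EXPECTED
```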
diff --git a/poetry.lock b/poetry.lock index 31cf156a44e7..8c6c4ec0cc60 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1785,25 +1785,25 @@ twisted = ["twisted"] [[package]] name = "psycopg2" -version = "2.9.5" +version = "2.9.6" description = "psycopg2 - Python-PostgreSQL Database Adapter" category = "main" optional = true python-versions = ">=3.6" files = [ - {file = "psycopg2-2.9.5-cp310-cp310-win32.whl", hash = "sha256:d3ef67e630b0de0779c42912fe2cbae3805ebaba30cda27fea2a3de650a9414f"}, - {file = "psycopg2-2.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:4cb9936316d88bfab614666eb9e32995e794ed0f8f6b3b718666c22819c1d7ee"}, - {file = "psycopg2-2.9.5-cp311-cp311-win32.whl", hash = "sha256:093e3894d2d3c592ab0945d9eba9d139c139664dcf83a1c440b8a7aa9bb21955"}, - {file = "psycopg2-2.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:920bf418000dd17669d2904472efeab2b20546efd0548139618f8fa305d1d7ad"}, - {file = "psycopg2-2.9.5-cp36-cp36m-win32.whl", hash = "sha256:b9ac1b0d8ecc49e05e4e182694f418d27f3aedcfca854ebd6c05bb1cffa10d6d"}, - {file = "psycopg2-2.9.5-cp36-cp36m-win_amd64.whl", hash = "sha256:fc04dd5189b90d825509caa510f20d1d504761e78b8dfb95a0ede180f71d50e5"}, - {file = "psycopg2-2.9.5-cp37-cp37m-win32.whl", hash = "sha256:922cc5f0b98a5f2b1ff481f5551b95cd04580fd6f0c72d9b22e6c0145a4840e0"}, - {file = "psycopg2-2.9.5-cp37-cp37m-win_amd64.whl", hash = "sha256:1e5a38aa85bd660c53947bd28aeaafb6a97d70423606f1ccb044a03a1203fe4a"}, - {file = "psycopg2-2.9.5-cp38-cp38-win32.whl", hash = "sha256:f5b6320dbc3cf6cfb9f25308286f9f7ab464e65cfb105b64cc9c52831748ced2"}, - {file = "psycopg2-2.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:1a5c7d7d577e0eabfcf15eb87d1e19314c8c4f0e722a301f98e0e3a65e238b4e"}, - {file = "psycopg2-2.9.5-cp39-cp39-win32.whl", hash = "sha256:322fd5fca0b1113677089d4ebd5222c964b1760e361f151cbb2706c4912112c5"}, - {file = "psycopg2-2.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:190d51e8c1b25a47484e52a79638a8182451d6f6dff99f26ad9bd81e5359a0fa"}, - {file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"}, + {file = "psycopg2-2.9.6-cp310-cp310-win32.whl", hash = "sha256:f7a7a5ee78ba7dc74265ba69e010ae89dae635eea0e97b055fb641a01a31d2b1"}, + {file = "psycopg2-2.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:f75001a1cbbe523e00b0ef896a5a1ada2da93ccd752b7636db5a99bc57c44494"}, + {file = "psycopg2-2.9.6-cp311-cp311-win32.whl", hash = "sha256:53f4ad0a3988f983e9b49a5d9765d663bbe84f508ed655affdb810af9d0972ad"}, + {file = "psycopg2-2.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:b81fcb9ecfc584f661b71c889edeae70bae30d3ef74fa0ca388ecda50b1222b7"}, + {file = "psycopg2-2.9.6-cp36-cp36m-win32.whl", hash = "sha256:11aca705ec888e4f4cea97289a0bf0f22a067a32614f6ef64fcf7b8bfbc53744"}, + {file = "psycopg2-2.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:36c941a767341d11549c0fbdbb2bf5be2eda4caf87f65dfcd7d146828bd27f39"}, + {file = "psycopg2-2.9.6-cp37-cp37m-win32.whl", hash = "sha256:869776630c04f335d4124f120b7fb377fe44b0a7645ab3c34b4ba42516951889"}, + {file = "psycopg2-2.9.6-cp37-cp37m-win_amd64.whl", hash = "sha256:a8ad4a47f42aa6aec8d061fdae21eaed8d864d4bb0f0cade5ad32ca16fcd6258"}, + {file = "psycopg2-2.9.6-cp38-cp38-win32.whl", hash = "sha256:2362ee4d07ac85ff0ad93e22c693d0f37ff63e28f0615a16b6635a645f4b9214"}, + {file = "psycopg2-2.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:d24ead3716a7d093b90b27b3d73459fe8cd90fd7065cf43b3c40966221d8c394"}, + {file = "psycopg2-2.9.6-cp39-cp39-win32.whl", hash = 
"sha256:1861a53a6a0fd248e42ea37c957d36950da00266378746588eab4f4b5649e95f"}, + {file = "psycopg2-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:ded2faa2e6dfb430af7713d87ab4abbfc764d8d7fb73eafe96a24155f906ebf5"}, + {file = "psycopg2-2.9.6.tar.gz", hash = "sha256:f15158418fd826831b28585e2ab48ed8df2d0d98f502a2b4fe619e7d5ca29011"}, ] [[package]] From d935b806a52c967543f59def690aec0fa873d13c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Apr 2023 10:57:14 +0100 Subject: [PATCH 100/106] Bump mypy from 1.0.0 to 1.0.1 (#15447) * Bump mypy from 1.0.0 to 1.0.1 Bumps [mypy](https://github.com/python/mypy) from 1.0.0 to 1.0.1. - [Release notes](https://github.com/python/mypy/releases) - [Commits](https://github.com/python/mypy/compare/v1.0.0...v1.0.1) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Changelog --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: GitHub Actions --- changelog.d/15447.misc | 1 + poetry.lock | 54 +++++++++++++++++++++--------------------- 2 files changed, 28 insertions(+), 27 deletions(-) create mode 100644 changelog.d/15447.misc diff --git a/changelog.d/15447.misc b/changelog.d/15447.misc new file mode 100644 index 000000000000..16314e0fdcb3 --- /dev/null +++ b/changelog.d/15447.misc @@ -0,0 +1 @@ +Bump mypy from 1.0.0 to 1.0.1. diff --git a/poetry.lock b/poetry.lock index 8c6c4ec0cc60..f09a5b151ca9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1460,38 +1460,38 @@ files = [ [[package]] name = "mypy" -version = "1.0.0" +version = "1.0.1" description = "Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-1.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0626db16705ab9f7fa6c249c017c887baf20738ce7f9129da162bb3075fc1af"}, - {file = "mypy-1.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1ace23f6bb4aec4604b86c4843276e8fa548d667dbbd0cb83a3ae14b18b2db6c"}, - {file = "mypy-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87edfaf344c9401942883fad030909116aa77b0fa7e6e8e1c5407e14549afe9a"}, - {file = "mypy-1.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0ab090d9240d6b4e99e1fa998c2d0aa5b29fc0fb06bd30e7ad6183c95fa07593"}, - {file = "mypy-1.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:7cc2c01dfc5a3cbddfa6c13f530ef3b95292f926329929001d45e124342cd6b7"}, - {file = "mypy-1.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14d776869a3e6c89c17eb943100f7868f677703c8a4e00b3803918f86aafbc52"}, - {file = "mypy-1.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bb2782a036d9eb6b5a6efcdda0986774bf798beef86a62da86cb73e2a10b423d"}, - {file = "mypy-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cfca124f0ac6707747544c127880893ad72a656e136adc935c8600740b21ff5"}, - {file = "mypy-1.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8845125d0b7c57838a10fd8925b0f5f709d0e08568ce587cc862aacce453e3dd"}, - {file = "mypy-1.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b1b9e1ed40544ef486fa8ac022232ccc57109f379611633ede8e71630d07d2"}, - {file = "mypy-1.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c7cf862aef988b5fbaa17764ad1d21b4831436701c7d2b653156a9497d92c83c"}, - {file = "mypy-1.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5cd187d92b6939617f1168a4fe68f68add749902c010e66fe574c165c742ed88"}, - {file = "mypy-1.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4e5175026618c178dfba6188228b845b64131034ab3ba52acaffa8f6c361f805"}, - {file = "mypy-1.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2f6ac8c87e046dc18c7d1d7f6653a66787a4555085b056fe2d599f1f1a2a2d21"}, - {file = "mypy-1.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7306edca1c6f1b5fa0bc9aa645e6ac8393014fa82d0fa180d0ebc990ebe15964"}, - {file = "mypy-1.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3cfad08f16a9c6611e6143485a93de0e1e13f48cfb90bcad7d5fde1c0cec3d36"}, - {file = "mypy-1.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67cced7f15654710386e5c10b96608f1ee3d5c94ca1da5a2aad5889793a824c1"}, - {file = "mypy-1.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a86b794e8a56ada65c573183756eac8ac5b8d3d59daf9d5ebd72ecdbb7867a43"}, - {file = "mypy-1.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:50979d5efff8d4135d9db293c6cb2c42260e70fb010cbc697b1311a4d7a39ddb"}, - {file = "mypy-1.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ae4c7a99e5153496243146a3baf33b9beff714464ca386b5f62daad601d87af"}, - {file = "mypy-1.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e398652d005a198a7f3c132426b33c6b85d98aa7dc852137a2a3be8890c4072"}, - {file = "mypy-1.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be78077064d016bc1b639c2cbcc5be945b47b4261a4f4b7d8923f6c69c5c9457"}, - {file = "mypy-1.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92024447a339400ea00ac228369cd242e988dd775640755fa4ac0c126e49bb74"}, - {file = "mypy-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:fe523fcbd52c05040c7bee370d66fee8373c5972171e4fbc323153433198592d"}, - {file = "mypy-1.0.0-py3-none-any.whl", hash = "sha256:2efa963bdddb27cb4a0d42545cd137a8d2b883bd181bbc4525b568ef6eca258f"}, - {file = "mypy-1.0.0.tar.gz", hash = "sha256:f34495079c8d9da05b183f9f7daec2878280c2ad7cc81da686ef0b484cea2ecf"}, + {file = "mypy-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71a808334d3f41ef011faa5a5cd8153606df5fc0b56de5b2e89566c8093a0c9a"}, + {file = "mypy-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:920169f0184215eef19294fa86ea49ffd4635dedfdea2b57e45cb4ee85d5ccaf"}, + {file = "mypy-1.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a0f74a298769d9fdc8498fcb4f2beb86f0564bcdb1a37b58cbbe78e55cf8c0"}, + {file = "mypy-1.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:65b122a993d9c81ea0bfde7689b3365318a88bde952e4dfa1b3a8b4ac05d168b"}, + {file = "mypy-1.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:5deb252fd42a77add936b463033a59b8e48eb2eaec2976d76b6878d031933fe4"}, + {file = "mypy-1.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2013226d17f20468f34feddd6aae4635a55f79626549099354ce641bc7d40262"}, + {file = "mypy-1.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:48525aec92b47baed9b3380371ab8ab6e63a5aab317347dfe9e55e02aaad22e8"}, + {file = "mypy-1.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96b8a0c019fe29040d520d9257d8c8f122a7343a8307bf8d6d4a43f5c5bfcc8"}, + {file = "mypy-1.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:448de661536d270ce04f2d7dddaa49b2fdba6e3bd8a83212164d4174ff43aa65"}, + {file = "mypy-1.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:d42a98e76070a365a1d1c220fcac8aa4ada12ae0db679cb4d910fabefc88b994"}, + {file = "mypy-1.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:e64f48c6176e243ad015e995de05af7f22bbe370dbb5b32bd6988438ec873919"}, + {file = "mypy-1.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd63e4f50e3538617887e9aee91855368d9fc1dea30da743837b0df7373bc4"}, + {file = "mypy-1.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dbeb24514c4acbc78d205f85dd0e800f34062efcc1f4a4857c57e4b4b8712bff"}, + {file = "mypy-1.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a2948c40a7dd46c1c33765718936669dc1f628f134013b02ff5ac6c7ef6942bf"}, + {file = "mypy-1.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bc8d6bd3b274dd3846597855d96d38d947aedba18776aa998a8d46fabdaed76"}, + {file = "mypy-1.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:17455cda53eeee0a4adb6371a21dd3dbf465897de82843751cf822605d152c8c"}, + {file = "mypy-1.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e831662208055b006eef68392a768ff83596035ffd6d846786578ba1714ba8f6"}, + {file = "mypy-1.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e60d0b09f62ae97a94605c3f73fd952395286cf3e3b9e7b97f60b01ddfbbda88"}, + {file = "mypy-1.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:0af4f0e20706aadf4e6f8f8dc5ab739089146b83fd53cb4a7e0e850ef3de0bb6"}, + {file = "mypy-1.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24189f23dc66f83b839bd1cce2dfc356020dfc9a8bae03978477b15be61b062e"}, + {file = "mypy-1.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93a85495fb13dc484251b4c1fd7a5ac370cd0d812bbfc3b39c1bafefe95275d5"}, + {file = "mypy-1.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f546ac34093c6ce33f6278f7c88f0f147a4849386d3bf3ae193702f4fe31407"}, + {file = "mypy-1.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c6c2ccb7af7154673c591189c3687b013122c5a891bb5651eca3db8e6c6c55bd"}, + {file = "mypy-1.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:15b5a824b58c7c822c51bc66308e759243c32631896743f030daf449fe3677f3"}, + {file = "mypy-1.0.1-py3-none-any.whl", hash = "sha256:eda5c8b9949ed411ff752b9a01adda31afe7eae1e53e946dbdf9db23865e66c4"}, + {file = "mypy-1.0.1.tar.gz", hash = "sha256:28cea5a6392bb43d266782983b5a4216c25544cd7d80be681a155ddcdafd152d"}, ] [package.dependencies] From e12d788bb7526b7d9ab09bb3921b3cd17f20a939 Mon Sep 17 00:00:00 2001 From: Jason Little Date: Mon, 17 Apr 2023 18:53:43 -0500 Subject: [PATCH 101/106] Switch `InstanceLocationConfig` to a pydantic `BaseModel` (#15431) * Switch InstanceLocationConfig to a pydantic BaseModel, apply Strict* types and add a few helper methods(that will make more sense in follow up work). Co-authored-by: David Robertson --- changelog.d/15431.feature | 1 + synapse/config/_util.py | 28 ++++++++++++++++++++- synapse/config/workers.py | 52 ++++++++++++++++++++++++++++++++------- 3 files changed, 71 insertions(+), 10 deletions(-) create mode 100644 changelog.d/15431.feature diff --git a/changelog.d/15431.feature b/changelog.d/15431.feature new file mode 100644 index 000000000000..4492406b49fb --- /dev/null +++ b/changelog.d/15431.feature @@ -0,0 +1 @@ +Add some validation to `instance_map` configuration loading. diff --git a/synapse/config/_util.py b/synapse/config/_util.py index d3a4b484abb8..dfc5d1221063 100644 --- a/synapse/config/_util.py +++ b/synapse/config/_util.py @@ -11,9 +11,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Iterable +from typing import Any, Dict, Iterable, Type, TypeVar import jsonschema +from pydantic import BaseModel, ValidationError, parse_obj_as from synapse.config._base import ConfigError from synapse.types import JsonDict @@ -64,3 +65,28 @@ def json_error_to_config_error( else: path.append(str(p)) return ConfigError(e.message, path) + + +Model = TypeVar("Model", bound=BaseModel) + + +def parse_and_validate_mapping( + config: Any, + model_type: Type[Model], +) -> Dict[str, Model]: + """Parse `config` as a mapping from strings to a given `Model` type. + Args: + config: The configuration data to check + model_type: The BaseModel to validate and parse against. + Returns: + Fully validated and parsed Dict[str, Model]. + Raises: + ConfigError, if given improper input. + """ + try: + # type-ignore: mypy doesn't like constructing `Dict[str, model_type]` because + # `model_type` is a runtime variable. Pydantic is fine with this. + instances = parse_obj_as(Dict[str, model_type], config) # type: ignore[valid-type] + except ValidationError as e: + raise ConfigError(str(e)) from e + return instances diff --git a/synapse/config/workers.py b/synapse/config/workers.py index 1dfbe27e89a2..95b4047f1d3f 100644 --- a/synapse/config/workers.py +++ b/synapse/config/workers.py @@ -18,6 +18,7 @@ from typing import Any, Dict, List, Union import attr +from pydantic import BaseModel, Extra, StrictBool, StrictInt, StrictStr from synapse.config._base import ( Config, @@ -25,6 +26,7 @@ RoutableShardedWorkerHandlingConfig, ShardedWorkerHandlingConfig, ) +from synapse.config._util import parse_and_validate_mapping from synapse.config.server import ( DIRECT_TCP_ERROR, TCPListenerConfig, @@ -50,13 +52,43 @@ def _instance_to_list_converter(obj: Union[str, List[str]]) -> List[str]: return obj -@attr.s(auto_attribs=True) -class InstanceLocationConfig: +class ConfigModel(BaseModel): + """A custom version of Pydantic's BaseModel which + + - ignores unknown fields and + - does not allow fields to be overwritten after construction, + + but otherwise uses Pydantic's default behaviour. + + For now, ignore unknown fields. In the future, we could change this so that unknown + config values cause a ValidationError, provided the error messages are meaningful to + server operators. + + Subclassing in this way is recommended by + https://pydantic-docs.helpmanual.io/usage/model_config/#change-behaviour-globally + """ + + class Config: + # By default, ignore fields that we don't recognise. + extra = Extra.ignore + # By default, don't allow fields to be reassigned after parsing. + allow_mutation = False + + +class InstanceLocationConfig(ConfigModel): """The host and port to talk to an instance via HTTP replication.""" - host: str - port: int - tls: bool = False + host: StrictStr + port: StrictInt + tls: StrictBool = False + + def scheme(self) -> str: + """Hardcode a retrievable scheme based on self.tls""" + return "https" if self.tls else "http" + + def netloc(self) -> str: + """Nicely format the network location data""" + return f"{self.host}:{self.port}" @attr.s @@ -183,10 +215,12 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: ) # A map from instance name to host/port of their HTTP replication endpoint. 
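A short sketch of how the stricter parsing above behaves at startup, assuming pydantic v1 (which provides the `parse_obj_as`, `Extra`, and `allow_mutation` used in this patch) and the `InstanceLocationConfig` defined in the hunk above:

```python
from typing import Dict

from pydantic import ValidationError, parse_obj_as

from synapse.config.workers import InstanceLocationConfig

good = {"worker1": {"host": "localhost", "port": 8034}}
bad = {"worker1": {"host": "localhost", "port": "8034"}}  # port is a str

# Valid input parses; `tls` falls back to its StrictBool default of False.
instances = parse_obj_as(Dict[str, InstanceLocationConfig], good)
assert instances["worker1"].scheme() == "http"
assert instances["worker1"].netloc() == "localhost:8034"

try:
    parse_obj_as(Dict[str, InstanceLocationConfig], bad)
except ValidationError:
    # StrictInt rejects the string "8034" instead of coercing it;
    # parse_and_validate_mapping() re-raises this as a ConfigError,
    # so a malformed instance_map now fails startup with a clear message.
    pass
```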
- instance_map = config.get("instance_map") or {} - self.instance_map = { - name: InstanceLocationConfig(**c) for name, c in instance_map.items() - } + self.instance_map: Dict[ + str, InstanceLocationConfig + ] = parse_and_validate_mapping( + config.get("instance_map", {}), + InstanceLocationConfig, + ) # Map from type of streams to source, c.f. WriterLocations. writers = config.get("stream_writers") or {} From 929797d939e6d55ad7b0838c361396f43903156a Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Mon, 17 Apr 2023 18:16:02 -0600 Subject: [PATCH 102/106] Add a note to the config documentation that the 'delete_stale_devices_after' job always runs on the main process (#15452) --- changelog.d/15452.doc | 1 + docs/usage/configuration/config_documentation.md | 4 ++++ 2 files changed, 5 insertions(+) create mode 100644 changelog.d/15452.doc diff --git a/changelog.d/15452.doc b/changelog.d/15452.doc new file mode 100644 index 000000000000..330c90b3d672 --- /dev/null +++ b/changelog.d/15452.doc @@ -0,0 +1 @@ +Note that the `delete_stale_devices_after` background job always runs on the main process. \ No newline at end of file diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index c5c2c2b6152e..1b6f2569490e 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -577,6 +577,10 @@ delete any device that hasn't been accessed for more than the specified amount o Defaults to no duration, which means devices are never pruned. +**Note:** This task will always run on the main process, regardless of the value of +`run_background_tasks_on`. This is due to workers currently not having the ability to +delete devices. + Example configuration: ```yaml delete_stale_devices_after: 1y From aec639e3e33f5ca2f3456c715d28fd7a63c63c8a Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Mon, 17 Apr 2023 18:57:40 -0600 Subject: [PATCH 103/106] Move Spam Checker callbacks to a dedicated file (#15453) --- changelog.d/15453.misc | 1 + synapse/app/_base.py | 2 +- synapse/federation/federation_base.py | 6 ++-- synapse/federation/federation_server.py | 8 +++-- synapse/handlers/directory.py | 14 +++++--- synapse/handlers/federation.py | 4 +-- synapse/handlers/message.py | 10 ++++-- synapse/handlers/register.py | 4 +-- synapse/handlers/room.py | 10 ++++-- synapse/handlers/room_member.py | 22 ++++++++----- synapse/handlers/user_directory.py | 6 ++-- synapse/media/media_storage.py | 7 ++-- synapse/module_api/__init__.py | 33 +++++++++---------- synapse/module_api/callbacks/__init__.py | 11 ++++++- .../callbacks/spamchecker_callbacks.py} | 3 +- synapse/server.py | 7 +--- tests/handlers/test_user_directory.py | 2 +- tests/media/test_media_storage.py | 2 +- tests/rest/client/test_rooms.py | 26 +++++++++++---- tests/server.py | 2 +- 20 files changed, 107 insertions(+), 73 deletions(-) create mode 100644 changelog.d/15453.misc rename synapse/{events/spamcheck.py => module_api/callbacks/spamchecker_callbacks.py} (99%) diff --git a/changelog.d/15453.misc b/changelog.d/15453.misc new file mode 100644 index 000000000000..9981606c3226 --- /dev/null +++ b/changelog.d/15453.misc @@ -0,0 +1 @@ +Move various module API callback registration methods to a dedicated class. 
\ No newline at end of file diff --git a/synapse/app/_base.py b/synapse/app/_base.py index f7b866978cca..954402e4d2cc 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -64,7 +64,6 @@ from synapse.config.server import ListenerConfig, ManholeConfig, TCPListenerConfig from synapse.crypto import context_factory from synapse.events.presence_router import load_legacy_presence_router -from synapse.events.spamcheck import load_legacy_spam_checkers from synapse.events.third_party_rules import load_legacy_third_party_event_rules from synapse.handlers.auth import load_legacy_password_auth_providers from synapse.http.site import SynapseSite @@ -73,6 +72,7 @@ from synapse.metrics import install_gc_manager, register_threadpool from synapse.metrics.background_process_metrics import wrap_as_background_process from synapse.metrics.jemalloc import setup_jemalloc_stats +from synapse.module_api.callbacks.spamchecker_callbacks import load_legacy_spam_checkers from synapse.types import ISynapseReactor from synapse.util import SYNAPSE_VERSION from synapse.util.caches.lrucache import setup_expire_lru_cache_entries diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index 29fae716f589..3df975958da2 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -51,7 +51,7 @@ def __init__(self, hs: "HomeServer"): self.server_name = hs.hostname self.keyring = hs.get_keyring() - self.spam_checker = hs.get_spam_checker() + self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker self.store = hs.get_datastores().main self._clock = hs.get_clock() self._storage_controllers = hs.get_storage_controllers() @@ -137,9 +137,9 @@ async def _check_sigs_and_hash( ) return redacted_event - spam_check = await self.spam_checker.check_event_for_spam(pdu) + spam_check = await self._spam_checker_module_callbacks.check_event_for_spam(pdu) - if spam_check != self.spam_checker.NOT_SPAM: + if spam_check != self._spam_checker_module_callbacks.NOT_SPAM: logger.warning("Event contains spam, soft-failing %s", pdu.event_id) log_kv( { diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 64e99292ec04..d7740eb3b448 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -130,7 +130,7 @@ def __init__(self, hs: "HomeServer"): super().__init__(hs) self.handler = hs.get_federation_handler() - self._spam_checker = hs.get_spam_checker() + self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker self._federation_event_handler = hs.get_federation_event_handler() self.state = hs.get_state_handler() self._event_auth_handler = hs.get_event_auth_handler() @@ -1129,7 +1129,7 @@ async def _handle_received_pdu(self, origin: str, pdu: EventBase) -> None: logger.warning("event id %s: %s", pdu.event_id, e) raise FederationError("ERROR", 403, str(e), affected=pdu.event_id) - if await self._spam_checker.should_drop_federated_event(pdu): + if await self._spam_checker_module_callbacks.should_drop_federated_event(pdu): logger.warning( "Unstaged federated event contains spam, dropping %s", pdu.event_id ) @@ -1174,7 +1174,9 @@ async def _get_next_nonspam_staged_event_for_room( origin, event = next - if await self._spam_checker.should_drop_federated_event(event): + if await self._spam_checker_module_callbacks.should_drop_federated_event( + event + ): logger.warning( "Staged federated event contains spam, dropping %s", event.event_id, diff --git 
a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 1fb23cc9bf51..5e8316e2e511 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -60,7 +60,7 @@ def __init__(self, hs: "HomeServer"): "directory", self.on_directory_query ) - self.spam_checker = hs.get_spam_checker() + self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker async def _create_association( self, @@ -145,10 +145,12 @@ async def create_association( 403, "You must be in the room to create an alias for it" ) - spam_check = await self.spam_checker.user_may_create_room_alias( - user_id, room_alias + spam_check = ( + await self._spam_checker_module_callbacks.user_may_create_room_alias( + user_id, room_alias + ) ) - if spam_check != self.spam_checker.NOT_SPAM: + if spam_check != self._spam_checker_module_callbacks.NOT_SPAM: raise AuthError( 403, "This user is not permitted to create this alias", @@ -444,7 +446,9 @@ async def edit_published_room_list( """ user_id = requester.user.to_string() - spam_check = await self.spam_checker.user_may_publish_room(user_id, room_id) + spam_check = await self._spam_checker_module_callbacks.user_may_publish_room( + user_id, room_id + ) if spam_check != NOT_SPAM: raise AuthError( 403, diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 65461a078723..d1a88cc60460 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -141,7 +141,7 @@ def __init__(self, hs: "HomeServer"): self.server_name = hs.hostname self.keyring = hs.get_keyring() self.is_mine_id = hs.is_mine_id - self.spam_checker = hs.get_spam_checker() + self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker self.event_creation_handler = hs.get_event_creation_handler() self.event_builder_factory = hs.get_event_builder_factory() self._event_auth_handler = hs.get_event_auth_handler() @@ -1042,7 +1042,7 @@ async def on_invite_request( if self.hs.config.server.block_non_admin_invites: raise SynapseError(403, "This server does not accept room invites") - spam_check = await self.spam_checker.user_may_invite( + spam_check = await self._spam_checker_module_callbacks.user_may_invite( event.sender, event.state_key, event.room_id ) if spam_check != NOT_SPAM: diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index a17fe3bf530b..2e964ed37e9a 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -508,7 +508,7 @@ def __init__(self, hs: "HomeServer"): self._bulk_push_rule_evaluator = hs.get_bulk_push_rule_evaluator() - self.spam_checker = hs.get_spam_checker() + self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker self.third_party_event_rules: "ThirdPartyEventRules" = ( self.hs.get_third_party_event_rules() ) @@ -1035,8 +1035,12 @@ async def create_and_send_nonmember_event( event.sender, ) - spam_check_result = await self.spam_checker.check_event_for_spam(event) - if spam_check_result != self.spam_checker.NOT_SPAM: + spam_check_result = ( + await self._spam_checker_module_callbacks.check_event_for_spam( + event + ) + ) + if spam_check_result != self._spam_checker_module_callbacks.NOT_SPAM: if isinstance(spam_check_result, tuple): try: [code, dict] = spam_check_result diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index c8bf2439afb5..61c4b833bd30 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -110,7 +110,7 @@ def __init__(self, hs: "HomeServer"): self._server_notices_mxid = 
hs.config.servernotices.server_notices_mxid self._server_name = hs.hostname - self.spam_checker = hs.get_spam_checker() + self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker if hs.config.worker.worker_app: self._register_client = ReplicationRegisterServlet.make_client(hs) @@ -259,7 +259,7 @@ async def register_user( await self.check_registration_ratelimit(address) - result = await self.spam_checker.check_registration_for_spam( + result = await self._spam_checker_module_callbacks.check_registration_for_spam( threepid, localpart, user_agent_ips or [], diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 2d69cabf43d0..efd9612d90ca 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -106,7 +106,7 @@ def __init__(self, hs: "HomeServer"): self.auth_blocking = hs.get_auth_blocking() self.clock = hs.get_clock() self.hs = hs - self.spam_checker = hs.get_spam_checker() + self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker self.event_creation_handler = hs.get_event_creation_handler() self.room_member_handler = hs.get_room_member_handler() self._event_auth_handler = hs.get_event_auth_handler() @@ -449,7 +449,9 @@ async def clone_existing_room( """ user_id = requester.user.to_string() - spam_check = await self.spam_checker.user_may_create_room(user_id) + spam_check = await self._spam_checker_module_callbacks.user_may_create_room( + user_id + ) if spam_check != NOT_SPAM: raise SynapseError( 403, @@ -761,7 +763,9 @@ async def create_room( ) if not is_requester_admin: - spam_check = await self.spam_checker.user_may_create_room(user_id) + spam_check = await self._spam_checker_module_callbacks.user_may_create_room( + user_id + ) if spam_check != NOT_SPAM: raise SynapseError( 403, diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 1d8b0aee6ff7..ec317e60239a 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -96,7 +96,7 @@ def __init__(self, hs: "HomeServer"): self.member_as_limiter = Linearizer(max_count=10, name="member_as_limiter") self.clock = hs.get_clock() - self.spam_checker = hs.get_spam_checker() + self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker self.third_party_event_rules = hs.get_third_party_event_rules() self._server_notices_mxid = self.config.servernotices.server_notices_mxid self._enable_lookup = hs.config.registration.enable_3pid_lookup @@ -806,7 +806,7 @@ async def update_membership_locked( ) block_invite_result = (Codes.FORBIDDEN, {}) - spam_check = await self.spam_checker.user_may_invite( + spam_check = await self._spam_checker_module_callbacks.user_may_invite( requester.user.to_string(), target_id, room_id ) if spam_check != NOT_SPAM: @@ -940,8 +940,10 @@ async def update_membership_locked( # a room then they're allowed to join it. and not new_room ): - spam_check = await self.spam_checker.user_may_join_room( - target.to_string(), room_id, is_invited=inviter is not None + spam_check = ( + await self._spam_checker_module_callbacks.user_may_join_room( + target.to_string(), room_id, is_invited=inviter is not None + ) ) if spam_check != NOT_SPAM: raise SynapseError( @@ -1550,11 +1552,13 @@ async def do_3pid_invite( ) else: # Check if the spamchecker(s) allow this invite to go through. 
- spam_check = await self.spam_checker.user_may_send_3pid_invite( - inviter_userid=requester.user.to_string(), - medium=medium, - address=address, - room_id=room_id, + spam_check = ( + await self._spam_checker_module_callbacks.user_may_send_3pid_invite( + inviter_userid=requester.user.to_string(), + medium=medium, + address=address, + room_id=room_id, + ) ) if spam_check != NOT_SPAM: raise SynapseError( diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py index 28a92d41d6fc..05197edc9546 100644 --- a/synapse/handlers/user_directory.py +++ b/synapse/handlers/user_directory.py @@ -94,7 +94,7 @@ def __init__(self, hs: "HomeServer"): self.is_mine_id = hs.is_mine_id self.update_user_directory = hs.config.worker.should_update_user_directory self.search_all_users = hs.config.userdirectory.user_directory_search_all_users - self.spam_checker = hs.get_spam_checker() + self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker self._hs = hs # The current position in the current_state_delta stream @@ -149,7 +149,9 @@ async def search_users( # Remove any spammy users from the results. non_spammy_users = [] for user in results["results"]: - if not await self.spam_checker.check_username_for_spam(user): + if not await self._spam_checker_module_callbacks.check_username_for_spam( + user + ): non_spammy_users.append(user) results["results"] = non_spammy_users diff --git a/synapse/media/media_storage.py b/synapse/media/media_storage.py index a7e22a91e115..a819d954070a 100644 --- a/synapse/media/media_storage.py +++ b/synapse/media/media_storage.py @@ -36,7 +36,6 @@ from twisted.internet.interfaces import IConsumer from twisted.protocols.basic import FileSender -import synapse from synapse.api.errors import NotFoundError from synapse.logging.context import defer_to_thread, make_deferred_yieldable from synapse.util import Clock @@ -74,7 +73,7 @@ def __init__( self.local_media_directory = local_media_directory self.filepaths = filepaths self.storage_providers = storage_providers - self.spam_checker = hs.get_spam_checker() + self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker self.clock = hs.get_clock() async def store_file(self, source: IO, file_info: FileInfo) -> str: @@ -145,10 +144,10 @@ async def finish() -> None: f.flush() f.close() - spam_check = await self.spam_checker.check_media_file_for_spam( + spam_check = await self._spam_checker_module_callbacks.check_media_file_for_spam( ReadableFileWrapper(self.clock, fname), file_info ) - if spam_check != synapse.module_api.NOT_SPAM: + if spam_check != self._spam_checker_module_callbacks.NOT_SPAM: logger.info("Blocking media due to spam checker") # Note that we'll delete the stored media, due to the # try/except below. 
The media also won't be stored in diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 595c23e78d7e..eeafea74d15c 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -44,20 +44,6 @@ GET_USERS_FOR_STATES_CALLBACK, PresenceRouter, ) -from synapse.events.spamcheck import ( - CHECK_EVENT_FOR_SPAM_CALLBACK, - CHECK_MEDIA_FILE_FOR_SPAM_CALLBACK, - CHECK_REGISTRATION_FOR_SPAM_CALLBACK, - CHECK_USERNAME_FOR_SPAM_CALLBACK, - SHOULD_DROP_FEDERATED_EVENT_CALLBACK, - USER_MAY_CREATE_ROOM_ALIAS_CALLBACK, - USER_MAY_CREATE_ROOM_CALLBACK, - USER_MAY_INVITE_CALLBACK, - USER_MAY_JOIN_ROOM_CALLBACK, - USER_MAY_PUBLISH_ROOM_CALLBACK, - USER_MAY_SEND_3PID_INVITE_CALLBACK, - SpamChecker, -) from synapse.events.third_party_rules import ( CHECK_CAN_DEACTIVATE_USER_CALLBACK, CHECK_CAN_SHUTDOWN_ROOM_CALLBACK, @@ -105,6 +91,20 @@ ON_LEGACY_SEND_MAIL_CALLBACK, ON_USER_REGISTRATION_CALLBACK, ) +from synapse.module_api.callbacks.spamchecker_callbacks import ( + CHECK_EVENT_FOR_SPAM_CALLBACK, + CHECK_MEDIA_FILE_FOR_SPAM_CALLBACK, + CHECK_REGISTRATION_FOR_SPAM_CALLBACK, + CHECK_USERNAME_FOR_SPAM_CALLBACK, + SHOULD_DROP_FEDERATED_EVENT_CALLBACK, + USER_MAY_CREATE_ROOM_ALIAS_CALLBACK, + USER_MAY_CREATE_ROOM_CALLBACK, + USER_MAY_INVITE_CALLBACK, + USER_MAY_JOIN_ROOM_CALLBACK, + USER_MAY_PUBLISH_ROOM_CALLBACK, + USER_MAY_SEND_3PID_INVITE_CALLBACK, + SpamCheckerModuleApiCallbacks, +) from synapse.rest.client.login import LoginResponse from synapse.storage import DataStore from synapse.storage.background_updates import ( @@ -147,7 +147,7 @@ """ PRESENCE_ALL_USERS = PresenceRouter.ALL_USERS -NOT_SPAM = SpamChecker.NOT_SPAM +NOT_SPAM = SpamCheckerModuleApiCallbacks.NOT_SPAM __all__ = [ "errors", @@ -271,7 +271,6 @@ def __init__(self, hs: "HomeServer", auth_handler: AuthHandler) -> None: self._public_room_list_manager = PublicRoomListManager(hs) self._account_data_manager = AccountDataManager(hs) - self._spam_checker = hs.get_spam_checker() self._third_party_event_rules = hs.get_third_party_event_rules() self._password_auth_provider = hs.get_password_auth_provider() self._presence_router = hs.get_presence_router() @@ -305,7 +304,7 @@ def register_spam_checker_callbacks( Added in Synapse v1.37.0. """ - return self._spam_checker.register_callbacks( + return self._callbacks.spam_checker.register_callbacks( check_event_for_spam=check_event_for_spam, should_drop_federated_event=should_drop_federated_event, user_may_join_room=user_may_join_room, diff --git a/synapse/module_api/callbacks/__init__.py b/synapse/module_api/callbacks/__init__.py index 3d977bf6558a..5cdb2c003a7c 100644 --- a/synapse/module_api/callbacks/__init__.py +++ b/synapse/module_api/callbacks/__init__.py @@ -12,11 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
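The module-facing interface is unchanged by this relocation: modules still call `register_spam_checker_callbacks()` on the `ModuleApi` and return `NOT_SPAM` to allow an action. A minimal sketch of a module using that interface (the module class and its policy are hypothetical; the callback signature follows the `user_may_invite` hook registered above):

```python
from typing import Literal, Union

from synapse.module_api import NOT_SPAM, ModuleApi
from synapse.module_api.errors import Codes


class ExampleSpamChecker:
    def __init__(self, config: dict, api: ModuleApi):
        # Registration is now routed through SpamCheckerModuleApiCallbacks
        # internally, but the public entry point stays the same.
        api.register_spam_checker_callbacks(user_may_invite=self.user_may_invite)

    async def user_may_invite(
        self, inviter: str, invitee: str, room_id: str
    ) -> Union[Literal["NOT_SPAM"], Codes]:
        # Toy policy: reject invites from one hard-coded (made-up) user.
        if inviter == "@spammer:example.com":
            return Codes.FORBIDDEN
        return NOT_SPAM
```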
+from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from synapse.server import HomeServer + from synapse.module_api.callbacks.account_validity_callbacks import ( AccountValidityModuleApiCallbacks, ) +from synapse.module_api.callbacks.spamchecker_callbacks import ( + SpamCheckerModuleApiCallbacks, +) class ModuleApiCallbacks: - def __init__(self) -> None: + def __init__(self, hs: "HomeServer") -> None: self.account_validity = AccountValidityModuleApiCallbacks() + self.spam_checker = SpamCheckerModuleApiCallbacks(hs) diff --git a/synapse/events/spamcheck.py b/synapse/module_api/callbacks/spamchecker_callbacks.py similarity index 99% rename from synapse/events/spamcheck.py rename to synapse/module_api/callbacks/spamchecker_callbacks.py index 765c15bb5135..4456d1b81eb2 100644 --- a/synapse/events/spamcheck.py +++ b/synapse/module_api/callbacks/spamchecker_callbacks.py @@ -286,11 +286,10 @@ def run(*args: Any, **kwargs: Any) -> Awaitable: api.register_spam_checker_callbacks(**hooks) -class SpamChecker: +class SpamCheckerModuleApiCallbacks: NOT_SPAM: Literal["NOT_SPAM"] = "NOT_SPAM" def __init__(self, hs: "synapse.server.HomeServer") -> None: - self.hs = hs self.clock = hs.get_clock() self._check_event_for_spam_callbacks: List[CHECK_EVENT_FOR_SPAM_CALLBACK] = [] diff --git a/synapse/server.py b/synapse/server.py index a191c1999347..559724594b89 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -42,7 +42,6 @@ from synapse.crypto.keyring import Keyring from synapse.events.builder import EventBuilderFactory from synapse.events.presence_router import PresenceRouter -from synapse.events.spamcheck import SpamChecker from synapse.events.third_party_rules import ThirdPartyEventRules from synapse.events.utils import EventClientSerializer from synapse.federation.federation_client import FederationClient @@ -687,10 +686,6 @@ def get_user_directory_handler(self) -> UserDirectoryHandler: def get_stats_handler(self) -> StatsHandler: return StatsHandler(self) - @cache_in_self - def get_spam_checker(self) -> SpamChecker: - return SpamChecker(self) - @cache_in_self def get_third_party_event_rules(self) -> ThirdPartyEventRules: return ThirdPartyEventRules(self) @@ -803,7 +798,7 @@ def get_module_api(self) -> ModuleApi: @cache_in_self def get_module_api_callbacks(self) -> ModuleApiCallbacks: - return ModuleApiCallbacks() + return ModuleApiCallbacks(self) @cache_in_self def get_account_data_handler(self) -> AccountDataHandler: diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py index da4d24082648..15a7dc681854 100644 --- a/tests/handlers/test_user_directory.py +++ b/tests/handlers/test_user_directory.py @@ -792,7 +792,7 @@ async def allow_all(user_profile: UserProfile) -> bool: return False # Configure a spam checker that does not filter any users. 
- spam_checker = self.hs.get_spam_checker() + spam_checker = self.hs.get_module_api_callbacks().spam_checker spam_checker._check_username_for_spam_callbacks = [allow_all] # The results do not change: diff --git a/tests/media/test_media_storage.py b/tests/media/test_media_storage.py index 870047d0f250..f0f2da65db0b 100644 --- a/tests/media/test_media_storage.py +++ b/tests/media/test_media_storage.py @@ -31,7 +31,6 @@ from synapse.api.errors import Codes from synapse.events import EventBase -from synapse.events.spamcheck import load_legacy_spam_checkers from synapse.http.types import QueryParams from synapse.logging.context import make_deferred_yieldable from synapse.media._base import FileInfo @@ -39,6 +38,7 @@ from synapse.media.media_storage import MediaStorage, ReadableFileWrapper from synapse.media.storage_provider import FileStorageProviderBackend from synapse.module_api import ModuleApi +from synapse.module_api.callbacks.spamchecker_callbacks import load_legacy_spam_checkers from synapse.rest import admin from synapse.rest.client import login from synapse.server import HomeServer diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index a4900703c415..4d39c89f6f19 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -814,7 +814,9 @@ async def user_may_join_room( return False join_mock = Mock(side_effect=user_may_join_room) - self.hs.get_spam_checker()._user_may_join_room_callbacks.append(join_mock) + self.hs.get_module_api_callbacks().spam_checker._user_may_join_room_callbacks.append( + join_mock + ) channel = self.make_request( "POST", @@ -840,7 +842,9 @@ async def user_may_join_room_codes( return Codes.CONSENT_NOT_GIVEN join_mock = Mock(side_effect=user_may_join_room_codes) - self.hs.get_spam_checker()._user_may_join_room_callbacks.append(join_mock) + self.hs.get_module_api_callbacks().spam_checker._user_may_join_room_callbacks.append( + join_mock + ) channel = self.make_request( "POST", @@ -1162,7 +1166,9 @@ async def user_may_join_room( # `spec` argument is needed for this function mock to have `__qualname__`, which # is needed for `Measure` metrics buried in SpamChecker. callback_mock = Mock(side_effect=user_may_join_room, spec=lambda *x: None) - self.hs.get_spam_checker()._user_may_join_room_callbacks.append(callback_mock) + self.hs.get_module_api_callbacks().spam_checker._user_may_join_room_callbacks.append( + callback_mock + ) # Join a first room, without being invited to it. self.helper.join(self.room1, self.user2, tok=self.tok2) @@ -1227,7 +1233,9 @@ async def user_may_join_room( # `spec` argument is needed for this function mock to have `__qualname__`, which # is needed for `Measure` metrics buried in SpamChecker. callback_mock = Mock(side_effect=user_may_join_room, spec=lambda *x: None) - self.hs.get_spam_checker()._user_may_join_room_callbacks.append(callback_mock) + self.hs.get_module_api_callbacks().spam_checker._user_may_join_room_callbacks.append( + callback_mock + ) # Join a first room, without being invited to it. 
self.helper.join(self.room1, self.user2, tok=self.tok2) @@ -1643,7 +1651,7 @@ async def check_event_for_spam( spam_checker = SpamCheck() - self.hs.get_spam_checker()._check_event_for_spam_callbacks.append( + self.hs.get_module_api_callbacks().spam_checker._check_event_for_spam_callbacks.append( spam_checker.check_event_for_spam ) @@ -3381,7 +3389,9 @@ def test_threepid_invite_spamcheck_deprecated(self) -> None: # `spec` argument is needed for this function mock to have `__qualname__`, which # is needed for `Measure` metrics buried in SpamChecker. mock = Mock(return_value=make_awaitable(True), spec=lambda *x: None) - self.hs.get_spam_checker()._user_may_send_3pid_invite_callbacks.append(mock) + self.hs.get_module_api_callbacks().spam_checker._user_may_send_3pid_invite_callbacks.append( + mock + ) # Send a 3PID invite into the room and check that it succeeded. email_to_invite = "teresa@example.com" @@ -3446,7 +3456,9 @@ def test_threepid_invite_spamcheck(self) -> None: return_value=make_awaitable(synapse.module_api.NOT_SPAM), spec=lambda *x: None, ) - self.hs.get_spam_checker()._user_may_send_3pid_invite_callbacks.append(mock) + self.hs.get_module_api_callbacks().spam_checker._user_may_send_3pid_invite_callbacks.append( + mock + ) # Send a 3PID invite into the room and check that it succeeded. email_to_invite = "teresa@example.com" diff --git a/tests/server.py b/tests/server.py index b52ff1c4632c..a49dc90e3272 100644 --- a/tests/server.py +++ b/tests/server.py @@ -73,11 +73,11 @@ from synapse.config.database import DatabaseConnectionConfig from synapse.config.homeserver import HomeServerConfig from synapse.events.presence_router import load_legacy_presence_router -from synapse.events.spamcheck import load_legacy_spam_checkers from synapse.events.third_party_rules import load_legacy_third_party_event_rules from synapse.handlers.auth import load_legacy_password_auth_providers from synapse.http.site import SynapseRequest from synapse.logging.context import ContextResourceUsage +from synapse.module_api.callbacks.spamchecker_callbacks import load_legacy_spam_checkers from synapse.server import HomeServer from synapse.storage import DataStore from synapse.storage.database import LoggingDatabaseConnection From ce007103030e281098cf4dccffed44581a32bd13 Mon Sep 17 00:00:00 2001 From: "Olivier Wilkinson (reivilibre)" Date: Tue, 18 Apr 2023 09:52:44 +0100 Subject: [PATCH 104/106] 1.82.0rc1 --- CHANGES.md | 69 +++++++++++++++++++++++++++++++++++++++ changelog.d/15181.bugfix | 1 - changelog.d/15333.feature | 1 - changelog.d/15372.misc | 1 - changelog.d/15373.misc | 1 - changelog.d/15374.misc | 1 - changelog.d/15375.misc | 1 - changelog.d/15376.misc | 1 - changelog.d/15382.misc | 1 - changelog.d/15393.misc | 1 - changelog.d/15394.misc | 1 - changelog.d/15395.misc | 1 - changelog.d/15404.misc | 1 - changelog.d/15405.removal | 1 - changelog.d/15406.misc | 1 - changelog.d/15409.misc | 1 - changelog.d/15410.bugfix | 1 - changelog.d/15412.misc | 1 - changelog.d/15413.misc | 1 - changelog.d/15414.misc | 1 - changelog.d/15415.misc | 1 - changelog.d/15421.misc | 1 - changelog.d/15423.bugfix | 1 - changelog.d/15425.bugfix | 1 - changelog.d/15427.misc | 1 - changelog.d/15428.bugfix | 1 - changelog.d/15429.misc | 1 - changelog.d/15431.feature | 1 - changelog.d/15432.misc | 1 - changelog.d/15433.misc | 1 - changelog.d/15435.misc | 1 - changelog.d/15436.feature | 1 - changelog.d/15438.misc | 1 - changelog.d/15441.misc | 1 - changelog.d/15442.misc | 1 - changelog.d/15443.misc | 1 - changelog.d/15444.misc | 1 - 
changelog.d/15445.misc | 1 - changelog.d/15446.misc | 1 - changelog.d/15447.misc | 1 - changelog.d/15448.misc | 1 - changelog.d/15452.doc | 1 - changelog.d/15453.misc | 1 - debian/changelog | 6 ++++ pyproject.toml | 2 +- 45 files changed, 76 insertions(+), 43 deletions(-) delete mode 100644 changelog.d/15181.bugfix delete mode 100644 changelog.d/15333.feature delete mode 100644 changelog.d/15372.misc delete mode 100644 changelog.d/15373.misc delete mode 100644 changelog.d/15374.misc delete mode 100644 changelog.d/15375.misc delete mode 100644 changelog.d/15376.misc delete mode 100644 changelog.d/15382.misc delete mode 100644 changelog.d/15393.misc delete mode 100644 changelog.d/15394.misc delete mode 100644 changelog.d/15395.misc delete mode 100644 changelog.d/15404.misc delete mode 100644 changelog.d/15405.removal delete mode 100644 changelog.d/15406.misc delete mode 100644 changelog.d/15409.misc delete mode 100644 changelog.d/15410.bugfix delete mode 100644 changelog.d/15412.misc delete mode 100644 changelog.d/15413.misc delete mode 100644 changelog.d/15414.misc delete mode 100644 changelog.d/15415.misc delete mode 100644 changelog.d/15421.misc delete mode 100644 changelog.d/15423.bugfix delete mode 100644 changelog.d/15425.bugfix delete mode 100644 changelog.d/15427.misc delete mode 100644 changelog.d/15428.bugfix delete mode 100644 changelog.d/15429.misc delete mode 100644 changelog.d/15431.feature delete mode 100644 changelog.d/15432.misc delete mode 100644 changelog.d/15433.misc delete mode 100644 changelog.d/15435.misc delete mode 100644 changelog.d/15436.feature delete mode 100644 changelog.d/15438.misc delete mode 100644 changelog.d/15441.misc delete mode 100644 changelog.d/15442.misc delete mode 100644 changelog.d/15443.misc delete mode 100644 changelog.d/15444.misc delete mode 100644 changelog.d/15445.misc delete mode 100644 changelog.d/15446.misc delete mode 100644 changelog.d/15447.misc delete mode 100644 changelog.d/15448.misc delete mode 100644 changelog.d/15452.doc delete mode 100644 changelog.d/15453.misc diff --git a/CHANGES.md b/CHANGES.md index 182d7ddd899d..a1ff7437cb1a 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,72 @@ +Synapse 1.82.0rc1 (2023-04-18) +============================== + +Features +-------- + +- Allow loading the `/directory/room/{roomAlias}` endpoint on workers. ([\#15333](https://github.com/matrix-org/synapse/issues/15333)) +- Add some validation to `instance_map` configuration loading. ([\#15431](https://github.com/matrix-org/synapse/issues/15431)) +- Allow loading the `/capabilities` endpoint on workers. ([\#15436](https://github.com/matrix-org/synapse/issues/15436)) + + +Bugfixes +-------- + +- Delete server-side backup keys when deactivating an account. ([\#15181](https://github.com/matrix-org/synapse/issues/15181)) +- Fix and document untold assumption that `on_logged_out` module hooks will be called before the deletion of pushers. ([\#15410](https://github.com/matrix-org/synapse/issues/15410)) +- Improve robustness when handling a perspective key response by deduplicating received server keys. ([\#15423](https://github.com/matrix-org/synapse/issues/15423)) +- Synapse now correctly fails to start if the config option `app_service_config_files` is not a list. ([\#15425](https://github.com/matrix-org/synapse/issues/15425)) +- Disable loading `RefreshTokenServlet` (`/_matrix/client/(r0|v3|unstable)/refresh`) on workers. 
([\#15428](https://github.com/matrix-org/synapse/issues/15428)) + + +Improved Documentation +---------------------- + +- Note that the `delete_stale_devices_after` background job always runs on the main process. ([\#15452](https://github.com/matrix-org/synapse/issues/15452)) + + +Deprecations and Removals +------------------------- + +- Remove the broken, unspecced registration fallback. Note that the *login* fallback is unaffected by this change. ([\#15405](https://github.com/matrix-org/synapse/issues/15405)) + + +Internal Changes +---------------- + +- Bump black from 23.1.0 to 23.3.0. ([\#15372](https://github.com/matrix-org/synapse/issues/15372)) +- Bump pyopenssl from 23.1.0 to 23.1.1. ([\#15373](https://github.com/matrix-org/synapse/issues/15373)) +- Bump types-psycopg2 from 2.9.21.8 to 2.9.21.9. ([\#15374](https://github.com/matrix-org/synapse/issues/15374)) +- Bump types-netaddr from 0.8.0.6 to 0.8.0.7. ([\#15375](https://github.com/matrix-org/synapse/issues/15375)) +- Bump types-opentracing from 2.4.10.3 to 2.4.10.4. ([\#15376](https://github.com/matrix-org/synapse/issues/15376)) +- Bump dawidd6/action-download-artifact from 2.26.0 to 2.26.1. ([\#15404](https://github.com/matrix-org/synapse/issues/15404)) +- Bump parameterized from 0.8.1 to 0.9.0. ([\#15412](https://github.com/matrix-org/synapse/issues/15412)) +- Bump types-pillow from 9.4.0.17 to 9.4.0.19. ([\#15413](https://github.com/matrix-org/synapse/issues/15413)) +- Bump sentry-sdk from 1.17.0 to 1.19.1. ([\#15414](https://github.com/matrix-org/synapse/issues/15414)) +- Bump immutabledict from 2.2.3 to 2.2.4. ([\#15415](https://github.com/matrix-org/synapse/issues/15415)) +- Bump dawidd6/action-download-artifact from 2.26.1 to 2.27.0. ([\#15441](https://github.com/matrix-org/synapse/issues/15441)) +- Bump serde_json from 1.0.95 to 1.0.96. ([\#15442](https://github.com/matrix-org/synapse/issues/15442)) +- Bump serde from 1.0.159 to 1.0.160. ([\#15443](https://github.com/matrix-org/synapse/issues/15443)) +- Bump pillow from 9.4.0 to 9.5.0. ([\#15444](https://github.com/matrix-org/synapse/issues/15444)) +- Bump furo from 2023.3.23 to 2023.3.27. ([\#15445](https://github.com/matrix-org/synapse/issues/15445)) +- Bump types-pyopenssl from 23.1.0.0 to 23.1.0.2. ([\#15446](https://github.com/matrix-org/synapse/issues/15446)) +- Bump mypy from 1.0.0 to 1.0.1. ([\#15447](https://github.com/matrix-org/synapse/issues/15447)) +- Bump psycopg2 from 2.9.5 to 2.9.6. ([\#15448](https://github.com/matrix-org/synapse/issues/15448)) +- Improve DB performance of clearing out old data from `stream_ordering_to_exterm`. ([\#15382](https://github.com/matrix-org/synapse/issues/15382), [\#15429](https://github.com/matrix-org/synapse/issues/15429)) +- Implement [MSC3989](https://github.com/matrix-org/matrix-spec-proposals/pull/3989) redaction algorithm. ([\#15393](https://github.com/matrix-org/synapse/issues/15393)) +- Implement [MSC2175](https://github.com/matrix-org/matrix-doc/pull/2175) to stop adding `creator` to create events. ([\#15394](https://github.com/matrix-org/synapse/issues/15394)) +- Implement [MSC2174](https://github.com/matrix-org/matrix-spec-proposals/pull/2174) to move the `redacts` key to a `content` property. ([\#15395](https://github.com/matrix-org/synapse/issues/15395)) +- Trust dtonlay/rust-toolchain in CI. ([\#15406](https://github.com/matrix-org/synapse/issues/15406)) +- Explicitly install Synapse during typechecking in CI. 
+- Only load the SSO redirect servlet if SSO is enabled. ([\#15421](https://github.com/matrix-org/synapse/issues/15421))
+- Refactor `SimpleHttpClient` to pull out a base class. ([\#15427](https://github.com/matrix-org/synapse/issues/15427))
+- Improve type hints. ([\#15432](https://github.com/matrix-org/synapse/issues/15432))
+- Convert async to normal tests in `TestSSOHandler`. ([\#15433](https://github.com/matrix-org/synapse/issues/15433))
+- Speed up the user directory background update. ([\#15435](https://github.com/matrix-org/synapse/issues/15435))
+- Disable directory listing for static resources in `/_matrix/static/`. ([\#15438](https://github.com/matrix-org/synapse/issues/15438))
+- Move various module API callback registration methods to a dedicated class. ([\#15453](https://github.com/matrix-org/synapse/issues/15453))
+
+
 Synapse 1.81.0 (2023-04-11)
 ===========================
diff --git a/changelog.d/15181.bugfix b/changelog.d/15181.bugfix
deleted file mode 100644
index 191bb6f611a7..000000000000
--- a/changelog.d/15181.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Delete server-side backup keys when deactivating an account.
\ No newline at end of file
diff --git a/changelog.d/15333.feature b/changelog.d/15333.feature
deleted file mode 100644
index 35ea89ad8982..000000000000
--- a/changelog.d/15333.feature
+++ /dev/null
@@ -1 +0,0 @@
-Allow loading `/directory/room/{roomAlias}` endpoint on workers.
\ No newline at end of file
diff --git a/changelog.d/15372.misc b/changelog.d/15372.misc
deleted file mode 100644
index a191353ff0f3..000000000000
--- a/changelog.d/15372.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump black from 23.1.0 to 23.3.0.
diff --git a/changelog.d/15373.misc b/changelog.d/15373.misc
deleted file mode 100644
index e93801b0de93..000000000000
--- a/changelog.d/15373.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump pyopenssl from 23.1.0 to 23.1.1.
diff --git a/changelog.d/15374.misc b/changelog.d/15374.misc
deleted file mode 100644
index e0c076719c1d..000000000000
--- a/changelog.d/15374.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump types-psycopg2 from 2.9.21.8 to 2.9.21.9.
diff --git a/changelog.d/15375.misc b/changelog.d/15375.misc
deleted file mode 100644
index 7ea628047090..000000000000
--- a/changelog.d/15375.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump types-netaddr from 0.8.0.6 to 0.8.0.7.
diff --git a/changelog.d/15376.misc b/changelog.d/15376.misc
deleted file mode 100644
index 34d24dbf9c06..000000000000
--- a/changelog.d/15376.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump types-opentracing from 2.4.10.3 to 2.4.10.4.
diff --git a/changelog.d/15382.misc b/changelog.d/15382.misc
deleted file mode 100644
index c5b054d19e72..000000000000
--- a/changelog.d/15382.misc
+++ /dev/null
@@ -1 +0,0 @@
-Improve DB performance of clearing out old data from `stream_ordering_to_exterm`.
diff --git a/changelog.d/15393.misc b/changelog.d/15393.misc
deleted file mode 100644
index 24483c8d785c..000000000000
--- a/changelog.d/15393.misc
+++ /dev/null
@@ -1 +0,0 @@
-Implement [MSC3989](https://github.com/matrix-org/matrix-spec-proposals/pull/3989) redaction algorithm.
diff --git a/changelog.d/15394.misc b/changelog.d/15394.misc
deleted file mode 100644
index 91e6540438a6..000000000000
--- a/changelog.d/15394.misc
+++ /dev/null
@@ -1 +0,0 @@
-Implement [MSC2175](https://github.com/matrix-org/matrix-doc/pull/2175) to stop adding `creator` to create events.
diff --git a/changelog.d/15395.misc b/changelog.d/15395.misc
deleted file mode 100644
index ee938452416c..000000000000
--- a/changelog.d/15395.misc
+++ /dev/null
@@ -1 +0,0 @@
-Implement [MSC2174](https://github.com/matrix-org/matrix-spec-proposals/pull/2174) to move the `redacts` key to a `content` property.
diff --git a/changelog.d/15404.misc b/changelog.d/15404.misc
deleted file mode 100644
index c5ad99073f1d..000000000000
--- a/changelog.d/15404.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump dawidd6/action-download-artifact from 2.26.0 to 2.26.1.
diff --git a/changelog.d/15405.removal b/changelog.d/15405.removal
deleted file mode 100644
index 83340041539f..000000000000
--- a/changelog.d/15405.removal
+++ /dev/null
@@ -1 +0,0 @@
-Remove the broken, unspecced registration fallback. Note that the *login* fallback is unaffected by this change.
diff --git a/changelog.d/15406.misc b/changelog.d/15406.misc
deleted file mode 100644
index d2f9eb0dd473..000000000000
--- a/changelog.d/15406.misc
+++ /dev/null
@@ -1 +0,0 @@
-Trust dtolnay/rust-toolchain in CI.
diff --git a/changelog.d/15409.misc b/changelog.d/15409.misc
deleted file mode 100644
index 007111da34f2..000000000000
--- a/changelog.d/15409.misc
+++ /dev/null
@@ -1 +0,0 @@
-Explicitly install Synapse during typechecking in CI.
diff --git a/changelog.d/15410.bugfix b/changelog.d/15410.bugfix
deleted file mode 100644
index eb540e33c5fe..000000000000
--- a/changelog.d/15410.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix and document a previously undocumented assumption that `on_logged_out` module hooks will be called before the deletion of pushers.
diff --git a/changelog.d/15412.misc b/changelog.d/15412.misc
deleted file mode 100644
index bdfb84ee90a5..000000000000
--- a/changelog.d/15412.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump parameterized from 0.8.1 to 0.9.0.
diff --git a/changelog.d/15413.misc b/changelog.d/15413.misc
deleted file mode 100644
index 38acc41fef2f..000000000000
--- a/changelog.d/15413.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump types-pillow from 9.4.0.17 to 9.4.0.19.
diff --git a/changelog.d/15414.misc b/changelog.d/15414.misc
deleted file mode 100644
index 3716f40e2530..000000000000
--- a/changelog.d/15414.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump sentry-sdk from 1.17.0 to 1.19.1.
diff --git a/changelog.d/15415.misc b/changelog.d/15415.misc
deleted file mode 100644
index 47aeee0b2b48..000000000000
--- a/changelog.d/15415.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump immutabledict from 2.2.3 to 2.2.4.
diff --git a/changelog.d/15421.misc b/changelog.d/15421.misc
deleted file mode 100644
index 5deea3ac5b4b..000000000000
--- a/changelog.d/15421.misc
+++ /dev/null
@@ -1 +0,0 @@
-Only load the SSO redirect servlet if SSO is enabled.
\ No newline at end of file
diff --git a/changelog.d/15423.bugfix b/changelog.d/15423.bugfix
deleted file mode 100644
index dfb60ddd2fc1..000000000000
--- a/changelog.d/15423.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Improve robustness when handling a perspective key response by deduplicating received server keys.
\ No newline at end of file
diff --git a/changelog.d/15425.bugfix b/changelog.d/15425.bugfix
deleted file mode 100644
index fd104a63b3bd..000000000000
--- a/changelog.d/15425.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Synapse now correctly fails to start if the config option `app_service_config_files` is not a list.
\ No newline at end of file
diff --git a/changelog.d/15427.misc b/changelog.d/15427.misc
deleted file mode 100644
index ef873e3b2b49..000000000000
--- a/changelog.d/15427.misc
+++ /dev/null
@@ -1 +0,0 @@
-Refactor `SimpleHttpClient` to pull out a base class.
diff --git a/changelog.d/15428.bugfix b/changelog.d/15428.bugfix
deleted file mode 100644
index 1083f00b81d6..000000000000
--- a/changelog.d/15428.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Disable loading `RefreshTokenServlet` (`/_matrix/client/(r0|v3|unstable)/refresh`) on workers.
\ No newline at end of file
diff --git a/changelog.d/15429.misc b/changelog.d/15429.misc
deleted file mode 100644
index c5b054d19e72..000000000000
--- a/changelog.d/15429.misc
+++ /dev/null
@@ -1 +0,0 @@
-Improve DB performance of clearing out old data from `stream_ordering_to_exterm`.
diff --git a/changelog.d/15431.feature b/changelog.d/15431.feature
deleted file mode 100644
index 4492406b49fb..000000000000
--- a/changelog.d/15431.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add some validation to `instance_map` configuration loading.
diff --git a/changelog.d/15432.misc b/changelog.d/15432.misc
deleted file mode 100644
index 93ceaeafc9b9..000000000000
--- a/changelog.d/15432.misc
+++ /dev/null
@@ -1 +0,0 @@
-Improve type hints.
diff --git a/changelog.d/15433.misc b/changelog.d/15433.misc
deleted file mode 100644
index f1d83506bcd9..000000000000
--- a/changelog.d/15433.misc
+++ /dev/null
@@ -1 +0,0 @@
-Convert async to normal tests in `TestSSOHandler`.
\ No newline at end of file
diff --git a/changelog.d/15435.misc b/changelog.d/15435.misc
deleted file mode 100644
index e0f591b6d141..000000000000
--- a/changelog.d/15435.misc
+++ /dev/null
@@ -1 +0,0 @@
-Speed up the user directory background update.
diff --git a/changelog.d/15436.feature b/changelog.d/15436.feature
deleted file mode 100644
index d83f8c3e4a57..000000000000
--- a/changelog.d/15436.feature
+++ /dev/null
@@ -1 +0,0 @@
-Allow loading `/capabilities` endpoint on workers.
\ No newline at end of file
diff --git a/changelog.d/15438.misc b/changelog.d/15438.misc
deleted file mode 100644
index 1edcbac7e294..000000000000
--- a/changelog.d/15438.misc
+++ /dev/null
@@ -1 +0,0 @@
-Disable directory listing for static resources in `/_matrix/static/`.
\ No newline at end of file
diff --git a/changelog.d/15441.misc b/changelog.d/15441.misc
deleted file mode 100644
index a46333f3a1eb..000000000000
--- a/changelog.d/15441.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump dawidd6/action-download-artifact from 2.26.1 to 2.27.0.
diff --git a/changelog.d/15442.misc b/changelog.d/15442.misc
deleted file mode 100644
index 83fcb12a098c..000000000000
--- a/changelog.d/15442.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump serde_json from 1.0.95 to 1.0.96.
diff --git a/changelog.d/15443.misc b/changelog.d/15443.misc
deleted file mode 100644
index c32c23aacb66..000000000000
--- a/changelog.d/15443.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump serde from 1.0.159 to 1.0.160.
diff --git a/changelog.d/15444.misc b/changelog.d/15444.misc
deleted file mode 100644
index 32f1a0eba2d8..000000000000
--- a/changelog.d/15444.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump pillow from 9.4.0 to 9.5.0.
diff --git a/changelog.d/15445.misc b/changelog.d/15445.misc
deleted file mode 100644
index 2ca214a7468d..000000000000
--- a/changelog.d/15445.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump furo from 2023.3.23 to 2023.3.27.
diff --git a/changelog.d/15446.misc b/changelog.d/15446.misc
deleted file mode 100644
index 0947909e2b5f..000000000000
--- a/changelog.d/15446.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump types-pyopenssl from 23.1.0.0 to 23.1.0.2.
diff --git a/changelog.d/15447.misc b/changelog.d/15447.misc
deleted file mode 100644
index 16314e0fdcb3..000000000000
--- a/changelog.d/15447.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump mypy from 1.0.0 to 1.0.1.
diff --git a/changelog.d/15448.misc b/changelog.d/15448.misc
deleted file mode 100644
index 8e4e2576ef27..000000000000
--- a/changelog.d/15448.misc
+++ /dev/null
@@ -1 +0,0 @@
-Bump psycopg2 from 2.9.5 to 2.9.6.
diff --git a/changelog.d/15452.doc b/changelog.d/15452.doc
deleted file mode 100644
index 330c90b3d672..000000000000
--- a/changelog.d/15452.doc
+++ /dev/null
@@ -1 +0,0 @@
-Note that the `delete_stale_devices_after` background job always runs on the main process.
\ No newline at end of file
diff --git a/changelog.d/15453.misc b/changelog.d/15453.misc
deleted file mode 100644
index 9981606c3226..000000000000
--- a/changelog.d/15453.misc
+++ /dev/null
@@ -1 +0,0 @@
-Move various module API callback registration methods to a dedicated class.
\ No newline at end of file
diff --git a/debian/changelog b/debian/changelog
index 9eee66fda3a0..4e621e3d91d8 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.82.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.82.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Apr 2023 09:47:30 +0100
+
 matrix-synapse-py3 (1.81.0) stable; urgency=medium
 
   * New Synapse release 1.81.0.
diff --git a/pyproject.toml b/pyproject.toml
index 3ffd96c50dc5..2998bbe95031 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"
 
 [tool.poetry]
 name = "matrix-synapse"
-version = "1.81.0"
+version = "1.82.0rc1"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
 license = "Apache-2.0"

From 2f144dcdeec8b6eb00d79402e9e6199610a987b6 Mon Sep 17 00:00:00 2001
From: "Olivier Wilkinson (reivilibre)"
Date: Tue, 18 Apr 2023 10:56:39 +0100
Subject: [PATCH 105/106] Revert "Bump pillow from 9.4.0 to 9.5.0 (#15444)"

This reverts commit efab11825184a4251554f1e412549a4c23329d8b.
--- poetry.lock | 153 ++++++++++++++++++++++++++++------------------------ 1 file changed, 82 insertions(+), 71 deletions(-) diff --git a/poetry.lock b/poetry.lock index f09a5b151ca9..4f1ebbfe5320 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1644,82 +1644,93 @@ files = [ [[package]] name = "pillow" -version = "9.5.0" +version = "9.4.0" description = "Python Imaging Library (Fork)" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "Pillow-9.5.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:ace6ca218308447b9077c14ea4ef381ba0b67ee78d64046b3f19cf4e1139ad16"}, - {file = "Pillow-9.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3d403753c9d5adc04d4694d35cf0391f0f3d57c8e0030aac09d7678fa8030aa"}, - {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ba1b81ee69573fe7124881762bb4cd2e4b6ed9dd28c9c60a632902fe8db8b38"}, - {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe7e1c262d3392afcf5071df9afa574544f28eac825284596ac6db56e6d11062"}, - {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f36397bf3f7d7c6a3abdea815ecf6fd14e7fcd4418ab24bae01008d8d8ca15e"}, - {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:252a03f1bdddce077eff2354c3861bf437c892fb1832f75ce813ee94347aa9b5"}, - {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85ec677246533e27770b0de5cf0f9d6e4ec0c212a1f89dfc941b64b21226009d"}, - {file = "Pillow-9.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b416f03d37d27290cb93597335a2f85ed446731200705b22bb927405320de903"}, - {file = "Pillow-9.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1781a624c229cb35a2ac31cc4a77e28cafc8900733a864870c49bfeedacd106a"}, - {file = "Pillow-9.5.0-cp310-cp310-win32.whl", hash = "sha256:8507eda3cd0608a1f94f58c64817e83ec12fa93a9436938b191b80d9e4c0fc44"}, - {file = "Pillow-9.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:d3c6b54e304c60c4181da1c9dadf83e4a54fd266a99c70ba646a9baa626819eb"}, - {file = "Pillow-9.5.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:7ec6f6ce99dab90b52da21cf0dc519e21095e332ff3b399a357c187b1a5eee32"}, - {file = "Pillow-9.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:560737e70cb9c6255d6dcba3de6578a9e2ec4b573659943a5e7e4af13f298f5c"}, - {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96e88745a55b88a7c64fa49bceff363a1a27d9a64e04019c2281049444a571e3"}, - {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d9c206c29b46cfd343ea7cdfe1232443072bbb270d6a46f59c259460db76779a"}, - {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfcc2c53c06f2ccb8976fb5c71d448bdd0a07d26d8e07e321c103416444c7ad1"}, - {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:a0f9bb6c80e6efcde93ffc51256d5cfb2155ff8f78292f074f60f9e70b942d99"}, - {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8d935f924bbab8f0a9a28404422da8af4904e36d5c33fc6f677e4c4485515625"}, - {file = "Pillow-9.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fed1e1cf6a42577953abbe8e6cf2fe2f566daebde7c34724ec8803c4c0cda579"}, - {file = "Pillow-9.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c1170d6b195555644f0616fd6ed929dfcf6333b8675fcca044ae5ab110ded296"}, - {file = "Pillow-9.5.0-cp311-cp311-win32.whl", hash = 
"sha256:54f7102ad31a3de5666827526e248c3530b3a33539dbda27c6843d19d72644ec"}, - {file = "Pillow-9.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfa4561277f677ecf651e2b22dc43e8f5368b74a25a8f7d1d4a3a243e573f2d4"}, - {file = "Pillow-9.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:965e4a05ef364e7b973dd17fc765f42233415974d773e82144c9bbaaaea5d089"}, - {file = "Pillow-9.5.0-cp312-cp312-win32.whl", hash = "sha256:22baf0c3cf0c7f26e82d6e1adf118027afb325e703922c8dfc1d5d0156bb2eeb"}, - {file = "Pillow-9.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:432b975c009cf649420615388561c0ce7cc31ce9b2e374db659ee4f7d57a1f8b"}, - {file = "Pillow-9.5.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5d4ebf8e1db4441a55c509c4baa7a0587a0210f7cd25fcfe74dbbce7a4bd1906"}, - {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:375f6e5ee9620a271acb6820b3d1e94ffa8e741c0601db4c0c4d3cb0a9c224bf"}, - {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99eb6cafb6ba90e436684e08dad8be1637efb71c4f2180ee6b8f940739406e78"}, - {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfaaf10b6172697b9bceb9a3bd7b951819d1ca339a5ef294d1f1ac6d7f63270"}, - {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:763782b2e03e45e2c77d7779875f4432e25121ef002a41829d8868700d119392"}, - {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:35f6e77122a0c0762268216315bf239cf52b88865bba522999dc38f1c52b9b47"}, - {file = "Pillow-9.5.0-cp37-cp37m-win32.whl", hash = "sha256:aca1c196f407ec7cf04dcbb15d19a43c507a81f7ffc45b690899d6a76ac9fda7"}, - {file = "Pillow-9.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322724c0032af6692456cd6ed554bb85f8149214d97398bb80613b04e33769f6"}, - {file = "Pillow-9.5.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:a0aa9417994d91301056f3d0038af1199eb7adc86e646a36b9e050b06f526597"}, - {file = "Pillow-9.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f8286396b351785801a976b1e85ea88e937712ee2c3ac653710a4a57a8da5d9c"}, - {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c830a02caeb789633863b466b9de10c015bded434deb3ec87c768e53752ad22a"}, - {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbd359831c1657d69bb81f0db962905ee05e5e9451913b18b831febfe0519082"}, - {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8fc330c3370a81bbf3f88557097d1ea26cd8b019d6433aa59f71195f5ddebbf"}, - {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:7002d0797a3e4193c7cdee3198d7c14f92c0836d6b4a3f3046a64bd1ce8df2bf"}, - {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:229e2c79c00e85989a34b5981a2b67aa079fd08c903f0aaead522a1d68d79e51"}, - {file = "Pillow-9.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9adf58f5d64e474bed00d69bcd86ec4bcaa4123bfa70a65ce72e424bfb88ed96"}, - {file = "Pillow-9.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:662da1f3f89a302cc22faa9f14a262c2e3951f9dbc9617609a47521c69dd9f8f"}, - {file = "Pillow-9.5.0-cp38-cp38-win32.whl", hash = "sha256:6608ff3bf781eee0cd14d0901a2b9cc3d3834516532e3bd673a0a204dc8615fc"}, - {file = "Pillow-9.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:e49eb4e95ff6fd7c0c402508894b1ef0e01b99a44320ba7d8ecbabefddcc5569"}, - {file = "Pillow-9.5.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = 
"sha256:482877592e927fd263028c105b36272398e3e1be3269efda09f6ba21fd83ec66"}, - {file = "Pillow-9.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3ded42b9ad70e5f1754fb7c2e2d6465a9c842e41d178f262e08b8c85ed8a1d8e"}, - {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c446d2245ba29820d405315083d55299a796695d747efceb5717a8b450324115"}, - {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8aca1152d93dcc27dc55395604dcfc55bed5f25ef4c98716a928bacba90d33a3"}, - {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:608488bdcbdb4ba7837461442b90ea6f3079397ddc968c31265c1e056964f1ef"}, - {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:60037a8db8750e474af7ffc9faa9b5859e6c6d0a50e55c45576bf28be7419705"}, - {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:07999f5834bdc404c442146942a2ecadd1cb6292f5229f4ed3b31e0a108746b1"}, - {file = "Pillow-9.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a127ae76092974abfbfa38ca2d12cbeddcdeac0fb71f9627cc1135bedaf9d51a"}, - {file = "Pillow-9.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:489f8389261e5ed43ac8ff7b453162af39c3e8abd730af8363587ba64bb2e865"}, - {file = "Pillow-9.5.0-cp39-cp39-win32.whl", hash = "sha256:9b1af95c3a967bf1da94f253e56b6286b50af23392a886720f563c547e48e964"}, - {file = "Pillow-9.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:77165c4a5e7d5a284f10a6efaa39a0ae8ba839da344f20b111d62cc932fa4e5d"}, - {file = "Pillow-9.5.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:833b86a98e0ede388fa29363159c9b1a294b0905b5128baf01db683672f230f5"}, - {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aaf305d6d40bd9632198c766fb64f0c1a83ca5b667f16c1e79e1661ab5060140"}, - {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0852ddb76d85f127c135b6dd1f0bb88dbb9ee990d2cd9aa9e28526c93e794fba"}, - {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:91ec6fe47b5eb5a9968c79ad9ed78c342b1f97a091677ba0e012701add857829"}, - {file = "Pillow-9.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb841572862f629b99725ebaec3287fc6d275be9b14443ea746c1dd325053cbd"}, - {file = "Pillow-9.5.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c380b27d041209b849ed246b111b7c166ba36d7933ec6e41175fd15ab9eb1572"}, - {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c9af5a3b406a50e313467e3565fc99929717f780164fe6fbb7704edba0cebbe"}, - {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5671583eab84af046a397d6d0ba25343c00cd50bce03787948e0fff01d4fd9b1"}, - {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:84a6f19ce086c1bf894644b43cd129702f781ba5751ca8572f08aa40ef0ab7b7"}, - {file = "Pillow-9.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1e7723bd90ef94eda669a3c2c19d549874dd5badaeefabefd26053304abe5799"}, - {file = "Pillow-9.5.0.tar.gz", hash = "sha256:bf548479d336726d7a0eceb6e767e179fbde37833ae42794602631a070d630f1"}, -] - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] + {file = "Pillow-9.4.0-1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b4b4e9dda4f4e4c4e6896f93e84a8f0bcca3b059de9ddf67dac3c334b1195e1"}, + {file = 
"Pillow-9.4.0-1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:fb5c1ad6bad98c57482236a21bf985ab0ef42bd51f7ad4e4538e89a997624e12"}, + {file = "Pillow-9.4.0-1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:f0caf4a5dcf610d96c3bd32932bfac8aee61c96e60481c2a0ea58da435e25acd"}, + {file = "Pillow-9.4.0-1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:3f4cc516e0b264c8d4ccd6b6cbc69a07c6d582d8337df79be1e15a5056b258c9"}, + {file = "Pillow-9.4.0-1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b8c2f6eb0df979ee99433d8b3f6d193d9590f735cf12274c108bd954e30ca858"}, + {file = "Pillow-9.4.0-1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b70756ec9417c34e097f987b4d8c510975216ad26ba6e57ccb53bc758f490dab"}, + {file = "Pillow-9.4.0-1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:43521ce2c4b865d385e78579a082b6ad1166ebed2b1a2293c3be1d68dd7ca3b9"}, + {file = "Pillow-9.4.0-2-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:9d9a62576b68cd90f7075876f4e8444487db5eeea0e4df3ba298ee38a8d067b0"}, + {file = "Pillow-9.4.0-2-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:87708d78a14d56a990fbf4f9cb350b7d89ee8988705e58e39bdf4d82c149210f"}, + {file = "Pillow-9.4.0-2-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:8a2b5874d17e72dfb80d917213abd55d7e1ed2479f38f001f264f7ce7bae757c"}, + {file = "Pillow-9.4.0-2-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:83125753a60cfc8c412de5896d10a0a405e0bd88d0470ad82e0869ddf0cb3848"}, + {file = "Pillow-9.4.0-2-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:9e5f94742033898bfe84c93c831a6f552bb629448d4072dd312306bab3bd96f1"}, + {file = "Pillow-9.4.0-2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:013016af6b3a12a2f40b704677f8b51f72cb007dac785a9933d5c86a72a7fe33"}, + {file = "Pillow-9.4.0-2-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:99d92d148dd03fd19d16175b6d355cc1b01faf80dae93c6c3eb4163709edc0a9"}, + {file = "Pillow-9.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157"}, + {file = "Pillow-9.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a3049a10261d7f2b6514d35bbb7a4dfc3ece4c4de14ef5876c4b7a23a0e566d"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16a8df99701f9095bea8a6c4b3197da105df6f74e6176c5b410bc2df2fd29a57"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:94cdff45173b1919350601f82d61365e792895e3c3a3443cf99819e6fbf717a5"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ed3e4b4e1e6de75fdc16d3259098de7c6571b1a6cc863b1a49e7d3d53e036070"}, + {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5b2f8a31bd43e0f18172d8ac82347c8f37ef3e0b414431157718aa234991b28"}, + {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:09b89ddc95c248ee788328528e6a2996e09eaccddeeb82a5356e92645733be35"}, + {file = "Pillow-9.4.0-cp310-cp310-win32.whl", hash = "sha256:f09598b416ba39a8f489c124447b007fe865f786a89dbfa48bb5cf395693132a"}, + {file = "Pillow-9.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6e78171be3fb7941f9910ea15b4b14ec27725865a73c15277bc39f5ca4f8391"}, 
+ {file = "Pillow-9.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:3fa1284762aacca6dc97474ee9c16f83990b8eeb6697f2ba17140d54b453e133"}, + {file = "Pillow-9.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eaef5d2de3c7e9b21f1e762f289d17b726c2239a42b11e25446abf82b26ac132"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4dfdae195335abb4e89cc9762b2edc524f3c6e80d647a9a81bf81e17e3fb6f0"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6abfb51a82e919e3933eb137e17c4ae9c0475a25508ea88993bb59faf82f3b35"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451f10ef963918e65b8869e17d67db5e2f4ab40e716ee6ce7129b0cde2876eab"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6663977496d616b618b6cfa43ec86e479ee62b942e1da76a2c3daa1c75933ef4"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:60e7da3a3ad1812c128750fc1bc14a7ceeb8d29f77e0a2356a8fb2aa8925287d"}, + {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:19005a8e58b7c1796bc0167862b1f54a64d3b44ee5d48152b06bb861458bc0f8"}, + {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f715c32e774a60a337b2bb8ad9839b4abf75b267a0f18806f6f4f5f1688c4b5a"}, + {file = "Pillow-9.4.0-cp311-cp311-win32.whl", hash = "sha256:b222090c455d6d1a64e6b7bb5f4035c4dff479e22455c9eaa1bdd4c75b52c80c"}, + {file = "Pillow-9.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba6612b6548220ff5e9df85261bddc811a057b0b465a1226b39bfb8550616aee"}, + {file = "Pillow-9.4.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5f532a2ad4d174eb73494e7397988e22bf427f91acc8e6ebf5bb10597b49c493"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dd5a9c3091a0f414a963d427f920368e2b6a4c2f7527fdd82cde8ef0bc7a327"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef21af928e807f10bf4141cad4746eee692a0dd3ff56cfb25fce076ec3cc8abe"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:847b114580c5cc9ebaf216dd8c8dbc6b00a3b7ab0131e173d7120e6deade1f57"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:653d7fb2df65efefbcbf81ef5fe5e5be931f1ee4332c2893ca638c9b11a409c4"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:46f39cab8bbf4a384ba7cb0bc8bae7b7062b6a11cfac1ca4bc144dea90d4a9f5"}, + {file = "Pillow-9.4.0-cp37-cp37m-win32.whl", hash = "sha256:7ac7594397698f77bce84382929747130765f66406dc2cd8b4ab4da68ade4c6e"}, + {file = "Pillow-9.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:46c259e87199041583658457372a183636ae8cd56dbf3f0755e0f376a7f9d0e6"}, + {file = "Pillow-9.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:0e51f608da093e5d9038c592b5b575cadc12fd748af1479b5e858045fff955a9"}, + {file = "Pillow-9.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:765cb54c0b8724a7c12c55146ae4647e0274a839fb6de7bcba841e04298e1011"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:519e14e2c49fcf7616d6d2cfc5c70adae95682ae20f0395e9280db85e8d6c4df"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d197df5489004db87d90b918033edbeee0bd6df3848a204bca3ff0a903bef837"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0845adc64fe9886db00f5ab68c4a8cd933ab749a87747555cec1c95acea64b0b"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:e1339790c083c5a4de48f688b4841f18df839eb3c9584a770cbd818b33e26d5d"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:a96e6e23f2b79433390273eaf8cc94fec9c6370842e577ab10dabdcc7ea0a66b"}, + {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7cfc287da09f9d2a7ec146ee4d72d6ea1342e770d975e49a8621bf54eaa8f30f"}, + {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d7081c084ceb58278dd3cf81f836bc818978c0ccc770cbbb202125ddabec6628"}, + {file = "Pillow-9.4.0-cp38-cp38-win32.whl", hash = "sha256:df41112ccce5d47770a0c13651479fbcd8793f34232a2dd9faeccb75eb5d0d0d"}, + {file = "Pillow-9.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:7a21222644ab69ddd9967cfe6f2bb420b460dae4289c9d40ff9a4896e7c35c9a"}, + {file = "Pillow-9.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0f3269304c1a7ce82f1759c12ce731ef9b6e95b6df829dccd9fe42912cc48569"}, + {file = "Pillow-9.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb362e3b0976dc994857391b776ddaa8c13c28a16f80ac6522c23d5257156bed"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2e0f87144fcbbe54297cae708c5e7f9da21a4646523456b00cc956bd4c65815"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28676836c7796805914b76b1837a40f76827ee0d5398f72f7dcc634bae7c6264"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0884ba7b515163a1a05440a138adeb722b8a6ae2c2b33aea93ea3118dd3a899e"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:53dcb50fbdc3fb2c55431a9b30caeb2f7027fcd2aeb501459464f0214200a503"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:e8c5cf126889a4de385c02a2c3d3aba4b00f70234bfddae82a5eaa3ee6d5e3e6"}, + {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c6b1389ed66cdd174d040105123a5a1bc91d0aa7059c7261d20e583b6d8cbd2"}, + {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0dd4c681b82214b36273c18ca7ee87065a50e013112eea7d78c7a1b89a739153"}, + {file = "Pillow-9.4.0-cp39-cp39-win32.whl", hash = "sha256:6d9dfb9959a3b0039ee06c1a1a90dc23bac3b430842dcb97908ddde05870601c"}, + {file = "Pillow-9.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:54614444887e0d3043557d9dbc697dbb16cfb5a35d672b7a0fcc1ed0cf1c600b"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b9b752ab91e78234941e44abdecc07f1f0d8f51fb62941d32995b8161f68cfe5"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3b56206244dc8711f7e8b7d6cad4663917cd5b2d950799425076681e8766286"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aabdab8ec1e7ca7f1434d042bf8b1e92056245fb179790dc97ed040361f16bfd"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:db74f5562c09953b2c5f8ec4b7dfd3f5421f31811e97d1dbc0a7c93d6e3a24df"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e9d7747847c53a16a729b6ee5e737cf170f7a16611c143d95aa60a109a59c336"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b52ff4f4e002f828ea6483faf4c4e8deea8d743cf801b74910243c58acc6eda3"}, + {file = 
"Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:575d8912dca808edd9acd6f7795199332696d3469665ef26163cd090fa1f8bfa"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c4ed2ff6760e98d262e0cc9c9a7f7b8a9f61aa4d47c58835cdaf7b0b8811bb"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e621b0246192d3b9cb1dc62c78cfa4c6f6d2ddc0ec207d43c0dedecb914f152a"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8f127e7b028900421cad64f51f75c051b628db17fb00e099eb148761eed598c9"}, + {file = "Pillow-9.4.0.tar.gz", hash = "sha256:a1c2d7780448eb93fbcc3789bf3916aa5720d942e37945f4056680317f1cd23e"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] [[package]] From c34791ef5b612d36c52216a4d9c84ee56ac00c28 Mon Sep 17 00:00:00 2001 From: David Robertson Date: Tue, 25 Apr 2023 11:56:17 +0100 Subject: [PATCH 106/106] 1.82.0 --- CHANGES.md | 6 ++++++ debian/changelog | 6 ++++++ pyproject.toml | 2 +- 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index a1ff7437cb1a..b2cc138ee302 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,9 @@ +Synapse 1.82.0 (2023-04-25) +=========================== + +No significant changes since 1.82.0rc1. + + Synapse 1.82.0rc1 (2023-04-18) ============================== diff --git a/debian/changelog b/debian/changelog index 4e621e3d91d8..f6e8720e5894 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.82.0) stable; urgency=medium + + * New Synapse release 1.82.0. + + -- Synapse Packaging team Tue, 25 Apr 2023 11:56:06 +0100 + matrix-synapse-py3 (1.82.0~rc1) stable; urgency=medium * New Synapse release 1.82.0rc1. diff --git a/pyproject.toml b/pyproject.toml index 2998bbe95031..c08352e4d3fc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml" [tool.poetry] name = "matrix-synapse" -version = "1.82.0rc1" +version = "1.82.0" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "Apache-2.0"